context
stringlengths
2.52k
185k
gt
stringclasses
1 value
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Threading; using System.Collections.Generic; using System.Runtime.InteropServices; // Purpose of program: exercise the GC, with various object sizes and lifetimes. // Allocate objects that have an expiration time specified. When the object's lifetime expires, it is made garbage and then other new objects are created. // // Each object has a lifetime attached to it (in ObjectWrapper). New objects are added to a collection. When the lifetime is expired, the object is removed from the collection and subject to GC. // There are several threads which access the collection in random positions and if there is no object in that position they will create a new one. //One thread is responsible to updating the objects'age and removing expired objects. //The lifetime and the objects'size can be set by command line arguments. //The objects'size is organized in buckets (size ranges), the user can specify the percentage for each bucket. //Collection type can be array or binary tree. 
namespace StressAllocator
{
    /// <summary>
    /// GC stress test: allocates objects of varying sizes and lifetimes across several
    /// threads. Objects live in a shared array; worker threads probe random slots and
    /// replace expired/empty slots with fresh objects, while dedicated threads age
    /// objects, evict expired ones, and prune dead weak references.
    /// </summary>
    public class StressAllocator
    {
        // Describes one size range ("bucket") and the target percentage of live
        // objects whose data size should fall inside it.
        public struct SizeBucket
        {
            public int minsize;
            public int maxsize;
            public float percentage; // percentage of objects that fall into this bucket

            public SizeBucket(int min, int max, float percentObj)
            {
                minsize = min;
                maxsize = max;
                percentage = percentObj;
            }
        }

        // Bucket boundaries (bytes). Anything that does not fit a bucket is allocated
        // at >= 85000 bytes, i.e. on the Large Object Heap.
        private const int SIZEBUCKET_COUNT = 4;
        private const int BUCKET1_MIN = 50;
        private const int BUCKET2_MIN = 200;
        private const int BUCKET3_MIN = 1000;
        private const int BUCKET4_MIN = 10000;
        private const int BUCKETS_MAX = 80000;

        // Default parameters; each may be overridden from the command line (ParseArgs).
        public const int DEFAULT_MINLIFE = 3;        // milliseconds
        public const int DEFAULT_MAXLIFE = 30;       // milliseconds
        public const int DEFAULT_OBJCOUNT = 2000;    // object count
        public const int DEFAULT_ITERATIONS = 400;
        public const float DEFAULT_PINNED = 1.0F;    // percent pinned
        public const float DEFAULT_BUCKET1 = 30.0F;  // percentage of objects with size in this bucket
        public const float DEFAULT_BUCKET2 = 30.0F;
        public const float DEFAULT_BUCKET3 = 20.0F;
        public const float DEFAULT_BUCKET4 = 10.0F;  // remaining will be allocated on the Large Object Heap
        public const int DEFAULT_THREADS = 4;        // number of allocating threads
        public const int THREAD_IDLE_TIME = 0;       // milliseconds
        public const int MAX_REFS = 4;               // max number of references to another object

        public static long timeout = 0;
        public static SizeBucket[] sizeBuckets = new SizeBucket[SIZEBUCKET_COUNT];

        // Current settings (defaults until ParseArgs overrides them).
        public static int minLife = DEFAULT_MINLIFE;             // minimum object lifetime (ms)
        public static int maxLife = DEFAULT_MAXLIFE;             // maximum object lifetime (ms)
        public static int objCount = DEFAULT_OBJCOUNT;           // how many objects are initially allocated
        public static int countIters = DEFAULT_ITERATIONS;
        public static float percentPinned = DEFAULT_PINNED;
        public static bool LOHpin = false;                       // if true, apply percentPinned to just LOH, not overall
        public static float percentBucket1 = DEFAULT_BUCKET1;
        public static float percentBucket2 = DEFAULT_BUCKET2;
        public static float percentBucket3 = DEFAULT_BUCKET3;
        public static float percentBucket4 = DEFAULT_BUCKET4;
        public static int maxRef = MAX_REFS;
        public static int numThreads = DEFAULT_THREADS;
        public static int threadIdleTime = THREAD_IDLE_TIME;     // milliseconds
        public static int randomSeed;

        public static ObjArray objectCollection;
        public static List<Thread> threadList = new List<Thread>();
        public static System.Diagnostics.Stopwatch stopWatch = new System.Diagnostics.Stopwatch();
        public static Object objLock = new Object();
        private static bool s_noLocks = false; // option to not use locks when accessing objects

        // Running status counters.
        public static UInt64 current_TotalObjCount;
        public static UInt64 current_pinObjCount;
        public static UInt64[] current_bucketObjCount = new UInt64[SIZEBUCKET_COUNT]; // how many objects are in each bucket
        public static UInt64 current_LOHObjects = 0;
        public static bool testDone = false;

        // For status output: collection counts for generations 0, 1, 2.
        public static int[] currentCollections = new int[3];
        public static int outputFrequency = 0; // after how many iterations the data is printed
        public static System.TimeSpan totalTime;

        // Weak-reference collection that tracks every allocation this test makes.
        public static WeakReferenceCollection WR_All = new WeakReferenceCollection();
        public static ObjectWrapper dummyObject; // placeholder for "empty" slots in objectCollection
        public static int pointerSize = 4; // bytes

        // Per-thread RNG: System.Random is not thread safe, so each worker seeds its own.
        [ThreadStatic]
        public static Random Rand;

        /// <summary>
        /// Creates one ObjectWrapper with a random lifetime, a size chosen so bucket
        /// percentages converge toward their targets (overflow goes to the LOH), and
        /// a pinned flag driven by the pinning percentage. Registers it in WR_All.
        /// </summary>
        private static ObjectWrapper CreateObject()
        {
            bool isLOHObject = false;

            // Pick a random lifespan from the [minLife, maxLife) interval.
            int lifeSpan = Rand.Next(minLife, maxLife);

            // Decide the data size: assign to the first bucket whose current share of
            // all objects is still below its target percentage.
            int size = 0;
            for (int i = 0; i < SIZEBUCKET_COUNT; i++)
            {
                if ((float)current_bucketObjCount[i] * 100.0F / (float)current_TotalObjCount < sizeBuckets[i].percentage)
                {
                    size = Rand.Next(sizeBuckets[i].minsize, sizeBuckets[i].maxsize);
                    current_bucketObjCount[i]++;
                    break;
                }
            }
            if (size == 0) // all buckets are at/over target; assign to the Large Object Heap
            {
                isLOHObject = true;
                size = Rand.Next(85000, 130000);
                current_LOHObjects++;
            }

            int references = Rand.Next(1, maxRef);

            // Decide whether to pin this object. When LOHpin is set, the pinning
            // percentage is measured against LOH objects only.
            bool pin = false;
            if ((LOHpin && isLOHObject) || !LOHpin)
            {
                float pinPercentage;
                if (LOHpin)
                {
                    pinPercentage = (float)current_pinObjCount * 100.0F / (float)current_LOHObjects;
                }
                else
                {
                    pinPercentage = (float)current_pinObjCount * 100.0F / (float)current_TotalObjCount;
                }
                if (pinPercentage < percentPinned)
                {
                    pin = true;
                    current_pinObjCount++;
                }
            }

            ObjectWrapper myNewObject = new ObjectWrapper(lifeSpan, size, pin, references);
            current_TotalObjCount++;
            WR_All.Add(myNewObject);
            return myNewObject;
        }

        /// <summary>
        /// Entry point: parses arguments, seeds the collection, starts the maintenance
        /// threads and the allocating worker threads, and waits for the workers.
        /// Returns 100 on success, 101 on bad arguments.
        /// </summary>
        public static int Main(string[] args)
        {
            stopWatch.Start();

            for (int i = 0; i < 3; i++)
            {
                currentCollections[i] = 0;
            }

            if (!ParseArgs(args))
                return 101;

            dummyObject = new ObjectWrapper(0, 0, true, 0);
            objectCollection = new ObjArray();
            objectCollection.Init(objCount);

            // One thread updates object ages.
            Thread thrd = new Thread(UpdateObjectAge);
            thrd.Start();
            // Another thread removes expired objects.
            Thread thrd2 = new Thread(RemoveExpiredObjects);
            thrd2.Start();
            // Another thread prunes weak references to dead objects.
            Thread thrd3 = new Thread(RemoveWeakReferences);
            thrd3.Start();

            // Run the test.
            for (int i = 0; i < numThreads; ++i)
            {
                Thread thread = new Thread(RunTest);
                threadList.Add(thread);
                thread.Start(i);
            }
            foreach (Thread t in threadList)
            {
                t.Join();
            }

            testDone = true;
            return 100;
        }

        /// <summary>
        /// Worker body. Fills this thread's slice of the collection with fresh objects,
        /// then repeatedly accesses random slots (replacing expired/empty ones) for
        /// countIters iterations, optionally printing GC stats along the way.
        /// </summary>
        public static void RunTest(object threadInfoObj)
        {
            System.Diagnostics.Stopwatch threadStopwatch = new System.Diagnostics.Stopwatch();
            threadStopwatch.Start();
            int threadIndex = (int)threadInfoObj;

            // Initialize the thread-static Random with a different seed per thread.
            Rand = new Random(randomSeed + threadIndex);

            // Allocate the initial objects; each thread populates one slice of the
            // array, with the last thread also taking the remainder.
            int objPerThread = objCount / numThreads;
            int remainder = objCount % numThreads;
            if (threadIndex == numThreads - 1)
            {
                objPerThread += remainder;
            }
            Console.WriteLine("thread " + threadIndex + "; allocating " + objPerThread + " objects;");
            int beginIndex = threadIndex * (objCount / numThreads);
            for (int i = beginIndex; i < beginIndex + objPerThread; i++)
            {
                objectCollection.SetObjectAt(CreateObject(), i);
            }

            Console.WriteLine("starting steady state");
            // Steady state: objects die and others are created.
            for (int i = 0; i < countIters; ++i)
            {
                for (int j = 0; j < objCount; j++)
                {
                    // Randomly access a position in the collection.
                    int pos = Rand.Next(0, objCount);
                    bool ret;
                    if (s_noLocks)
                    {
                        ret = objectCollection.AccessObjectAt(pos);
                    }
                    else
                    {
                        lock (objLock)
                        {
                            ret = objectCollection.AccessObjectAt(pos);
                        }
                    }
                }

                if ((Rand.Next(0, numThreads) != 0))
                {
                    Thread.Sleep(threadIdleTime);
                }

                if (outputFrequency > 0 && i > 0)
                {
                    if ((i % outputFrequency == 0 || i == countIters - 1) && threadIndex == 0)
                    {
                        OutputGCStats(i);
                    }
                }
            }
            testDone = true;
        }

        /// <summary>
        /// Maintenance thread: roughly once per millisecond, adds the elapsed time
        /// to every object's age.
        /// </summary>
        public static void UpdateObjectAge(object threadInfoObj)
        {
            long previousTime = 0;
            while (!testDone)
            {
                long currentTime = stopWatch.ElapsedMilliseconds;
                if (currentTime - previousTime >= 1)
                {
                    objectCollection.UpdateObjectsAge(currentTime - previousTime);
                }
                else
                {
                    System.Threading.Thread.Sleep(1);
                }
                previousTime = currentTime;
            }
        }

        /// <summary>
        /// Maintenance thread: roughly once per millisecond, replaces objects whose
        /// age exceeds their lifetime with the dummy placeholder (making them garbage).
        /// </summary>
        public static void RemoveExpiredObjects(object threadInfoObj)
        {
            long previousTime = 0;
            while (!testDone)
            {
                long currentTime = stopWatch.ElapsedMilliseconds;
                if (currentTime - previousTime >= 1)
                {
                    objectCollection.RemoveExpiredObjects();
                }
                else
                {
                    System.Threading.Thread.Sleep(1);
                }
                previousTime = currentTime;
            }
        }

        /// <summary>
        /// Maintenance thread: every 100 ms, drops weak references whose targets
        /// have been collected.
        /// </summary>
        public static void RemoveWeakReferences(object threadInfoObj)
        {
            System.Threading.Thread.Sleep(100);
            while (!testDone)
            {
                System.Threading.Thread.Sleep(100);
                WR_All.RemoveDeadObjects();
            }
        }

        /// <summary>
        /// Prints allocated memory, live object count, per-generation collection
        /// deltas since the previous report, and elapsed/total timing. Restarts
        /// stopWatch so the next report measures only the next interval.
        /// </summary>
        public static void OutputGCStats(int iterations)
        {
            Console.WriteLine("Iterations = {0}", iterations);
            Console.WriteLine("AllocatedMemory = {0} bytes", GC.GetTotalMemory(false));
            Console.WriteLine("Number of objects in collection: {0}", objectCollection.Count);

            // Collection counts and elapsed time for this group of iterations.
            int[] collectionCount = new int[3];
            for (int j = 0; j < 3; j++)
            {
                collectionCount[j] = GC.CollectionCount(j);
            }
            int[] newCollections = new int[3];
            for (int j = 0; j < 3; j++)
            {
                newCollections[j] = collectionCount[j] - currentCollections[j];
            }
            // Update the running count of collections.
            for (int j = 0; j < 3; j++)
            {
                currentCollections[j] = collectionCount[j];
            }
            Console.WriteLine("Gen 0 Collections = {0}", newCollections[0]);
            Console.WriteLine("Gen 1 Collections = {0}", newCollections[1]);
            Console.WriteLine("Gen 2 Collections = {0}", newCollections[2]);

            stopWatch.Stop();
            Console.Write("Elapsed time: ");
            System.TimeSpan tSpan = stopWatch.Elapsed;
            if (tSpan.Days > 0)
                Console.Write("{0} days, ", tSpan.Days);
            if (tSpan.Hours > 0)
                Console.Write("{0} hours, ", tSpan.Hours);
            if (tSpan.Minutes > 0)
                Console.Write("{0} minutes, ", tSpan.Minutes);
            Console.Write("{0} seconds, ", tSpan.Seconds);
            Console.Write("{0} milliseconds", tSpan.Milliseconds);

            totalTime += tSpan;
            stopWatch.Reset();
            stopWatch.Start();

            Console.Write(" (Total time: ");
            if (totalTime.Days > 0)
                Console.Write("{0} days, ", totalTime.Days);
            if (totalTime.Hours > 0)
                Console.Write("{0} hours, ", totalTime.Hours);
            if (totalTime.Minutes > 0)
                Console.Write("{0} minutes, ", totalTime.Minutes);
            Console.Write("{0} seconds, ", totalTime.Seconds);
            Console.WriteLine("{0} milliseconds)", totalTime.Milliseconds);
            Console.WriteLine("----------------------------------");
        }

        // Fills sizeBuckets from the bucket boundary constants and the (possibly
        // user-overridden) per-bucket percentages.
        public static void InitializeSizeBuckets()
        {
            sizeBuckets[0] = new SizeBucket(BUCKET1_MIN, BUCKET2_MIN, percentBucket1);
            sizeBuckets[1] = new SizeBucket(BUCKET2_MIN, BUCKET3_MIN, percentBucket2);
            sizeBuckets[2] = new SizeBucket(BUCKET3_MIN, BUCKET4_MIN, percentBucket3);
            sizeBuckets[3] = new SizeBucket(BUCKET4_MIN, BUCKETS_MAX, percentBucket4);
        }

        /// <summary>
        /// Parses command-line arguments and initializes values not set by args.
        /// Option matching uses OrdinalIgnoreCase (fix: the original lower-cased with
        /// the current culture, which mis-matches in e.g. the Turkish locale).
        /// Returns false (after printing a message/usage) on any invalid input.
        /// </summary>
        public static bool ParseArgs(string[] args)
        {
            randomSeed = (int)DateTime.Now.Ticks;

            try
            {
                for (int i = 0; i < args.Length; ++i)
                {
                    string currentArg = args[i];
                    string currentArgValue;
                    if (currentArg.StartsWith("-", StringComparison.Ordinal) || currentArg.StartsWith("/", StringComparison.Ordinal))
                    {
                        currentArg = currentArg.Substring(1);
                    }
                    else
                    {
                        Console.WriteLine("Error! Unexpected argument {0}", currentArg);
                        return false;
                    }

                    if (currentArg.StartsWith("?", StringComparison.Ordinal))
                    {
                        Usage();
                        return false;
                    }
                    else if (string.Equals(currentArg, "iter", StringComparison.OrdinalIgnoreCase)) // number of iterations
                    {
                        currentArgValue = args[++i];
                        countIters = Int32.Parse(currentArgValue);
                    }
                    else if (string.Equals(currentArg, "minlife", StringComparison.OrdinalIgnoreCase))
                    {
                        currentArgValue = args[++i];
                        minLife = Int32.Parse(currentArgValue);
                    }
                    else if (string.Equals(currentArg, "maxlife", StringComparison.OrdinalIgnoreCase))
                    {
                        currentArgValue = args[++i];
                        maxLife = Int32.Parse(currentArgValue);
                    }
                    else if (string.Equals(currentArg, "objcount", StringComparison.OrdinalIgnoreCase))
                    {
                        currentArgValue = args[++i];
                        objCount = Int32.Parse(currentArgValue);
                    }
                    else if (string.Equals(currentArg, "threads", StringComparison.OrdinalIgnoreCase) || currentArg == "t")
                    {
                        currentArgValue = args[++i];
                        numThreads = Int32.Parse(currentArgValue);
                    }
                    // Fix: the original also matched "t" here (copy-paste from the
                    // "threads" branch); that alternative was dead/ambiguous.
                    else if (string.Equals(currentArg, "idletime", StringComparison.OrdinalIgnoreCase))
                    {
                        currentArgValue = args[++i];
                        threadIdleTime = Int32.Parse(currentArgValue);
                    }
                    else if (string.Equals(currentArg, "pinned", StringComparison.OrdinalIgnoreCase))
                    {
                        currentArgValue = args[++i];
                        percentPinned = float.Parse(currentArgValue, System.Globalization.CultureInfo.InvariantCulture);
                    }
                    else if (string.Equals(currentArg, "lohpin", StringComparison.OrdinalIgnoreCase)) // for LOH compacting testing: apply the pinning percentage to LOH
                    {
                        LOHpin = true;
                    }
                    else if (string.Equals(currentArg, "bucket1", StringComparison.OrdinalIgnoreCase))
                    {
                        currentArgValue = args[++i];
                        percentBucket1 = float.Parse(currentArgValue, System.Globalization.CultureInfo.InvariantCulture);
                    }
                    else if (string.Equals(currentArg, "bucket2", StringComparison.OrdinalIgnoreCase))
                    {
                        currentArgValue = args[++i];
                        percentBucket2 = float.Parse(currentArgValue, System.Globalization.CultureInfo.InvariantCulture);
                    }
                    else if (string.Equals(currentArg, "bucket3", StringComparison.OrdinalIgnoreCase))
                    {
                        currentArgValue = args[++i];
                        percentBucket3 = float.Parse(currentArgValue, System.Globalization.CultureInfo.InvariantCulture);
                    }
                    else if (string.Equals(currentArg, "bucket4", StringComparison.OrdinalIgnoreCase))
                    {
                        currentArgValue = args[++i];
                        percentBucket4 = float.Parse(currentArgValue, System.Globalization.CultureInfo.InvariantCulture);
                    }
                    else if (string.Equals(currentArg, "nolocks", StringComparison.OrdinalIgnoreCase))
                    {
                        s_noLocks = true;
                    }
                    else if (string.Equals(currentArg, "timeout", StringComparison.OrdinalIgnoreCase)) // seconds; if 0 run forever
                    {
                        currentArgValue = args[++i];
                        timeout = Int64.Parse(currentArgValue);
                        if (timeout == -1)
                        {
                            timeout = Int64.MaxValue;
                        }
                    }
                    else if (string.Equals(currentArg, "out", StringComparison.OrdinalIgnoreCase)) // output frequency
                    {
                        currentArgValue = args[++i];
                        outputFrequency = int.Parse(currentArgValue);
                    }
                    else if (string.Equals(currentArg, "randomseed", StringComparison.OrdinalIgnoreCase)) // random seed (for repro)
                    {
                        currentArgValue = args[++i];
                        randomSeed = Int32.Parse(currentArgValue);
                    }
                    else
                    {
                        Console.WriteLine("Error! Unexpected argument {0}", currentArg);
                        return false;
                    }
                }
            }
            catch (System.Exception e)
            {
                // Covers both missing option values (index out of range) and bad numbers.
                Console.WriteLine("Incorrect arguments");
                Console.WriteLine(e.ToString());
                return false;
            }

            // Basic sanity checks of the parsed values.
            if (countIters < 1 || numThreads < 1 || minLife < 1 || maxLife < 1 || objCount < 1 || outputFrequency < 0)
            {
                Console.WriteLine("Incorrect values for arguments");
                return false;
            }
            if (percentPinned < 0 || percentPinned > 100)
            {
                Console.WriteLine("Incorrect values for percent arguments");
                return false;
            }
            if (percentBucket1 + percentBucket2 + percentBucket3 + percentBucket4 > 100)
            {
                Console.WriteLine("Bad values for buckets percentage");
                return false;
            }

            InitializeSizeBuckets();

            Console.WriteLine("Repro with: ");
            Console.WriteLine("==============================");
            Console.WriteLine("-iter " + countIters);
            Console.WriteLine("-minlife " + minLife);
            Console.WriteLine("-maxlife " + maxLife);
            Console.WriteLine("-objcount " + objCount);
            Console.WriteLine("-t " + numThreads);
            Console.WriteLine("-pinned " + percentPinned);
            Console.WriteLine("-bucket1 " + percentBucket1);
            Console.WriteLine("-bucket2 " + percentBucket2);
            Console.WriteLine("-bucket3 " + percentBucket3);
            Console.WriteLine("-bucket4 " + percentBucket4);
            Console.WriteLine("-out " + outputFrequency);
            Console.WriteLine("-randomseed " + randomSeed);
            Console.WriteLine("==============================");
            return true;
        }

        // Prints command-line usage.
        public static void Usage()
        {
            Console.WriteLine("GCSimulator [options]");
            Console.WriteLine("\nOptions");
            Console.WriteLine("-? Display the usage and exit");
            Console.WriteLine("-i [-iter] <num iterations> : specify number of iterations for the test, default is " + countIters);
            Console.WriteLine("-t <number of threads> : specifiy number of threads, default is " + numThreads);
            Console.WriteLine("-minlife <milliseconds> : minimum object lifetime, default is " + minLife);
            Console.WriteLine("-maxlife <milliseconds> : maximum object lifetime, default is " + maxLife);
            Console.WriteLine("-objcount <count> : how many objects are initially allocated, default is " + objCount);
            Console.WriteLine("-pinned <percent of pinned objects> : specify the percentage of data that we want to pin (number from 0 to 100), default is " + percentPinned);
            Console.WriteLine("-bucket1 <percentage> : specify the percentage of be in size bucket1(" + BUCKET1_MIN + "bytes to " + BUCKET2_MIN + "bytes), default is " + DEFAULT_BUCKET1);
            Console.WriteLine("-bucket2 <percentage> : specify the percentage of be in size bucket2(" + BUCKET2_MIN + "bytes to " + BUCKET3_MIN + "bytes), default is " + DEFAULT_BUCKET2);
            Console.WriteLine("-bucket3 <percentage> : specify the percentage of be in size bucket3(" + BUCKET3_MIN + "bytes to " + BUCKET4_MIN + "bytes), default is " + DEFAULT_BUCKET3);
            Console.WriteLine("-bucket4 <percentage> : specify the percentage of be in size bucket4(" + BUCKET4_MIN + "bytes to " + BUCKETS_MAX + "bytes), default is " + DEFAULT_BUCKET4);
            Console.WriteLine("-out <iterations> : after how many iterations to output data");
            Console.WriteLine("-randomseed <seed> : random seed(for repro)");
        }

        /// <summary>
        /// One test object: carries either a reference-typed payload (m_data, which may
        /// also hold references to other wrappers) or, when pinned, a byte[] payload
        /// held by a pinned GCHandle. Lifetime/age are in milliseconds.
        /// </summary>
        public class ObjectWrapper
        {
            public GCHandle m_pinnedHandle;
            public bool m_pinned = false;
            public int m_lifeTime;      // milliseconds
            public long m_age = 0;      // advanced by UpdateObjectsAge
            public long m_creationTime; // milliseconds since epoch (Ticks / 10000)
            public Object[] m_data;
            protected byte[] m_pinnedData;
            public int m_dataSize;

            public ObjectWrapper(int lifetime, int datasize, bool pinned, int references)
            {
                m_creationTime = DateTime.Now.Ticks / 10000;
                // We want the payload to have an approximate size of datasize.
                m_dataSize = datasize;
                m_lifeTime = lifetime;
                m_pinned = pinned;

                if (!pinned) // cannot pin m_data because reference-type objects cannot be pinned
                {
                    m_data = new Object[m_dataSize / pointerSize];
                    for (int i = 0; i < m_data.Length; i += 100)
                    {
                        m_data[i] = "abc";
                    }
                    // Set up references from this new object to other objects: either
                    // old objects (strong or weak collections) or brand new ones.
                    for (int i = 0; i < references; i++)
                    {
                        int option = Rand.Next(0, 20);
                        int toIndex;
                        int fromIndex = Rand.Next(0, m_data.Length);
                        switch (option)
                        {
                            case 0: // objects in the (strong reference) collection
                                toIndex = Rand.Next(0, objectCollection.Count);
                                ObjectWrapper ow = objectCollection.GetObjectAt(toIndex);
                                if (ow != dummyObject)
                                    m_data[fromIndex] = ow;
                                break;
                            case 1: // objects in the weak reference collection
                                toIndex = Rand.Next(0, WR_All.Count);
                                m_data[fromIndex] = WR_All.GetObjectAt(toIndex);
                                break;
                            case 2: // new objects
                                m_data[fromIndex] = CreateObject();
                                break;
                        }
                    }
                    WR_All.Add(m_data);
                }
                else // pinned
                {
                    m_pinnedData = new byte[m_dataSize];
                    for (int i = 0; i < m_dataSize; i += 1000)
                    {
                        m_pinnedData[i] = 5;
                    }
                    m_pinnedHandle = GCHandle.Alloc(m_pinnedData, GCHandleType.Pinned);
                    WR_All.Add(m_pinnedData);
                }
            }

            // Releases the pinned handle, if any; safe to call more than once.
            public void CleanUp()
            {
                if (m_pinned)
                {
                    if (m_pinnedHandle.IsAllocated)
                        m_pinnedHandle.Free();
                }
            }

            ~ObjectWrapper()
            {
                CleanUp();
            }
        }

        /// <summary>
        /// Fixed-size array of wrappers. Empty slots hold dummyObject; accessing an
        /// empty or expired slot replaces it with a freshly created object.
        /// </summary>
        public class ObjArray
        {
            private ObjectWrapper[] _array;
            private int _size;

            public void Init(int numberOfObjects)
            {
                _array = new ObjectWrapper[numberOfObjects];
                for (int i = 0; i < numberOfObjects; i++)
                {
                    _array[i] = dummyObject;
                }
                _size = numberOfObjects;
            }

            public void SetObjectAt(ObjectWrapper o, int index)
            {
                if (index >= _size)
                {
                    Console.WriteLine("AddObjectAt " + index + " index is out of bounds");
                }
                _array[index] = o;
            }

            // Returns true when the slot was (re)filled with a new object, i.e. it was
            // empty or its occupant's creation-time-based lifetime had expired.
            public bool AccessObjectAt(int index)
            {
                if (index >= _size)
                    return false;
                if (_array[index] == dummyObject)
                {
                    _array[index] = CreateObject();
                    return true;
                }
                long timeNow = DateTime.Now.Ticks / 10000;
                if ((timeNow - _array[index].m_creationTime) > _array[index].m_lifeTime)
                {
                    // Object is expired; put a new one in its place.
                    _array[index] = CreateObject();
                    return true;
                }
                return false;
            }

            public ObjectWrapper GetObjectAt(int index)
            {
                if (index >= _size)
                {
                    Console.WriteLine("GetObject " + index + " index is out of bounds");
                }
                return _array[index];
            }

            // Number of non-empty slots (linear scan).
            public int Count
            {
                get
                {
                    int count = 0;
                    for (int i = 0; i < _size; i++)
                    {
                        if (_array[i] != dummyObject)
                            count++;
                    }
                    return count;
                }
            }

            public int Size
            {
                get { return _size; }
            }

            // One pass through the collection, updating every object's age.
            public void UpdateObjectsAge(long elapsedMsec)
            {
                for (int i = 0; i < _size; i++)
                {
                    ObjectWrapper o = _array[i];
                    o.m_age += (int)elapsedMsec;
                }
            }

            // One pass through the collection, replacing expired objects with the dummy.
            public void RemoveExpiredObjects()
            {
                for (int i = 0; i < _size; i++)
                {
                    if (_array[i] != dummyObject)
                    {
                        if (s_noLocks)
                        {
                            if (_array[i].m_age >= _array[i].m_lifeTime)
                            {
                                _array[i] = dummyObject;
                            }
                        }
                        else
                        {
                            lock (objLock)
                            {
                                if (_array[i].m_age >= _array[i].m_lifeTime)
                                {
                                    _array[i] = dummyObject;
                                }
                            }
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Thread-safe list of WeakReferences tracking every allocation made by the
        /// test, used to reach "old" objects without keeping them alive.
        /// </summary>
        public class WeakReferenceCollection
        {
            private object _WRLock;
            private List<WeakReference> _WR;

            public WeakReferenceCollection()
            {
                _WRLock = new Object();
                _WR = new List<WeakReference>();
            }

            public void Add(Object o)
            {
                lock (_WRLock)
                {
                    _WR.Add(new WeakReference(o));
                }
            }

            // Returns the Object[] payload tracked at (approximately) this index, or
            // null when it is not alive / not an Object[]. An index past the end is
            // clamped to the last element.
            public Object GetObjectAt(int index)
            {
                lock (_WRLock)
                {
                    // Fix: the original clamped index to Count - 1 even when the list
                    // was empty, indexing _WR[-1] and throwing.
                    if (_WR.Count == 0)
                        return null;
                    if (index >= _WR.Count)
                        index = _WR.Count - 1;
                    if (_WR[index] == null)
                    {
                        Console.WriteLine("WRAll null:" + index);
                    }
                    if (_WR[index].IsAlive)
                    {
                        if (_WR[index].Target != null)
                        {
                            Object[] target = _WR[index].Target as Object[];
                            if (target != null)
                            {
                                return target;
                            }
                        }
                    }
                    return null;
                }
            }

            // Diagnostic scan for null entries (should never happen: Add always wraps).
            public void CheckWRAll()
            {
                lock (_WRLock)
                {
                    for (int i = 0; i < _WR.Count; i++)
                    {
                        if (_WR[i] == null)
                        {
                            Console.WriteLine("null:" + i);
                        }
                    }
                }
            }

            public int Count
            {
                get
                {
                    lock (_WRLock)
                    {
                        return _WR.Count;
                    }
                }
            }

            // Removes entries whose targets have been garbage collected; iterates
            // backwards so RemoveAt does not disturb unvisited indices.
            public void RemoveDeadObjects()
            {
                lock (_WRLock)
                {
                    int endCollection = _WR.Count - 1;
                    for (int i = endCollection; i >= 0; i--)
                    {
                        if (!_WR[i].IsAlive)
                        {
                            _WR.RemoveAt(i);
                        }
                    }
                }
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.Globalization; using System.Linq; using Xunit; namespace System.Tests { public partial class StringTests { [Theory] // CurrentCulture [InlineData("Hello", "ello", StringComparison.CurrentCulture, true)] [InlineData("Hello", "ELL", StringComparison.CurrentCulture, false)] [InlineData("Hello", "ElLo", StringComparison.CurrentCulture, false)] [InlineData("Hello", "Larger Hello", StringComparison.CurrentCulture, false)] [InlineData("Hello", "Goodbye", StringComparison.CurrentCulture, false)] [InlineData("", "", StringComparison.CurrentCulture, true)] [InlineData("", "hello", StringComparison.CurrentCulture, false)] [InlineData("Hello", "", StringComparison.CurrentCulture, true)] [InlineData("Hello", "ell" + SoftHyphen, StringComparison.CurrentCulture, true)] [InlineData("Hello", "Ell" + SoftHyphen, StringComparison.CurrentCulture, false)] // CurrentCultureIgnoreCase [InlineData("Hello", "ello", StringComparison.CurrentCultureIgnoreCase, true)] [InlineData("Hello", "ELL", StringComparison.CurrentCultureIgnoreCase, true)] [InlineData("Hello", "ElLo", StringComparison.CurrentCultureIgnoreCase, true)] [InlineData("Hello", "Larger Hello", StringComparison.CurrentCultureIgnoreCase, false)] [InlineData("Hello", "Goodbye", StringComparison.CurrentCultureIgnoreCase, false)] [InlineData("", "", StringComparison.CurrentCultureIgnoreCase, true)] [InlineData("", "hello", StringComparison.CurrentCultureIgnoreCase, false)] [InlineData("Hello", "", StringComparison.CurrentCultureIgnoreCase, true)] [InlineData("Hello", "ell" + SoftHyphen, StringComparison.CurrentCultureIgnoreCase, true)] [InlineData("Hello", "Ell" + SoftHyphen, StringComparison.CurrentCultureIgnoreCase, true)] // InvariantCulture [InlineData("Hello", "ello", 
StringComparison.InvariantCulture, true)]
        // NOTE(review): the [InlineData] list above this point (CurrentCulture cases) starts in an earlier
        // part of the file; these rows continue the data set for Contains(s, value, comparisonType, expected).
        [InlineData("Hello", "ELL", StringComparison.InvariantCulture, false)]
        [InlineData("Hello", "ElLo", StringComparison.InvariantCulture, false)]
        [InlineData("Hello", "Larger Hello", StringComparison.InvariantCulture, false)]
        [InlineData("Hello", "Goodbye", StringComparison.InvariantCulture, false)]
        [InlineData("", "", StringComparison.InvariantCulture, true)]
        [InlineData("", "hello", StringComparison.InvariantCulture, false)]
        [InlineData("Hello", "", StringComparison.InvariantCulture, true)]
        [InlineData("Hello", "ell" + SoftHyphen, StringComparison.InvariantCulture, true)]
        [InlineData("Hello", "Ell" + SoftHyphen, StringComparison.InvariantCulture, false)]
        // InvariantCultureIgnoreCase
        [InlineData("Hello", "ello", StringComparison.InvariantCultureIgnoreCase, true)]
        [InlineData("Hello", "ELL", StringComparison.InvariantCultureIgnoreCase, true)]
        [InlineData("Hello", "ElLo", StringComparison.InvariantCultureIgnoreCase, true)]
        [InlineData("Hello", "Larger Hello", StringComparison.InvariantCultureIgnoreCase, false)]
        [InlineData("Hello", "Goodbye", StringComparison.InvariantCultureIgnoreCase, false)]
        [InlineData("", "", StringComparison.InvariantCultureIgnoreCase, true)]
        [InlineData("", "hello", StringComparison.InvariantCultureIgnoreCase, false)]
        [InlineData("Hello", "", StringComparison.InvariantCultureIgnoreCase, true)]
        [InlineData("Hello", "ell" + SoftHyphen, StringComparison.InvariantCultureIgnoreCase, true)]
        [InlineData("Hello", "Ell" + SoftHyphen, StringComparison.InvariantCultureIgnoreCase, true)]
        // Ordinal
        [InlineData("Hello", "ello", StringComparison.Ordinal, true)]
        [InlineData("Hello", "ELL", StringComparison.Ordinal, false)]
        [InlineData("Hello", "ElLo", StringComparison.Ordinal, false)]
        [InlineData("Hello", "Larger Hello", StringComparison.Ordinal, false)]
        [InlineData("Hello", "Goodbye", StringComparison.Ordinal, false)]
        [InlineData("", "", StringComparison.Ordinal, true)]
        [InlineData("", "hello", StringComparison.Ordinal, false)]
        [InlineData("Hello", "", StringComparison.Ordinal, true)]
        [InlineData("Hello", "ell" + SoftHyphen, StringComparison.Ordinal, false)]
        [InlineData("Hello", "Ell" + SoftHyphen, StringComparison.Ordinal, false)]
        // OrdinalIgnoreCase
        [InlineData("Hello", "ello", StringComparison.OrdinalIgnoreCase, true)]
        [InlineData("Hello", "ELL", StringComparison.OrdinalIgnoreCase, true)]
        [InlineData("Hello", "ElLo", StringComparison.OrdinalIgnoreCase, true)]
        [InlineData("Hello", "Larger Hello", StringComparison.OrdinalIgnoreCase, false)]
        [InlineData("Hello", "Goodbye", StringComparison.OrdinalIgnoreCase, false)]
        [InlineData("", "", StringComparison.OrdinalIgnoreCase, true)]
        [InlineData("", "hello", StringComparison.OrdinalIgnoreCase, false)]
        [InlineData("Hello", "", StringComparison.OrdinalIgnoreCase, true)]
        [InlineData("Hello", "ell" + SoftHyphen, StringComparison.OrdinalIgnoreCase, false)]
        [InlineData("Hello", "Ell" + SoftHyphen, StringComparison.OrdinalIgnoreCase, false)]
        // Verifies string.Contains(value, comparisonType) against the expected result for each data row above.
        public static void Contains(string s, string value, StringComparison comparisonType, bool expected)
        {
            Assert.Equal(expected, s.Contains(value, comparisonType));
        }

        // Culture-sensitive Contains: under tr-TR, "ii" matches "i\u0130" case-insensitively
        // (dotted capital I casing); under en-US it does not. Runs in a remote process so the
        // CurrentCulture change cannot leak into other tests.
        [Fact]
        public static void Contains_StringComparison_TurkishI()
        {
            string str = "\u0069\u0130";
            RemoteInvoke((source) =>
            {
                CultureInfo.CurrentCulture = new CultureInfo("tr-TR");
                Assert.True(source.Contains("\u0069\u0069", StringComparison.CurrentCultureIgnoreCase));
                return SuccessExitCode;
            }, str).Dispose();
            RemoteInvoke((source) =>
            {
                CultureInfo.CurrentCulture = new CultureInfo("en-US");
                Assert.False(source.Contains("\u0069\u0069", StringComparison.CurrentCultureIgnoreCase));
                return SuccessExitCode;
            }, str).Dispose();
        }

        // A null search value must throw ArgumentNullException regardless of the comparison type.
        [Theory]
        [InlineData(StringComparison.CurrentCulture)]
        [InlineData(StringComparison.CurrentCultureIgnoreCase)]
        [InlineData(StringComparison.InvariantCulture)]
        [InlineData(StringComparison.InvariantCultureIgnoreCase)]
        [InlineData(StringComparison.Ordinal)]
        [InlineData(StringComparison.OrdinalIgnoreCase)]
        public static void Contains_NullValue_ThrowsArgumentNullException(StringComparison comparisonType)
        {
            AssertExtensions.Throws<ArgumentNullException>("value", () => "foo".Contains(null, comparisonType));
        }

        // Out-of-range StringComparison values (one below the min, one above the max) must throw.
        // NOTE(review): the method name says ArgumentOutOfRangeException but the assertion expects
        // ArgumentException, matching what string.Contains actually throws — confirm which is intended.
        [Theory]
        [InlineData(StringComparison.CurrentCulture - 1)]
        [InlineData(StringComparison.OrdinalIgnoreCase + 1)]
        public static void Contains_InvalidComparisonType_ThrowsArgumentOutOfRangeException(StringComparison comparisonType)
        {
            AssertExtensions.Throws<ArgumentException>("comparisonType", () => "ab".Contains("a", comparisonType));
        }

        // string.EndsWith(char): ordinal, case-sensitive match on the final character only.
        [Theory]
        [InlineData("Hello", 'o', true)]
        [InlineData("Hello", 'O', false)]
        [InlineData("o", 'o', true)]
        [InlineData("o", 'O', false)]
        [InlineData("Hello", 'e', false)]
        [InlineData("Hello", '\0', false)]
        [InlineData("", '\0', false)]
        [InlineData("\0", '\0', true)]
        [InlineData("", 'a', false)]
        [InlineData("abcdefghijklmnopqrstuvwxyz", 'z', true)]
        public static void EndsWith(string s, char value, bool expected)
        {
            Assert.Equal(expected, s.EndsWith(value));
        }

        // string.StartsWith(char): ordinal, case-sensitive match on the first character only.
        [Theory]
        [InlineData("Hello", 'H', true)]
        [InlineData("Hello", 'h', false)]
        [InlineData("H", 'H', true)]
        [InlineData("H", 'h', false)]
        [InlineData("Hello", 'e', false)]
        [InlineData("Hello", '\0', false)]
        [InlineData("", '\0', false)]
        [InlineData("\0", '\0', true)]
        [InlineData("", 'a', false)]
        [InlineData("abcdefghijklmnopqrstuvwxyz", 'a', true)]
        public static void StartsWith(string s, char value, bool expected)
        {
            Assert.Equal(expected, s.StartsWith(value));
        }

        // Data rows: separator, values, startIndex, count, expected joined string.
        // Null elements are treated as empty strings by string.Join.
        public static IEnumerable<object[]> Join_Char_StringArray_TestData()
        {
            yield return new object[] { '|', new string[0], 0, 0, "" };
            yield return new object[] { '|', new string[] { "a" }, 0, 1, "a" };
            yield return new object[] { '|', new string[] { "a", "b", "c" }, 0, 3, "a|b|c" };
            yield return new object[] { '|', new string[] { "a", "b", "c" }, 0, 2, "a|b" };
            yield return new object[] { '|', new string[] { "a", "b", "c" }, 1, 1, "b" };
            yield return new object[] { '|', new string[] { "a", "b", "c" }, 1, 2, "b|c" };
            yield return new object[] { '|', new string[] { "a", "b", "c" }, 3, 0, "" };
            yield return new object[] { '|', new string[] { "a", "b", "c" }, 0, 0, "" };
            yield return new object[] { '|', new string[] { "", "", "" }, 0, 3, "||" };
            yield return new object[] { '|', new string[] { null, null, null }, 0, 3, "||" };
        }

        // Exercises the char-separator Join overloads; when the range covers the whole array,
        // the IEnumerable/object[] overloads are checked too, plus the string-separator overload
        // for cross-verification.
        [Theory]
        [MemberData(nameof(Join_Char_StringArray_TestData))]
        public static void Join_Char_StringArray(char separator, string[] values, int startIndex, int count, string expected)
        {
            if (startIndex == 0 && count == values.Length)
            {
                Assert.Equal(expected, string.Join(separator, values));
                Assert.Equal(expected, string.Join(separator, (IEnumerable<string>)values));
                Assert.Equal(expected, string.Join(separator, (object[])values));
                Assert.Equal(expected, string.Join(separator, (IEnumerable<object>)values));
            }
            Assert.Equal(expected, string.Join(separator, values, startIndex, count));
            Assert.Equal(expected, string.Join(separator.ToString(), values, startIndex, count));
        }

        // Data rows: separator, boxed values, expected. Objects are formatted via ToString();
        // a ToString() that returns null contributes an empty string.
        public static IEnumerable<object[]> Join_Char_ObjectArray_TestData()
        {
            yield return new object[] { '|', new object[0], "" };
            yield return new object[] { '|', new object[] { 1 }, "1" };
            yield return new object[] { '|', new object[] { 1, 2, 3 }, "1|2|3" };
            yield return new object[] { '|', new object[] { new ObjectWithNullToString(), 2, new ObjectWithNullToString() }, "|2|" };
            yield return new object[] { '|', new object[] { "1", null, "3" }, "1||3" };
            yield return new object[] { '|', new object[] { "", "", "" }, "||" };
            yield return new object[] { '|', new object[] { "", null, "" }, "||" };
            yield return new object[] { '|', new object[] { null, null, null }, "||" };
        }

        [Theory]
        [MemberData(nameof(Join_Char_ObjectArray_TestData))]
        public static void Join_Char_ObjectArray(char separator, object[] values, string expected)
        {
            Assert.Equal(expected, string.Join(separator, values));
            Assert.Equal(expected, string.Join(separator, (IEnumerable<object>)values));
        }

        // Null source arrays throw ArgumentNullException; note the parameter name differs
        // between the string[] overloads ("value") and the object overloads ("values").
        [Fact]
        public static void Join_Char_NullValues_ThrowsArgumentNullException()
        {
            AssertExtensions.Throws<ArgumentNullException>("value", () => string.Join('|', (string[])null));
            AssertExtensions.Throws<ArgumentNullException>("value", () => string.Join('|', (string[])null, 0, 0));
            AssertExtensions.Throws<ArgumentNullException>("values", () => string.Join('|', (object[])null));
            AssertExtensions.Throws<ArgumentNullException>("values", () => string.Join('|', (IEnumerable<object>)null));
        }

        [Fact]
        public static void Join_Char_NegativeStartIndex_ThrowsArgumentOutOfRangeException()
        {
            AssertExtensions.Throws<ArgumentOutOfRangeException>("startIndex", () => string.Join('|', new string[] { "Foo" }, -1, 0));
        }

        [Fact]
        public static void Join_Char_NegativeCount_ThrowsArgumentOutOfRangeException()
        {
            AssertExtensions.Throws<ArgumentOutOfRangeException>("count", () => string.Join('|', new string[] { "Foo" }, 0, -1));
        }

        // startIndex/count combinations that overrun the one-element array must throw.
        [Theory]
        [InlineData(2, 1)]
        [InlineData(2, 0)]
        [InlineData(1, 2)]
        [InlineData(1, 1)]
        [InlineData(0, 2)]
        [InlineData(-1, 0)]
        public static void Join_Char_InvalidStartIndexCount_ThrowsArgumentOutOfRangeException(int startIndex, int count)
        {
            AssertExtensions.Throws<ArgumentOutOfRangeException>("startIndex", () => string.Join('|', new string[] { "Foo" }, startIndex, count));
        }

        // Data rows: source, oldValue, newValue, comparisonType, expected.
        // Covers case-sensitive vs. -insensitive matching, null newValue (removal),
        // soft-hyphen-ignoring culture comparisons, and ordinal Turkish-I behavior.
        public static IEnumerable<object[]> Replace_StringComparison_TestData()
        {
            yield return new object[] { "abc", "abc", "def", StringComparison.CurrentCulture, "def" };
            yield return new object[] { "abc", "ABC", "def", StringComparison.CurrentCulture, "abc" };
            yield return new object[] { "abc", "abc", "", StringComparison.CurrentCulture, "" };
            yield return new object[] { "abc", "b", "LONG", StringComparison.CurrentCulture, "aLONGc" };
            yield return new object[] { "abc", "b", "d", StringComparison.CurrentCulture, "adc" };
            yield return new object[] { "abc", "b", null, StringComparison.CurrentCulture, "ac" };
            yield return new object[] { "abc", "abc" + SoftHyphen, "def", StringComparison.CurrentCulture, "def" };

            yield return new object[] { "abc", "abc", "def", StringComparison.CurrentCultureIgnoreCase, "def" };
            yield return new object[] { "abc", "ABC", "def", StringComparison.CurrentCultureIgnoreCase, "def" };
            yield return new object[] { "abc", "abc", "", StringComparison.CurrentCultureIgnoreCase, "" };
            yield return new object[] { "abc", "b", "LONG", StringComparison.CurrentCultureIgnoreCase, "aLONGc" };
            yield return new object[] { "abc", "b", "d", StringComparison.CurrentCultureIgnoreCase, "adc" };
            yield return new object[] { "abc", "b", null, StringComparison.CurrentCultureIgnoreCase, "ac" };
            yield return new object[] { "abc", "abc" + SoftHyphen, "def", StringComparison.CurrentCultureIgnoreCase, "def" };

            yield return new object[] { "abc", "abc", "def", StringComparison.Ordinal, "def" };
            yield return new object[] { "abc", "ABC", "def", StringComparison.Ordinal, "abc" };
            yield return new object[] { "abc", "abc", "", StringComparison.Ordinal, "" };
            yield return new object[] { "abc", "b", "LONG", StringComparison.Ordinal, "aLONGc" };
            yield return new object[] { "abc", "b", "d", StringComparison.Ordinal, "adc" };
            yield return new object[] { "abc", "b", null, StringComparison.Ordinal, "ac" };
            yield return new object[] { "abc", "abc" + SoftHyphen, "def", StringComparison.Ordinal, "abc" };

            yield return new object[] { "abc", "abc", "def", StringComparison.OrdinalIgnoreCase, "def" };
            yield return new object[] { "abc", "ABC", "def", StringComparison.OrdinalIgnoreCase, "def" };
            yield return new object[] { "abc", "abc", "", StringComparison.OrdinalIgnoreCase, "" };
            yield return new object[] { "abc", "b", "LONG", StringComparison.OrdinalIgnoreCase, "aLONGc" };
            yield return new object[] { "abc", "b", "d", StringComparison.OrdinalIgnoreCase, "adc" };
            yield return new object[] { "abc", "b", null, StringComparison.OrdinalIgnoreCase, "ac" };
            yield return new object[] { "abc", "abc" + SoftHyphen, "def", StringComparison.OrdinalIgnoreCase, "abc" };

            yield return new object[] { "abc", "abc", "def", StringComparison.InvariantCulture, "def" };
            yield return new object[] { "abc", "ABC", "def", StringComparison.InvariantCulture, "abc" };
            yield return new object[] { "abc", "abc", "", StringComparison.InvariantCulture, "" };
            yield return new object[] { "abc", "b", "LONG", StringComparison.InvariantCulture, "aLONGc" };
            yield return new object[] { "abc", "b", "d", StringComparison.InvariantCulture, "adc" };
            yield return new object[] { "abc", "b", null, StringComparison.InvariantCulture, "ac" };
            yield return new object[] { "abc", "abc" + SoftHyphen, "def", StringComparison.InvariantCulture, "def" };

            yield return new object[] { "abc", "abc", "def", StringComparison.InvariantCultureIgnoreCase, "def" };
            yield return new object[] { "abc", "ABC", "def", StringComparison.InvariantCultureIgnoreCase, "def" };
            yield return new object[] { "abc", "abc", "", StringComparison.InvariantCultureIgnoreCase, "" };
            yield return new object[] { "abc", "b", "LONG", StringComparison.InvariantCultureIgnoreCase, "aLONGc" };
            yield return new object[] { "abc", "b", "d", StringComparison.InvariantCultureIgnoreCase, "adc" };
            yield return new object[] { "abc", "b", null, StringComparison.InvariantCultureIgnoreCase, "ac" };
            yield return new object[] { "abc", "abc" + SoftHyphen, "def", StringComparison.InvariantCultureIgnoreCase, "def" };

            // Turkish-I cases: ordinal and invariant comparisons never equate 'i' with dotted capital I.
            string turkishSource = "\u0069\u0130";
            yield return new object[] { turkishSource, "\u0069", "a", StringComparison.Ordinal, "a\u0130" };
            yield return new object[] { turkishSource, "\u0069", "a", StringComparison.OrdinalIgnoreCase, "a\u0130" };
            yield return new object[] { turkishSource, "\u0130", "a", StringComparison.Ordinal, "\u0069a" };
            yield return new object[] { turkishSource, "\u0130", "a", StringComparison.OrdinalIgnoreCase, "\u0069a" };
            yield return new object[] { turkishSource, "\u0069", "a", StringComparison.InvariantCulture, "a\u0130" };
            yield return new object[] { turkishSource, "\u0069", "a", StringComparison.InvariantCultureIgnoreCase, "a\u0130" };
            yield return new object[] { turkishSource, "\u0130", "a", StringComparison.InvariantCulture, "\u0069a" };
            yield return new object[] { turkishSource, "\u0130", "a", StringComparison.InvariantCultureIgnoreCase, "\u0069a" };
        }

        [Theory]
        [MemberData(nameof(Replace_StringComparison_TestData))]
        public void Replace_StringComparison_ReturnsExpected(string original, string oldValue, string newValue, StringComparison comparisonType, string expected)
        {
            Assert.Equal(expected, original.Replace(oldValue, newValue, comparisonType));
        }

        // Culture-sensitive Replace: under tr-TR, case-insensitive comparison treats 'i' and
        // dotted capital I as equal, so both characters are replaced; under en-US they are not.
        // Runs in a remote process to isolate the CurrentCulture change.
        [Fact]
        public void Replace_StringComparison_TurkishI()
        {
            string src = "\u0069\u0130";
            RemoteInvoke((source) =>
            {
                CultureInfo.CurrentCulture = new CultureInfo("tr-TR");
                Assert.True("\u0069".Equals("\u0130", StringComparison.CurrentCultureIgnoreCase));
                Assert.Equal("a\u0130", source.Replace("\u0069", "a", StringComparison.CurrentCulture));
                Assert.Equal("aa", source.Replace("\u0069", "a", StringComparison.CurrentCultureIgnoreCase));
                Assert.Equal("\u0069a", source.Replace("\u0130", "a", StringComparison.CurrentCulture));
                Assert.Equal("aa", source.Replace("\u0130", "a", StringComparison.CurrentCultureIgnoreCase));
                CultureInfo.CurrentCulture = new CultureInfo("en-US");
                Assert.False("\u0069".Equals("\u0130", StringComparison.CurrentCultureIgnoreCase));
                Assert.Equal("a\u0130", source.Replace("\u0069", "a", StringComparison.CurrentCulture));
                Assert.Equal("a\u0130", source.Replace("\u0069", "a", StringComparison.CurrentCultureIgnoreCase));
                Assert.Equal("\u0069a", source.Replace("\u0130", "a", StringComparison.CurrentCulture));
                Assert.Equal("\u0069a", source.Replace("\u0130", "a", StringComparison.CurrentCultureIgnoreCase));
                return SuccessExitCode;
            }, src).Dispose();
        }

        // Data rows: source, oldValue, newValue, ignoreCase, culture (null = current culture), expected.
        public static IEnumerable<object[]> Replace_StringComparisonCulture_TestData()
        {
            yield return new object[] { "abc", "abc", "def", false, null, "def" };
            yield return new object[] { "abc", "ABC", "def", false, null, "abc" };
            yield return new object[] { "abc", "abc", "def", false, CultureInfo.InvariantCulture, "def" };
            yield return new object[] { "abc", "ABC", "def", false, CultureInfo.InvariantCulture, "abc" };
            yield return new object[] { "abc", "abc", "def", true, null, "def" };
            yield return new object[] { "abc", "ABC", "def", true, null, "def" };
            yield return new object[] { "abc", "abc", "def", true, CultureInfo.InvariantCulture, "def" };
            yield return new object[] { "abc", "ABC", "def", true, CultureInfo.InvariantCulture, "def" };
            yield return new object[] { "abc", "abc" + SoftHyphen, "def", false, null, "def" };
            yield return new object[] { "abc", "abc" + SoftHyphen, "def", true, null, "def" };
            yield return new object[] { "abc", "abc" + SoftHyphen, "def", false, CultureInfo.InvariantCulture, "def" };
            yield return new object[] { "abc", "abc" + SoftHyphen, "def", true, CultureInfo.InvariantCulture, "def" };
            yield return new object[] { "\u0069\u0130", "\u0069", "a", false, new CultureInfo("tr-TR"), "a\u0130" };
            yield return new object[] { "\u0069\u0130", "\u0069", "a", true, new CultureInfo("tr-TR"), "aa" };
            yield return new object[] { "\u0069\u0130", "\u0069", "a", false, CultureInfo.InvariantCulture, "a\u0130" };
            yield return new object[] { "\u0069\u0130", "\u0069", "a", true, CultureInfo.InvariantCulture, "a\u0130" };
        }

        // A null culture argument must behave the same as passing CultureInfo.CurrentCulture.
        [Theory]
        [MemberData(nameof(Replace_StringComparisonCulture_TestData))]
        public void Replace_StringComparisonCulture_ReturnsExpected(string original, string oldValue, string newValue, bool ignoreCase, CultureInfo culture, string expected)
        {
            Assert.Equal(expected, original.Replace(oldValue, newValue, ignoreCase, culture));
            if (culture == null)
            {
                Assert.Equal(expected, original.Replace(oldValue, newValue, ignoreCase, CultureInfo.CurrentCulture));
            }
        }

        [Fact]
        public void Replace_StringComparison_NullOldValue_ThrowsArgumentException()
        {
            AssertExtensions.Throws<ArgumentNullException>("oldValue", () => "abc".Replace(null, "def", StringComparison.CurrentCulture));
            AssertExtensions.Throws<ArgumentNullException>("oldValue", () => "abc".Replace(null, "def", true, CultureInfo.CurrentCulture));
        }

        [Fact]
        public void Replace_StringComparison_EmptyOldValue_ThrowsArgumentException()
        {
            AssertExtensions.Throws<ArgumentException>("oldValue", () => "abc".Replace("", "def", StringComparison.CurrentCulture));
            AssertExtensions.Throws<ArgumentException>("oldValue", () => "abc".Replace("", "def", true, CultureInfo.CurrentCulture));
        }

        [Theory]
        [InlineData(StringComparison.CurrentCulture - 1)]
        [InlineData(StringComparison.OrdinalIgnoreCase + 1)]
        public void Replace_NoSuchStringComparison_ThrowsArgumentException(StringComparison comparisonType)
        {
            AssertExtensions.Throws<ArgumentException>("comparisonType", () => "abc".Replace("abc", "def", comparisonType));
        }

        // All defined StringComparison values, used to drive the GetHashCode tests below.
        private static readonly StringComparison[] StringComparisons = (StringComparison[])Enum.GetValues(typeof(StringComparison));

        public static IEnumerable<object[]> GetHashCode_StringComparison_Data => StringComparisons.Select(value => new object[] { value });

        // string.GetHashCode(comparisonType) must agree with the corresponding StringComparer.
        [Theory]
        [MemberData(nameof(GetHashCode_StringComparison_Data))]
        public static void GetHashCode_StringComparison(StringComparison comparisonType)
        {
            Assert.Equal(StringComparer.FromComparison(comparisonType).GetHashCode("abc"), "abc".GetHashCode(comparisonType));
        }

        // One value just below the smallest and one just above the largest defined StringComparison.
        public static IEnumerable<object[]> GetHashCode_NoSuchStringComparison_ThrowsArgumentException_Data => new[]
        {
            new object[] { StringComparisons.Min() - 1 },
            new object[] { StringComparisons.Max() + 1 },
        };

        [Theory]
        [MemberData(nameof(GetHashCode_NoSuchStringComparison_ThrowsArgumentException_Data))]
        public static void GetHashCode_NoSuchStringComparison_ThrowsArgumentException(StringComparison comparisonType)
        {
            AssertExtensions.Throws<ArgumentException>("comparisonType", () => "abc".GetHashCode(comparisonType));
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using System.IO;
using System.Text;

namespace System.Net.Mime
{
    /// <summary>
    /// This stream performs in-place decoding of quoted-printable
    /// encoded streams. Encoding requires copying into a separate
    /// buffer as the data being encoded will most likely grow.
    /// Encoding and decoding is done transparently to the caller.
    ///
    /// This stream should only be used for the e-mail content.
    /// Use QEncodedStream for encoding headers.
    /// </summary>
    internal class QuotedPrintableStream : DelegatedStream, IEncodableStream
    {
        // should we encode CRLF or not?
        private readonly bool _encodeCRLF;

        // number of bytes needed for a soft CRLF in folding ("=\r\n")
        private const int SizeOfSoftCRLF = 3;

        // each encoded byte occupies three bytes when encoded ("=XX")
        private const int SizeOfEncodedChar = 3;

        // it takes six bytes to encode a CRLF character (a CRLF that does not indicate folding): "=0D=0A"
        private const int SizeOfEncodedCRLF = 6;

        // if we aren't encoding CRLF then it occupies two chars
        private const int SizeOfNonEncodedCRLF = 2;

        // Maps an ASCII byte to its 4-bit hex-digit value ('0'-'9', 'A'-'F', 'a'-'f');
        // 255 marks a byte that is not a hex digit.
        private static readonly byte[] s_hexDecodeMap = new byte[]
        {
            //  0    1    2    3    4    5    6    7    8    9    A    B    C    D    E    F
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // 0
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // 1
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // 2
              0,   1,   2,   3,   4,   5,   6,   7,   8,   9, 255, 255, 255, 255, 255, 255, // 3
            255,  10,  11,  12,  13,  14,  15, 255, 255, 255, 255, 255, 255, 255, 255, 255, // 4
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // 5
            255,  10,  11,  12,  13,  14,  15, 255, 255, 255, 255, 255, 255, 255, 255, 255, // 6
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // 7
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // 8
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // 9
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // A
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // B
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // C
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // D
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // E
            255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, // F
        };

        // Maps a 4-bit value (0-15) to its uppercase ASCII hex digit: '0'-'9', 'A'-'F'.
        private static readonly byte[] s_hexEncodeMap = new byte[] { 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 65, 66, 67, 68, 69, 70 };

        // Preferred maximum output line length for encoding; -1 is treated as "no per-line limit"
        // by EncodeBytes (but the constructor below rejects negative values — see NOTE there).
        private readonly int _lineLength;

        private ReadStateInfo _readState;       // lazily created decode state (partial escape sequences)
        private WriteStateInfoBase _writeState; // lazily created encode buffer/state

        /// <summary>
        /// ctor.
        /// </summary>
        /// <param name="stream">Underlying stream</param>
        /// <param name="lineLength">Preferred maximum line-length for writes</param>
        internal QuotedPrintableStream(Stream stream, int lineLength) : base(stream)
        {
            // NOTE(review): this rejects all negative values, so the "_lineLength != -1"
            // check in EncodeBytes can never be false via this constructor path.
            if (lineLength < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(lineLength));
            }
            _lineLength = lineLength;
        }

        // Convenience ctor: default line length, with a choice of whether bare CRLFs
        // in the content are encoded as =0D=0A or passed through as a real CRLF.
        internal QuotedPrintableStream(Stream stream, bool encodeCRLF) : this(stream, EncodedStreamFactory.DefaultMaxLineLength)
        {
            _encodeCRLF = encodeCRLF;
        }

        private ReadStateInfo ReadState => _readState ?? (_readState = new ReadStateInfo());

        internal WriteStateInfoBase WriteState => _writeState ?? (_writeState = new WriteStateInfoBase(1024, null, null, _lineLength));

        // Begins an async write of encoded data to the underlying stream.
        // NOTE(review): count itself is not validated for negative values; only
        // offset and offset + count are range-checked — confirm callers guarantee count >= 0.
        public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback callback, object state)
        {
            if (buffer == null)
            {
                throw new ArgumentNullException(nameof(buffer));
            }
            if (offset < 0 || offset > buffer.Length)
            {
                throw new ArgumentOutOfRangeException(nameof(offset));
            }
            if (offset + count > buffer.Length)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }

            WriteAsyncResult result = new WriteAsyncResult(this, buffer, offset, count, callback, state);
            result.Write();
            return result;
        }

        // Flushes any buffered encoded bytes before closing the underlying stream.
        public override void Close()
        {
            FlushInternal();
            base.Close();
        }

        /// <summary>
        /// Decodes quoted-printable data in place within <paramref name="buffer"/>
        /// (decoded output is never longer than the input). A trailing partial escape
        /// sequence ("=", or "=X") is carried over in ReadState and completed on the
        /// next call. Returns the number of decoded bytes now at
        /// buffer[offset..offset + returnValue].
        /// </summary>
        public unsafe int DecodeBytes(byte[] buffer, int offset, int count)
        {
            fixed (byte* pBuffer = buffer)
            {
                byte* start = pBuffer + offset;
                byte* source = start;
                byte* dest = start;
                byte* end = start + count;

                // if the last read ended in a partially decoded
                // sequence, pick up where we left off.
                if (ReadState.IsEscaped)
                {
                    // this will be -1 if the previous read ended
                    // with an escape character.
                    if (ReadState.Byte == -1)
                    {
                        // if we only read one byte from the underlying
                        // stream, we'll need to save the byte and
                        // ask for more.
                        if (count == 1)
                        {
                            ReadState.Byte = *source;
                            return 0;
                        }

                        // '=\r\n' means a soft (aka. invisible) CRLF sequence...
                        if (source[0] != '\r' || source[1] != '\n')
                        {
                            byte b1 = s_hexDecodeMap[source[0]];
                            byte b2 = s_hexDecodeMap[source[1]];
                            // NOTE(review): the format argument is the mapped value (255 for an
                            // invalid digit), not the offending input byte — the exception message
                            // will always say "255"; confirm whether source[0]/source[1] was intended.
                            if (b1 == 255)
                                throw new FormatException(SR.Format(SR.InvalidHexDigit, b1));
                            if (b2 == 255)
                                throw new FormatException(SR.Format(SR.InvalidHexDigit, b2));
                            *dest++ = (byte)((b1 << 4) + b2);
                        }

                        source += 2;
                    }
                    else
                    {
                        // '=\r\n' means a soft (aka. invisible) CRLF sequence...
                        if (ReadState.Byte != '\r' || *source != '\n')
                        {
                            byte b1 = s_hexDecodeMap[ReadState.Byte];
                            byte b2 = s_hexDecodeMap[*source];
                            if (b1 == 255)
                                throw new FormatException(SR.Format(SR.InvalidHexDigit, b1));
                            if (b2 == 255)
                                throw new FormatException(SR.Format(SR.InvalidHexDigit, b2));
                            *dest++ = (byte)((b1 << 4) + b2);
                        }
                        source++;
                    }

                    // reset state for next read.
                    ReadState.IsEscaped = false;
                    ReadState.Byte = -1;
                }

                // Here's where most of the decoding takes place.
                // We'll loop around until we've inspected all the
                // bytes read.
                while (source < end)
                {
                    // if the source is not an escape character, then
                    // just copy as-is.
                    if (*source != '=')
                    {
                        *dest++ = *source++;
                    }
                    else
                    {
                        // determine where we are relative to the end
                        // of the data.  If we don't have enough data to
                        // decode the escape sequence, save off what we
                        // have and continue the decoding in the next
                        // read.  Otherwise, decode the data and copy
                        // into dest.
                        switch (end - source)
                        {
                            case 2:
                                ReadState.Byte = source[1];
                                goto case 1;
                            case 1:
                                ReadState.IsEscaped = true;
                                goto EndWhile;
                            default:
                                if (source[1] != '\r' || source[2] != '\n')
                                {
                                    byte b1 = s_hexDecodeMap[source[1]];
                                    byte b2 = s_hexDecodeMap[source[2]];
                                    if (b1 == 255)
                                        throw new FormatException(SR.Format(SR.InvalidHexDigit, b1));
                                    if (b2 == 255)
                                        throw new FormatException(SR.Format(SR.InvalidHexDigit, b2));

                                    *dest++ = (byte)((b1 << 4) + b2);
                                }
                                source += 3;
                                break;
                        }
                    }
                }

            EndWhile:
                return (int)(dest - start);
            }
        }

        /// <summary>
        /// Encodes bytes from <paramref name="buffer"/> into the WriteState buffer as
        /// quoted-printable, inserting soft line breaks ("=\r\n") to respect the line
        /// limit. Returns how many input bytes were consumed; callers loop and flush
        /// (see Write) when the output buffer fills up before all input is consumed.
        /// </summary>
        public int EncodeBytes(byte[] buffer, int offset, int count)
        {
            int cur = offset;
            for (; cur < count + offset; cur++)
            {
                // only fold if we're before a whitespace or if we're at the line limit
                // add two to the encoded Byte Length to be conservative so that we guarantee that the line length is acceptable
                if ((_lineLength != -1 && WriteState.CurrentLineLength + SizeOfEncodedChar + 2 >= _lineLength && (buffer[cur] == ' ' ||
                    buffer[cur] == '\t' || buffer[cur] == '\r' || buffer[cur] == '\n')) ||
                    _writeState.CurrentLineLength + SizeOfEncodedChar + 2 >= EncodedStreamFactory.DefaultMaxLineLength)
                {
                    if (WriteState.Buffer.Length - WriteState.Length < SizeOfSoftCRLF)
                    {
                        return cur - offset; // ok because folding happens externally
                    }

                    WriteState.Append((byte)'=');
                    WriteState.AppendCRLF(false);
                }

                // We don't need to worry about RFC 2821 4.5.2 (encoding first dot on a line),
                // it is done by the underlying 7BitStream

                // detect a CRLF in the input and encode it.
                if (buffer[cur] == '\r' && cur + 1 < count + offset && buffer[cur + 1] == '\n')
                {
                    if (WriteState.Buffer.Length - WriteState.Length < (_encodeCRLF ? SizeOfEncodedCRLF : SizeOfNonEncodedCRLF))
                    {
                        return cur - offset;
                    }
                    cur++;

                    if (_encodeCRLF)
                    {
                        // The encoding for CRLF is =0D=0A
                        WriteState.Append((byte)'=', (byte)'0', (byte)'D', (byte)'=', (byte)'0', (byte)'A');
                    }
                    else
                    {
                        WriteState.AppendCRLF(false);
                    }
                }
                // ascii chars less than 32 (control chars) and greater than 126 (non-ascii) are not allowed so we have to encode
                else if ((buffer[cur] < 32 && buffer[cur] != '\t') ||
                    buffer[cur] == '=' ||
                    buffer[cur] > 126)
                {
                    if (WriteState.Buffer.Length - WriteState.Length < SizeOfSoftCRLF)
                    {
                        return cur - offset;
                    }

                    // append an = to indicate an encoded character
                    WriteState.Append((byte)'=');
                    // shift 4 to get the first four bytes only and look up the hex digit
                    WriteState.Append(s_hexEncodeMap[buffer[cur] >> 4]);
                    // clear the first four bytes to get the last four and look up the hex digit
                    WriteState.Append(s_hexEncodeMap[buffer[cur] & 0xF]);
                }
                else
                {
                    if (WriteState.Buffer.Length - WriteState.Length < 1)
                    {
                        return cur - offset;
                    }

                    // detect special case: is whitespace at end of line? we must encode it if it is
                    if ((buffer[cur] == (byte)'\t' || buffer[cur] == (byte)' ') &&
                        (cur + 1 >= count + offset))
                    {
                        if (WriteState.Buffer.Length - WriteState.Length < SizeOfEncodedChar)
                        {
                            return cur - offset;
                        }

                        // append an = to indicate an encoded character
                        WriteState.Append((byte)'=');
                        // shift 4 to get the first four bytes only and look up the hex digit
                        WriteState.Append(s_hexEncodeMap[buffer[cur] >> 4]);
                        // clear the first four bytes to get the last four and look up the hex digit
                        WriteState.Append(s_hexEncodeMap[buffer[cur] & 0xF]);
                    }
                    else
                    {
                        WriteState.Append(buffer[cur]);
                    }
                }
            }
            return cur - offset;
        }

        // Returns the pending encoded output as an ASCII string (does not flush or clear it).
        public string GetEncodedString() => Encoding.ASCII.GetString(WriteState.Buffer, 0, WriteState.Length);

        public override void EndWrite(IAsyncResult asyncResult) => WriteAsyncResult.End(asyncResult);

        public override void Flush()
        {
            FlushInternal();
            base.Flush();
        }

        // Writes any bytes buffered in WriteState to the underlying stream and resets the buffer.
        private void FlushInternal()
        {
            if (_writeState != null && _writeState.Length > 0)
            {
                base.Write(WriteState.Buffer, 0, WriteState.Length);
                WriteState.BufferFlushed();
            }
        }

        // Synchronous write: encode in chunks, flushing the encode buffer whenever
        // EncodeBytes cannot consume the whole remaining input in one pass.
        public override void Write(byte[] buffer, int offset, int count)
        {
            if (buffer == null)
            {
                throw new ArgumentNullException(nameof(buffer));
            }
            if (offset < 0 || offset > buffer.Length)
            {
                throw new ArgumentOutOfRangeException(nameof(offset));
            }
            if (offset + count > buffer.Length)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }

            int written = 0;
            while (true)
            {
                written += EncodeBytes(buffer, offset + written, count - written);
                if (written < count)
                {
                    FlushInternal();
                }
                else
                {
                    break;
                }
            }
        }

        // Decode-side carry-over state for an escape sequence split across reads:
        // IsEscaped means a '=' was seen; Byte holds the first hex digit (or -1 if none yet).
        private sealed class ReadStateInfo
        {
            internal bool IsEscaped { get; set; }
            internal short Byte { get; set; } = -1;
        }

        // Async write state machine: repeatedly encodes and begins writes on the
        // underlying stream until the whole caller buffer has been consumed.
        private sealed class WriteAsyncResult : LazyAsyncResult
        {
            private readonly QuotedPrintableStream _parent;
            private readonly byte[] _buffer;
            private readonly int _offset;
            private readonly int _count;
            private static readonly AsyncCallback s_onWrite = new AsyncCallback(OnWrite);
            private int _written;

            internal WriteAsyncResult(QuotedPrintableStream parent, byte[] buffer, int offset, int count, AsyncCallback callback, object state) : base(null, state, callback)
            {
                _parent = parent;
                _buffer = buffer;
                _offset = offset;
                _count = count;
            }

            private void CompleteWrite(IAsyncResult result)
            {
                _parent.BaseStream.EndWrite(result);
                _parent.WriteState.BufferFlushed();
            }

            internal static void End(IAsyncResult result)
            {
                WriteAsyncResult thisPtr = (WriteAsyncResult)result;
                thisPtr.InternalWaitForCompletion();
                Debug.Assert(thisPtr._written == thisPtr._count);
            }

            private static void OnWrite(IAsyncResult result)
            {
                if (!result.CompletedSynchronously)
                {
                    WriteAsyncResult thisPtr = (WriteAsyncResult)result.AsyncState;
                    try
                    {
                        thisPtr.CompleteWrite(result);
                        thisPtr.Write();
                    }
                    catch (Exception e)
                    {
                        thisPtr.InvokeCallback(e);
                    }
                }
            }

            internal void Write()
            {
                while (true)
                {
                    _written += _parent.EncodeBytes(_buffer, _offset + _written, _count - _written);
                    if (_written < _count)
                    {
                        IAsyncResult result = _parent.BaseStream.BeginWrite(_parent.WriteState.Buffer, 0, _parent.WriteState.Length, s_onWrite, this);
                        if (!result.CompletedSynchronously)
                            break;
                        CompleteWrite(result);
                    }
                    else
                    {
                        InvokeCallback();
                        break;
                    }
                }
            }
        }
    }
}
// *********************************************************************** // Copyright (c) 2007 Charlie Poole, Rob Prouse // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
// *********************************************************************** using System; using System.Collections; using System.Linq; using NUnit.Framework.Internal; using NUnit.TestUtilities.Comparers; namespace NUnit.Framework.Constraints { [TestFixture] public class CollectionOrderedConstraintTests { private readonly string NL = Environment.NewLine; #region Ordering Tests [TestCaseSource(nameof(OrderedByData))] public void IsOrderedBy(IEnumerable collection, Constraint constraint) { Assert.That(collection, constraint); } private static readonly object[] OrderedByData = new[] { // Simple Ordering new TestCaseData( new[] { "x", "y", "z" }, Is.Ordered), new TestCaseData( new[] { 1, 2, 3 }, Is.Ordered), new TestCaseData( new[] { "x", "y", "z" }, Is.Ordered.Ascending), new TestCaseData( new[] { 1, 2, 3 }, Is.Ordered.Ascending), new TestCaseData( new[] { "z", "y", "x" }, Is.Ordered.Descending), new TestCaseData( new[] { 3, 2, 1 }, Is.Ordered.Descending), new TestCaseData( new[] { "x", "x", "z" }, Is.Ordered), new TestCaseData( new[] { null, "x", "y" }, Is.Ordered), new TestCaseData( new[] {"y", "x", null}, Is.Ordered.Descending), new TestCaseData( new[] { "x", null, "y" }, Is.Not.Ordered), // Ordered By Single Property new TestCaseData( new[] { new TestClass1(1), new TestClass1(2), new TestClass1(3) }, Is.Ordered.By("Value") ), new TestCaseData( new[] { new TestClass1(1), new TestClass1(2), new TestClass1(3) }, Is.Ordered.By("Value").Ascending ), new TestCaseData( new[] { new TestClass1(1), new TestClass1(2), new TestClass1(3) }, Is.Ordered.Ascending.By("Value") ), new TestCaseData( new[] { new TestClass1(3), new TestClass1(2), new TestClass1(1) }, Is.Ordered.By("Value").Descending ), new TestCaseData( new[] { new TestClass1(3), new TestClass1(2), new TestClass1(1) }, Is.Ordered.Descending.By("Value") ), new TestCaseData( new[] { new TestClass1(1), new TestClass1(2), new TestClass1(3) }, Is.Ordered.By("Value").Using(ObjectComparer.Default) ), new TestCaseData( new 
object[] { new TestClass1(1), new TestClass2(2) }, Is.Ordered.By("Value") ), // Ordered By Two Properties new TestCaseData( new [] { new TestClass3("ABC", 1), new TestClass3("ABC", 42), new TestClass3("XYZ", 2) }, Is.Ordered.By("A").By("B") ), new TestCaseData( new [] { new TestClass3("ABC", 1), new TestClass3("ABC", 42), new TestClass3("XYZ", 2) }, Is.Ordered.By("A").Then.By("B") ), new TestCaseData( new [] { new TestClass3("ABC", 1), new TestClass3("ABC", 42), new TestClass3("XYZ", 2) }, Is.Ordered.Ascending.By("A").Then.Ascending.By("B") ), new TestCaseData( new [] { new TestClass3("ABC", 1), new TestClass3("ABC", 42), new TestClass3("XYZ", 2) }, Is.Ordered.By("A").Ascending.Then.By("B").Ascending ), new TestCaseData( new [] { new TestClass3("ABC", 42), new TestClass3("XYZ", 99), new TestClass3("XYZ", 2) }, Is.Not.Ordered.By("A").Then.By("B") ), new TestCaseData( new [] { new TestClass3("XYZ", 2), new TestClass3("ABC", 1), new TestClass3("ABC", 42) }, Is.Ordered.By("A").Descending.Then.By("B") ), new TestCaseData( new [] { new TestClass3("XYZ", 2), new TestClass3("ABC", 1), new TestClass3("ABC", 42) }, Is.Ordered.Descending.By("A").Then.By("B") ), new TestCaseData( new [] { new TestClass3("ABC", 42), new TestClass3("ABC", 1), new TestClass3("XYZ", 2) }, Is.Ordered.By("A").Ascending.Then.By("B").Descending ), new TestCaseData( new [] { new TestClass3("ABC", 42), new TestClass3("ABC", 1), new TestClass3("XYZ", 2) }, Is.Ordered.Ascending.By("A").Then.Descending.By("B") ), new TestCaseData( new [] { new TestClass3("ABC", 42), new TestClass3("ABC", 1), new TestClass3("XYZ", 2) }, Is.Not.Ordered.By("A").Then.By("B") ), new TestCaseData( new[] { new TestClass3("XYZ", 2), new TestClass3("ABC", 42), new TestClass3("ABC", 1) }, Is.Ordered.By("A").Descending.Then.By("B").Descending ), new TestCaseData( new[] { new TestClass3("XYZ", 2), new TestClass3("ABC", 42), new TestClass3("ABC", 1) }, Is.Ordered.Descending.By("A").Then.Descending.By("B") ) }; #endregion #region Error 
Message Tests

        // Verifies the exact failure message produced when an unordered collection
        // is asserted against Is.Ordered, including the index where ordering breaks.
        [Test]
        public void IsOrdered_Fails()
        {
            var expectedMessage =
                " Expected: collection ordered" + NL +
                " But was: < \"x\", \"z\", \"y\" >" + NL +
                " Ordering breaks at index [2]: \"y\"" + NL;

            var ex = Assert.Throws<AssertionException>(() => Assert.That(new[] { "x", "z", "y" }, Is.Ordered));
            Assert.That(ex.Message, Is.EqualTo(expectedMessage));
        }

        // For large collections the failure message shows an elided window of items
        // around the first out-of-order element rather than the full collection.
        [Test]
        public void IsOrdered_DisplaysBreakingItemForHugeCollections()
        {
            var actual = Enumerable.Range(0, 100).ToArray();
            actual[90] = 1000; // break the ordering near the end so the window is elided on both sides

            var expectedMessage =
                " Expected: collection ordered" + NL +
                " But was: < ...83, 84, 85, 86, 87, 88, 89, 1000, 91, 92... >" + NL +
                " Ordering breaks at index [91]: 91" + NL;

            var ex = Assert.Throws<AssertionException>(() => Assert.That(actual, Is.Ordered));
            Assert.That(ex.Message, Is.EqualTo(expectedMessage));
        }

        #endregion

        #region Custom Comparer Tests

        // A user-supplied IComparer must be consulted instead of default ordering.
        [Test]
        public void IsOrdered_HandlesCustomComparison()
        {
            AlwaysEqualComparer comparer = new AlwaysEqualComparer();
            Assert.That(new[] { new object(), new object() }, Is.Ordered.Using(comparer));
            Assert.That(comparer.CallCount, Is.GreaterThan(0), "TestComparer was not called");
        }

        // Only one comparer may be attached per ordering step.
        [Test]
        public void ExceptionThrownForMultipleComparersInStep()
        {
            Assert.That(() => Is.Ordered.Using(new TestComparer()).Using(new AlwaysEqualComparer()),
                Throws.TypeOf<InvalidOperationException>());
        }

        // Each Then-separated step may carry its own comparer; the second step's
        // comparer is only exercised when the first step compares equal.
        [Test]
        public void MultipleComparersUsedInDifferentSteps()
        {
            var comparer1 = new TestComparer();
            var comparer2 = new AlwaysEqualComparer();
            var collection = new[] { new TestClass3("XYZ", 2), new TestClass3("ABC", 42), new TestClass3("ABC", 1) };
            Assert.That(collection, Is.Ordered.By("A").Using(comparer1).Then.By("B").Using(comparer2));
            // First comparer is called for every pair of items in the collection
            Assert.That(comparer1.CallCount, Is.EqualTo(2), "First comparer should be called twice");
            // Second comparer is only called where the first property matches
            Assert.That(comparer2.CallCount, Is.EqualTo(1), "Second comparer should be called once");
        }

        [Test]
        public void IsOrdered_HandlesCustomComparison2()
        {
            TestComparer comparer = new TestComparer();
            Assert.That(new[] { 2, 1 }, Is.Ordered.Using(comparer));
            Assert.That(comparer.CallCount, Is.GreaterThan(0), "TestComparer was not called");
        }

        // Generic IComparer<T> overload of Using.
        [Test]
        public void UsesProvidedGenericComparer()
        {
            var comparer = new GenericComparer<int>();
            Assert.That(new[] { 1, 2 }, Is.Ordered.Using(comparer));
            Assert.That(comparer.WasCalled, "Comparer was not called");
        }

        // Comparison<T> delegate overload of Using.
        [Test]
        public void UsesProvidedGenericComparison()
        {
            var comparer = new GenericComparison<int>();
            Assert.That(new[] { 1, 2 }, Is.Ordered.Using(comparer.Delegate));
            Assert.That(comparer.WasCalled, "Comparer was not called");
        }

        [Test]
        public void UsesProvidedLambda()
        {
            Comparison<int> comparer = (x, y) => x.CompareTo(y);
            Assert.That(new[] { 1, 2 }, Is.Ordered.Using(comparer));
        }

        #endregion

        #region Exception Tests

        // The following tests pin the constraint builder's guard rails: contradictory
        // or repeated direction modifiers must throw at constraint-construction time.

        [Test]
        public void ExceptionThrownForRepeatedAscending()
        {
            Assert.That(() => Is.Ordered.Ascending.Ascending, Throws.TypeOf<InvalidOperationException>());
        }

        [Test]
        public void ExceptionThrownForRepeatedDescending()
        {
            Assert.That(() => Is.Ordered.Descending.Descending, Throws.TypeOf<InvalidOperationException>());
        }

        [Test]
        public void ExceptionThrownForAscendingPlusDescending()
        {
            Assert.That(() => Is.Ordered.Ascending.Descending, Throws.TypeOf<InvalidOperationException>());
        }

        [Test]
        public void ExceptionThrownForAscendingByDescending()
        {
            Assert.That(() => Is.Ordered.Ascending.By("A").Descending, Throws.TypeOf<InvalidOperationException>());
        }

        // A null element makes property-based ordering impossible; the error
        // message must point at the offending index.
        [Test]
        public void IsOrderedByProperty_ThrowsOnNull()
        {
            var ex = Assert.Throws<ArgumentNullException>(() => Assert.That(new[] { new TestClass4("x"), null, new TestClass4("z") }, Is.Ordered.By("Value")));
            Assert.That(ex.Message, Does.Contain("index 1"));
        }

        [Test]
        public void IsOrdered_TypesMustBeComparable()
        {
            Assert.Throws<ArgumentException>(() => Assert.That(new object[] { 1, "x" }, Is.Ordered));
        }

        [Test]
        public void IsOrdered_AtLeastOneArgMustImplementIComparable()
        {
            Assert.Throws<ArgumentException>(() => Assert.That(new[] { new object(), new object() }, Is.Ordered));
        }

        // Elements lacking the named property fail with a message identifying the
        // first element (by index) on which the property lookup failed.
        [TestCaseSource(nameof(InvalidOrderedByData))]
        public void IsOrdered_ThrowsOnMissingProperty(object[] collection, string property, string expectedIndex)
        {
            Assert.That(() => Assert.That(collection, Is.Ordered.By(property)),
                Throws.ArgumentException.With.Message.Contain(expectedIndex));
        }

        private static readonly object[] InvalidOrderedByData = new[]
        {
            new TestCaseData(new object [] { "a", "b" }, "A", "index 0"),
            new TestCaseData(new object [] { new TestClass3("a", 1), "b" }, "A", "index 1"),
            new TestCaseData(new object [] { new TestClass3("a", 1), new TestClass3("b", 1), new TestClass4("c") }, "A", "index 2"),
        };

        #endregion

        #region Test Classes

        // Simple fixture type exposing a single comparable property "Value".
        public class TestClass1
        {
            public int Value { get; }

            public TestClass1(int value)
            {
                Value = value;
            }

            public override string ToString()
            {
                return Value.ToString();
            }
        }

        // Same shape as TestClass1 but deliberately private, to show the
        // constraint works on non-public types too.
        private class TestClass2
        {
            public int Value { get; }

            public TestClass2(int value)
            {
                Value = value;
            }

            public override string ToString()
            {
                return Value.ToString();
            }
        }

        // Two-property fixture used for multi-step (By("A").Then.By("B")) ordering.
        public class TestClass3
        {
            public string A { get; }
            public int B { get; }

            public TestClass3(string a, int b)
            {
                A = a;
                B = b;
            }

            public override string ToString()
            {
                return A.ToString() + "," + B.ToString();
            }
        }

        // Fixture with a public FIELD (not property) named A; used to exercise
        // the missing-property error path.
        public class TestClass4
        {
            public readonly string A;

            public TestClass4(string a)
            {
                A = a;
            }

            public override string ToString()
            {
                return A;
            }
        }

        #endregion
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: money/cards/transactions/payment_card_sale_void.proto
// NOTE(review): this file is protoc output — do not hand-edit; regenerate from
// the .proto definition instead. Comments below are review annotations only.
#pragma warning disable 1591, 0612, 3021
#region Designer generated code

using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace HOLMS.Types.Money.Cards.Transactions {

  /// <summary>Holder for reflection information generated from money/cards/transactions/payment_card_sale_void.proto</summary>
  public static partial class PaymentCardSaleVoidReflection {

    #region Descriptor
    /// <summary>File descriptor for money/cards/transactions/payment_card_sale_void.proto</summary>
    public static pbr::FileDescriptor Descriptor {
      get { return descriptor; }
    }
    private static pbr::FileDescriptor descriptor;

    static PaymentCardSaleVoidReflection() {
      // Serialized FileDescriptorProto for this .proto, base64-encoded by protoc.
      byte[] descriptorData = global::System.Convert.FromBase64String(
          string.Concat(
            "CjVtb25leS9jYXJkcy90cmFuc2FjdGlvbnMvcGF5bWVudF9jYXJkX3NhbGVf",
            "dm9pZC5wcm90bxIkaG9sbXMudHlwZXMubW9uZXkuY2FyZHMudHJhbnNhY3Rp",
            "b25zGjttb25leS9jYXJkcy90cmFuc2FjdGlvbnMvcHJvY2Vzc29yX3RyYW5z",
            "YWN0aW9uX3Jlc3VsdC5wcm90bxofZ29vZ2xlL3Byb3RvYnVmL3RpbWVzdGFt",
            "cC5wcm90byK1AQoTUGF5bWVudENhcmRTYWxlVm9pZBIdChVob3N0X3JlZmVy",
            "ZW5jZV9udW1iZXIYASABKAkSUAoGcmVzdWx0GAIgASgOMkAuaG9sbXMudHlw",
            "ZXMubW9uZXkuY2FyZHMudHJhbnNhY3Rpb25zLlByb2Nlc3NvclRyYW5zYWN0",
            "aW9uUmVzdWx0Ei0KCXBvc3RlZF9hdBgDIAEoCzIaLmdvb2dsZS5wcm90b2J1",
            "Zi5UaW1lc3RhbXBCQVoYbW9uZXkvY2FyZHMvdHJhbnNhY3Rpb25zqgIkSE9M",
            "TVMuVHlwZXMuTW9uZXkuQ2FyZHMuVHJhbnNhY3Rpb25zYgZwcm90bzM="));
      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
          new pbr::FileDescriptor[] { global::HOLMS.Types.Money.Cards.Transactions.ProcessorTransactionResultReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.TimestampReflection.Descriptor, },
          new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
            new pbr::GeneratedClrTypeInfo(typeof(global::HOLMS.Types.Money.Cards.Transactions.PaymentCardSaleVoid), global::HOLMS.Types.Money.Cards.Transactions.PaymentCardSaleVoid.Parser, new[]{ "HostReferenceNumber", "Result", "PostedAt" }, null, null, null)
          }));
    }
    #endregion

  }
  #region Messages
  public sealed partial class PaymentCardSaleVoid : pb::IMessage<PaymentCardSaleVoid> {
    private static readonly pb::MessageParser<PaymentCardSaleVoid> _parser = new pb::MessageParser<PaymentCardSaleVoid>(() => new PaymentCardSaleVoid());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<PaymentCardSaleVoid> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::HOLMS.Types.Money.Cards.Transactions.PaymentCardSaleVoidReflection.Descriptor.MessageTypes[0]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public PaymentCardSaleVoid() {
      OnConstruction();
    }

    partial void OnConstruction();

    // Copy constructor: scalars are copied directly; the message-typed field is
    // deep-cloned so the copy does not share mutable state with the source.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public PaymentCardSaleVoid(PaymentCardSaleVoid other) : this() {
      hostReferenceNumber_ = other.hostReferenceNumber_;
      result_ = other.result_;
      PostedAt = other.postedAt_ != null ? other.PostedAt.Clone() : null;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public PaymentCardSaleVoid Clone() {
      return new PaymentCardSaleVoid(this);
    }

    /// <summary>Field number for the "host_reference_number" field.</summary>
    public const int HostReferenceNumberFieldNumber = 1;
    private string hostReferenceNumber_ = "";
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string HostReferenceNumber {
      get { return hostReferenceNumber_; }
      set {
        hostReferenceNumber_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }

    /// <summary>Field number for the "result" field.</summary>
    public const int ResultFieldNumber = 2;
    private global::HOLMS.Types.Money.Cards.Transactions.ProcessorTransactionResult result_ = 0;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::HOLMS.Types.Money.Cards.Transactions.ProcessorTransactionResult Result {
      get { return result_; }
      set {
        result_ = value;
      }
    }

    /// <summary>Field number for the "posted_at" field.</summary>
    public const int PostedAtFieldNumber = 3;
    private global::Google.Protobuf.WellKnownTypes.Timestamp postedAt_;
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Protobuf.WellKnownTypes.Timestamp PostedAt {
      get { return postedAt_; }
      set {
        postedAt_ = value;
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as PaymentCardSaleVoid);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(PaymentCardSaleVoid other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (HostReferenceNumber != other.HostReferenceNumber) return false;
      if (Result != other.Result) return false;
      if (!object.Equals(PostedAt, other.PostedAt)) return false;
      return true;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      // Standard protoc hash: only fields at non-default values contribute.
      int hash = 1;
      if (HostReferenceNumber.Length != 0) hash ^= HostReferenceNumber.GetHashCode();
      if (Result != 0) hash ^= Result.GetHashCode();
      if (postedAt_ != null) hash ^= PostedAt.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    // Raw tags: 10 = field 1 (length-delimited), 16 = field 2 (varint),
    // 26 = field 3 (length-delimited); default-valued fields are omitted.
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (HostReferenceNumber.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(HostReferenceNumber);
      }
      if (Result != 0) {
        output.WriteRawTag(16);
        output.WriteEnum((int) Result);
      }
      if (postedAt_ != null) {
        output.WriteRawTag(26);
        output.WriteMessage(PostedAt);
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (HostReferenceNumber.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(HostReferenceNumber);
      }
      if (Result != 0) {
        size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Result);
      }
      if (postedAt_ != null) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(PostedAt);
      }
      return size;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(PaymentCardSaleVoid other) {
      if (other == null) {
        return;
      }
      if (other.HostReferenceNumber.Length != 0) {
        HostReferenceNumber = other.HostReferenceNumber;
      }
      if (other.Result != 0) {
        Result = other.Result;
      }
      if (other.postedAt_ != null) {
        if (postedAt_ == null) {
          postedAt_ = new global::Google.Protobuf.WellKnownTypes.Timestamp();
        }
        PostedAt.MergeFrom(other.PostedAt);
      }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            HostReferenceNumber = input.ReadString();
            break;
          }
          case 16: {
            result_ = (global::HOLMS.Types.Money.Cards.Transactions.ProcessorTransactionResult) input.ReadEnum();
            break;
          }
          case 26: {
            if (postedAt_ == null) {
              postedAt_ = new global::Google.Protobuf.WellKnownTypes.Timestamp();
            }
            input.ReadMessage(postedAt_);
            break;
          }
        }
      }
    }

  }

  #endregion

}

#endregion Designer generated code
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Diagnostics;
using System.Dynamic.Utils;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.ExceptionServices;
using System.Threading;

namespace System.Linq.Expressions.Interpreter
{
#if FEATURE_MAKE_RUN_METHODS
    // Maps a signature (parameter types followed by the return type) onto one of
    // the predefined Action<...> / Func<...> delegate types.
    internal static partial class DelegateHelpers
    {
        // 16 parameters + 1 return type: the largest Func<...> the BCL defines.
        private const int MaximumArity = 17;

        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA1506:AvoidExcessiveClassCoupling")]
        internal static Type MakeDelegate(Type[] types)
        {
            Debug.Assert(types != null && types.Length > 0);

            // Can only used predefined delegates if we have no byref types and
            // the arity is small enough to fit in Func<...> or Action<...>
            if (types.Length > MaximumArity || types.Any(t => t.IsByRef))
            {
                throw ContractUtils.Unreachable;
            }

            // The last element of 'types' is the return type.
            Type returnType = types[types.Length - 1];
            if (returnType == typeof(void))
            {
                // void return: drop the return type and pick the matching Action<...>.
                // Note this mutates the caller's array reference via Array.Resize.
                Array.Resize(ref types, types.Length - 1);
                switch (types.Length)
                {
                    case 0: return typeof(Action);
                    case 1: return typeof(Action<>).MakeGenericType(types);
                    case 2: return typeof(Action<,>).MakeGenericType(types);
                    case 3: return typeof(Action<,,>).MakeGenericType(types);
                    case 4: return typeof(Action<,,,>).MakeGenericType(types);
                    case 5: return typeof(Action<,,,,>).MakeGenericType(types);
                    case 6: return typeof(Action<,,,,,>).MakeGenericType(types);
                    case 7: return typeof(Action<,,,,,,>).MakeGenericType(types);
                    case 8: return typeof(Action<,,,,,,,>).MakeGenericType(types);
                    case 9: return typeof(Action<,,,,,,,,>).MakeGenericType(types);
                    case 10: return typeof(Action<,,,,,,,,,>).MakeGenericType(types);
                    case 11: return typeof(Action<,,,,,,,,,,>).MakeGenericType(types);
                    case 12: return typeof(Action<,,,,,,,,,,,>).MakeGenericType(types);
                    case 13: return typeof(Action<,,,,,,,,,,,,>).MakeGenericType(types);
                    case 14: return typeof(Action<,,,,,,,,,,,,,>).MakeGenericType(types);
                    case 15: return typeof(Action<,,,,,,,,,,,,,,>).MakeGenericType(types);
                    case 16: return typeof(Action<,,,,,,,,,,,,,,,>).MakeGenericType(types);
                }
            }
            else
            {
                // Non-void return: the return type stays as the last generic argument
                // of the matching Func<...>.
                switch (types.Length)
                {
                    case 1: return typeof(Func<>).MakeGenericType(types);
                    case 2: return typeof(Func<,>).MakeGenericType(types);
                    case 3: return typeof(Func<,,>).MakeGenericType(types);
                    case 4: return typeof(Func<,,,>).MakeGenericType(types);
                    case 5: return typeof(Func<,,,,>).MakeGenericType(types);
                    case 6: return typeof(Func<,,,,,>).MakeGenericType(types);
                    case 7: return typeof(Func<,,,,,,>).MakeGenericType(types);
                    case 8: return typeof(Func<,,,,,,,>).MakeGenericType(types);
                    case 9: return typeof(Func<,,,,,,,,>).MakeGenericType(types);
                    case 10: return typeof(Func<,,,,,,,,,>).MakeGenericType(types);
                    case 11: return typeof(Func<,,,,,,,,,,>).MakeGenericType(types);
                    case 12: return typeof(Func<,,,,,,,,,,,>).MakeGenericType(types);
                    case 13: return typeof(Func<,,,,,,,,,,,,>).MakeGenericType(types);
                    case 14: return typeof(Func<,,,,,,,,,,,,,>).MakeGenericType(types);
                    case 15: return typeof(Func<,,,,,,,,,,,,,,>).MakeGenericType(types);
                    case 16: return typeof(Func<,,,,,,,,,,,,,,,>).MakeGenericType(types);
                    case 17: return typeof(Func<,,,,,,,,,,,,,,,,>).MakeGenericType(types);
                }
            }
            throw ContractUtils.Unreachable;
        }
    }
#endif

    internal static class ScriptingRuntimeHelpers
    {
        // Boxes an int, reusing cached boxes for the small values the interpreter
        // produces most often (-1..3) to avoid repeated allocations.
        public static object Int32ToObject(int i)
        {
            switch (i)
            {
                case -1:
                    return Utils.BoxedIntM1;
                case 0:
                    return Utils.BoxedInt0;
                case 1:
                    return Utils.BoxedInt1;
                case 2:
                    return Utils.BoxedInt2;
                case 3:
                    return Utils.BoxedInt3;
            }

            return i;
        }

        // Returns a boxed default(T) for primitive types, using cached boxes.
        // Returns null for reference types / anything without a cached default.
        internal static object GetPrimitiveDefaultValue(Type type)
        {
            object result;

            switch (type.GetTypeCode())
            {
                case TypeCode.Boolean:
                    result = Utils.BoxedFalse;
                    break;
                case TypeCode.SByte:
                    result = Utils.BoxedDefaultSByte;
                    break;
                case TypeCode.Byte:
                    result = Utils.BoxedDefaultByte;
                    break;
                case TypeCode.Char:
                    result = Utils.BoxedDefaultChar;
                    break;
                case TypeCode.Int16:
                    result = Utils.BoxedDefaultInt16;
                    break;
                case TypeCode.Int32:
                    result = Utils.BoxedInt0;
                    break;
                case TypeCode.Int64:
                    result = Utils.BoxedDefaultInt64;
                    break;
                case TypeCode.UInt16:
                    result = Utils.BoxedDefaultUInt16;
                    break;
                case TypeCode.UInt32:
                    result = Utils.BoxedDefaultUInt32;
                    break;
                case TypeCode.UInt64:
                    result = Utils.BoxedDefaultUInt64;
                    break;

                // The following cases return directly, skipping the enum re-boxing
                // below: these type codes cannot be the underlying type of an enum.
                case TypeCode.Single:
                    return Utils.BoxedDefaultSingle;
                case TypeCode.Double:
                    return Utils.BoxedDefaultDouble;
                case TypeCode.DateTime:
                    return Utils.BoxedDefaultDateTime;
                case TypeCode.Decimal:
                    return Utils.BoxedDefaultDecimal;
                default:
                    // Also covers DBNull which is a class.
                    return null;
            }

            // An enum reports the TypeCode of its underlying type; re-box the
            // cached default as the enum type itself.
            if (type.IsEnum)
            {
                result = Enum.ToObject(type, result);
            }

            return result;
        }
    }

    internal static class ExceptionHelpers
    {
        /// <summary>
        /// Updates an exception before it's getting re-thrown so
        /// we can present a reasonable stack trace to the user.
        /// </summary>
        public static void UnwrapAndRethrow(TargetInvocationException exception)
        {
            // ExceptionDispatchInfo preserves the inner exception's original stack trace.
            ExceptionDispatchInfo.Capture(exception.InnerException).Throw();
        }
    }

    /// <summary>
    /// A hybrid dictionary which compares based upon object identity.
    /// </summary>
    // Small key counts are stored in a flat array scanned linearly (keys compared
    // with ==, which for an unconstrained-operator TKey : class in a generic
    // context is reference equality); once the array fills up, entries are moved
    // into a Dictionary.
    // NOTE(review): the Dictionary fallback uses the default equality comparer,
    // not reference identity — for a TKey overriding Equals/GetHashCode the two
    // storage modes could disagree; presumably callers only use identity-style
    // keys. TODO confirm against call sites.
    internal class HybridReferenceDictionary<TKey, TValue> where TKey : class
    {
        private KeyValuePair<TKey, TValue>[] _keysAndValues; // flat storage while small; null once _dict is in use
        private Dictionary<TKey, TValue> _dict;              // overflow storage; non-null once the array filled
        private int _count;                                  // live entries while in array mode
        private const int ArraySize = 10;                    // array capacity before switching to Dictionary

        public HybridReferenceDictionary()
        {
        }

        public bool TryGetValue(TKey key, out TValue value)
        {
            Debug.Assert(key != null);
            if (_dict != null)
            {
                return _dict.TryGetValue(key, out value);
            }
            else if (_keysAndValues != null)
            {
                for (int i = 0; i < _keysAndValues.Length; i++)
                {
                    if (_keysAndValues[i].Key == key)
                    {
                        value = _keysAndValues[i].Value;
                        return true;
                    }
                }
            }
            value = default(TValue);
            return false;
        }

        public bool Remove(TKey key)
        {
            Debug.Assert(key != null);
            if (_dict != null)
            {
                return _dict.Remove(key);
            }
            else if (_keysAndValues != null)
            {
                for (int i = 0; i < _keysAndValues.Length; i++)
                {
                    if (_keysAndValues[i].Key == key)
                    {
                        // Clear the slot (leaves a null-key hole reusable by the setter).
                        _keysAndValues[i] = new KeyValuePair<TKey, TValue>();
                        _count--;
                        return true;
                    }
                }
            }
            return false;
        }

        public bool ContainsKey(TKey key)
        {
            Debug.Assert(key != null);
            if (_dict != null)
            {
                return _dict.ContainsKey(key);
            }
            else if (_keysAndValues != null)
            {
                for (int i = 0; i < _keysAndValues.Length; i++)
                {
                    if (_keysAndValues[i].Key == key)
                    {
                        return true;
                    }
                }
            }
            return false;
        }

        public int Count
        {
            get
            {
                if (_dict != null)
                {
                    return _dict.Count;
                }
                return _count;
            }
        }

        public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator()
        {
            if (_dict != null)
            {
                return _dict.GetEnumerator();
            }

            return GetEnumeratorWorker();
        }

        // Array-mode enumeration: skips cleared (null-key) slots.
        private IEnumerator<KeyValuePair<TKey, TValue>> GetEnumeratorWorker()
        {
            if (_keysAndValues != null)
            {
                for (int i = 0; i < _keysAndValues.Length; i++)
                {
                    if (_keysAndValues[i].Key != null)
                    {
                        yield return _keysAndValues[i];
                    }
                }
            }
        }

        public TValue this[TKey key]
        {
            get
            {
                Debug.Assert(key != null);
                TValue res;
                if (TryGetValue(key, out res))
                {
                    return res;
                }

                throw new KeyNotFoundException();
            }
            set
            {
                Debug.Assert(key != null);
                if (_dict != null)
                {
                    _dict[key] = value;
                }
                else
                {
                    int index;
                    if (_keysAndValues != null)
                    {
                        // Scan for an existing entry; remember the last empty slot
                        // in case we need to insert.
                        index = -1;
                        for (int i = 0; i < _keysAndValues.Length; i++)
                        {
                            if (_keysAndValues[i].Key == key)
                            {
                                _keysAndValues[i] = new KeyValuePair<TKey, TValue>(key, value);
                                return;
                            }
                            else if (_keysAndValues[i].Key == null)
                            {
                                index = i;
                            }
                        }
                    }
                    else
                    {
                        // First insertion: lazily create the array.
                        _keysAndValues = new KeyValuePair<TKey, TValue>[ArraySize];
                        index = 0;
                    }

                    if (index != -1)
                    {
                        _count++;
                        _keysAndValues[index] = new KeyValuePair<TKey, TValue>(key, value);
                    }
                    else
                    {
                        // Array is full with no match: migrate everything to a
                        // Dictionary and retire the array.
                        _dict = new Dictionary<TKey, TValue>();
                        for (int i = 0; i < _keysAndValues.Length; i++)
                        {
                            _dict[_keysAndValues[i].Key] = _keysAndValues[i].Value;
                        }
                        _keysAndValues = null;

                        _dict[key] = value;
                    }
                }
            }
        }
    }

    // Debug-build-only assertion helper; compiles away in release builds.
    internal static class Assert
    {
        [Conditional("DEBUG")]
        public static void NotNull(object var)
        {
            Debug.Assert(var != null);
        }
    }
}
using System;
using System.Diagnostics;
using System.Threading;

namespace System.Data.SQLite
{
  public partial class Sqlite3
  {
    /*
    ** 2008 October 07
    **
    ** The author disclaims copyright to this source code.  In place of
    ** a legal notice, here is a blessing:
    **
    **    May you do good and not evil.
    **    May you find forgiveness for yourself and forgive others.
    **    May you share freely, never taking more than you give.
    **
    *************************************************************************
    ** This file contains the C functions that implement mutexes.
    **
    ** This implementation in this file does not provide any mutual
    ** exclusion and is thus suitable for use only in applications
    ** that use SQLite in a single thread.  The routines defined
    ** here are place-holders.  Applications can substitute working
    ** mutex routines at start-time using the
    **
    **     sqlite3_config(SQLITE_CONFIG_MUTEX,...)
    **
    ** interface.
    **
    ** If compiled with SQLITE_DEBUG, then additional logic is inserted
    ** that does error checking on mutexes to make sure they are being
    ** called correctly.
    *************************************************************************
    **  Included in SQLite3 port to C#-SQLite;  2008 Noah B Hart
    **  C#-SQLite is an independent reimplementation of the SQLite software library
    **
    **  SQLITE_SOURCE_ID: 2009-12-07 16:39:13 1ed88e9d01e9eda5cbc622e7614277f29bcc551c
    **
    *************************************************************************
    */
    //#include "sqliteInt.h"

#if !SQLITE_DEBUG
    /*
    ** Stub routines for all mutex methods.
    **
    ** This routines provide no mutual exclusion or error checking.
    */
    // Held/Notheld always report true so Debug.Assert callers are satisfied.
    static int noopMutexHeld( sqlite3_mutex p ) { return 1; }
    static int noopMutexNotheld( sqlite3_mutex p ) { return 1; }
    static int noopMutexInit() { return SQLITE_OK; }
    static int noopMutexEnd() { return SQLITE_OK; }
    static sqlite3_mutex noopMutexAlloc( int id ) { return new sqlite3_mutex(); }
    static void noopMutexFree( sqlite3_mutex p ) { }
    static void noopMutexEnter( sqlite3_mutex p ) { }
    static int noopMutexTry( sqlite3_mutex p ) { return SQLITE_OK; }
    static void noopMutexLeave( sqlite3_mutex p ) { }

    // Builds the default (no-op) mutex vtable used when error-checking
    // mutexes are compiled out.
    sqlite3_mutex_methods sqlite3DefaultMutex()
    {
      sqlite3_mutex_methods sMutex = new sqlite3_mutex_methods(
      (dxMutexInit)noopMutexInit,
      (dxMutexEnd)noopMutexEnd,
      (dxMutexAlloc)noopMutexAlloc,
      (dxMutexFree)noopMutexFree,
      (dxMutexEnter)noopMutexEnter,
      (dxMutexTry)noopMutexTry,
      (dxMutexLeave)noopMutexLeave,
#if SQLITE_DEBUG
// Unreachable here: this whole region is compiled only when !SQLITE_DEBUG,
// so the held/notheld slots below are always null.
 (dxMutexHeld)noopMutexHeld,
      (dxMutexNotheld)noopMutexNotheld
#else
 null, null
#endif
 );
      return sMutex;
    }
#endif //* !SQLITE_DEBUG */

#if SQLITE_DEBUG && !SQLITE_MUTEX_OMIT
    /*
    ** In this implementation, error checking is provided for testing
    ** and debugging purposes.  The mutexes still do not provide any
    ** mutual exclusion.
    */

    /*
    ** The mutex object
    */
    public class sqlite3_debug_mutex : sqlite3_mutex
    {
      //public int id;     /* The mutex type */
      public int cnt;      /* Number of entries without a matching leave */
    };

    /*
    ** The sqlite3_mutex_held() and sqlite3_mutex_notheld() routine are
    ** intended for use inside Debug.Assert() statements.
    */
    static bool debugMutexHeld( sqlite3_mutex pX )
    {
      sqlite3_debug_mutex p = (sqlite3_debug_mutex)pX;
      return p == null || p.cnt > 0;
    }

    static bool debugMutexNotheld( sqlite3_mutex pX )
    {
      sqlite3_debug_mutex p = (sqlite3_debug_mutex)pX;
      return p == null || p.cnt == 0;
    }

    /*
    ** Initialize and deinitialize the mutex subsystem.
    */
    static int debugMutexInit() { return SQLITE_OK; }
    static int debugMutexEnd() { return SQLITE_OK; }

    /*
    ** The sqlite3_mutex_alloc() routine allocates a new
    ** mutex and returns a pointer to it.  If it returns NULL
    ** that means that a mutex could not be allocated.
    */
    static sqlite3_mutex debugMutexAlloc( int id )
    {
      // NOTE(review): in the C original 'aStatic' is a true static array shared
      // across calls; here it is a fresh local on every call, so the default
      // branch below always reads a null element and 'pNew.id = id' would throw.
      // TODO confirm whether the static-mutex ids are ever requested in this port.
      sqlite3_debug_mutex[] aStatic = new sqlite3_debug_mutex[6];
      sqlite3_debug_mutex pNew = null;
      switch ( id )
      {
        case SQLITE_MUTEX_FAST:
        case SQLITE_MUTEX_RECURSIVE:
          {
            pNew = new sqlite3_debug_mutex();//sqlite3Malloc(sizeof(*pNew));
            if ( pNew != null )
            {
              pNew.id = id;
              pNew.cnt = 0;
            }
            break;
          }
        default:
          {
            Debug.Assert( id - 2 >= 0 );
            Debug.Assert( id - 2 < aStatic.Length );//(int)(sizeof(aStatic)/sizeof(aStatic[0])) );
            pNew = aStatic[id - 2];
            pNew.id = id;
            break;
          }
      }
      return pNew;
    }

    /*
    ** This routine deallocates a previously allocated mutex.
    */
    static void debugMutexFree( sqlite3_mutex pX )
    {
      sqlite3_debug_mutex p = (sqlite3_debug_mutex)pX;
      Debug.Assert( p.cnt == 0 );
      Debug.Assert( p.id == SQLITE_MUTEX_FAST || p.id == SQLITE_MUTEX_RECURSIVE );
      //sqlite3_free(ref p);
    }

    /*
    ** The sqlite3_mutex_enter() and sqlite3_mutex_try() routines attempt
    ** to enter a mutex.  If another thread is already within the mutex,
    ** sqlite3_mutex_enter() will block and sqlite3_mutex_try() will return
    ** SQLITE_BUSY.  The sqlite3_mutex_try() interface returns SQLITE_OK
    ** upon successful entry.  Mutexes created using SQLITE_MUTEX_RECURSIVE can
    ** be entered multiple times by the same thread.  In such cases the,
    ** mutex must be exited an equal number of times before another thread
    ** can enter.  If the same thread tries to enter any other kind of mutex
    ** more than once, the behavior is undefined.
    */
    static void debugMutexEnter( sqlite3_mutex pX )
    {
      sqlite3_debug_mutex p = (sqlite3_debug_mutex)pX;
      Debug.Assert( p.id == SQLITE_MUTEX_RECURSIVE || debugMutexNotheld( p ) );
      p.cnt++;
    }

    static int debugMutexTry( sqlite3_mutex pX )
    {
      sqlite3_debug_mutex p = (sqlite3_debug_mutex)pX;
      Debug.Assert( p.id == SQLITE_MUTEX_RECURSIVE || debugMutexNotheld( p ) );
      p.cnt++;
      return SQLITE_OK;
    }

    /*
    ** The sqlite3_mutex_leave() routine exits a mutex that was
    ** previously entered by the same thread.  The behavior
    ** is undefined if the mutex is not currently entered or
    ** is not currently allocated.  SQLite will never do either.
    */
    static void debugMutexLeave( sqlite3_mutex pX )
    {
      sqlite3_debug_mutex p = (sqlite3_debug_mutex)pX;
      Debug.Assert( debugMutexHeld( p ) );
      p.cnt--;
      Debug.Assert( p.id == SQLITE_MUTEX_RECURSIVE || debugMutexNotheld( p ) );
    }

    // Error-checking (but still non-excluding) mutex vtable for debug builds.
    static sqlite3_mutex_methods sqlite3NoopMutex()
    {
      sqlite3_mutex_methods sMutex = new sqlite3_mutex_methods(
      (dxMutexInit)debugMutexInit,
      (dxMutexEnd)debugMutexEnd,
      (dxMutexAlloc)debugMutexAlloc,
      (dxMutexFree)debugMutexFree,
      (dxMutexEnter)debugMutexEnter,
      (dxMutexTry)debugMutexTry,
      (dxMutexLeave)debugMutexLeave,
      (dxMutexHeld)debugMutexHeld,
      (dxMutexNotheld)debugMutexNotheld
      );
      return sMutex;
    }
#endif //* SQLITE_DEBUG */

    /*
    ** If compiled with SQLITE_MUTEX_NOOP, then the no-op mutex implementation
    ** is used regardless of the run-time threadsafety setting.
    */
#if SQLITE_MUTEX_NOOP
sqlite3_mutex_methods const sqlite3DefaultMutex(void){
return sqlite3NoopMutex();
}
#endif //* SQLITE_MUTEX_NOOP */
  }
}
//---------------------------------------------------------------------
// File: ExecuteSendPipelineStep.cs
//
// Summary:
//
//---------------------------------------------------------------------
// Copyright (c) 2004-2011, Kevin B. Smith. All rights reserved.
//
// THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY
// KIND, WHETHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR
// PURPOSE.
//---------------------------------------------------------------------

using System;
using System.IO;
using System.Collections.Generic;
using Winterdom.BizTalk.PipelineTesting;
using System.Collections.ObjectModel;
using BizUnit.Common;
using BizUnit.TestSteps.BizTalk.Common;
using BizUnit.Xaml;

namespace BizUnit.TestSteps.BizTalk.Pipeline
{
    /// <summary>
    /// Test step that executes a BizTalk send pipeline against one or more input
    /// files and writes the pipeline output (and optionally its message context)
    /// to disk so it can be validated by subsequent steps.
    /// </summary>
    ///
    /// <remarks>
    /// The following shows an example of the Xml representation of this test step.
    /// The step can perform a pipeline and output that to an output file
    ///
    /// <list type="table">
    /// <listheader>
    /// <term>Pipeline:assemblyPath</term>
    /// <description>The assembly containing the BizTalk pipeline to execute.</description>
    /// </listheader>
    /// <item>
    /// <term>Pipeline:typeName</term>
    /// <description>The typename of the BizTalk pipeline to execute</description>
    /// </item>
    /// <item>
    /// <term>DocSpecs:assembly</term>
    /// <description>The assembly containing the BizTalk docspec schema assembly path (multiple)</description>
    /// </item>
    /// <item>
    /// <term>DocSpecs:type</term>
    /// <description>The assembly containing the BizTalk docspec schema type name (multiple)</description>
    /// </item>
    /// <item>
    /// <term>SourceDir</term>
    /// <description>The directory path for the source input file(s) to execute in the pipeline</description>
    /// </item>
    /// <item>
    /// <term>SearchPattern</term>
    /// <description>The search pattern for the input files. E.g. input*.xml</description>
    /// </item>
    /// <item>
    /// <term>Destination</term>
    /// <description>The destination to write the pipeline output file</description>
    /// </item>
    /// <item>
    /// <term>InputContextDir</term>
    /// <description>The directory path for the source context file(s) (optional)</description>
    /// </item>
    /// <item>
    /// <term>InputContextSearchPattern</term>
    /// <description>The search pattern for the source context file(s) (optional)</description>
    /// </item>
    /// <item>
    /// <term>OutputContextFile</term>
    /// <description>The location to write the output message context file (optional)</description>
    /// </item>
    /// <item>
    /// <term>SourceEncoding</term>
    /// <description>The charset to be written on the pipeline input message (optional)</description>
    /// </item>
    /// </list>
    /// </remarks>
    public class ExecuteSendPipelineStep : TestStepBase
    {
        // Backing store for DocSpecs (the property setter must stay private).
        private Collection<DocSpecDefinition> _docSpecDefinitions = new Collection<DocSpecDefinition>();

        // Document spec CLR types resolved from _docSpecDefinitions during Execute.
        private Type[] _resolvedDocSpecTypes;

        ///<summary>
        /// Gets and sets the assembly path for the .NET type of the pipeline to be executed
        ///</summary>
        public string PipelineAssemblyPath { get; set; }

        ///<summary>
        /// Gets and sets the type name for the .NET type of the pipeline to be executed
        ///</summary>
        public string PipelineTypeName { get; set; }

        ///<summary>
        /// Gets and sets the docspecs for the pipeline to be executed. Pairs of typeName, assemblyPath.
        ///</summary>
        public Collection<DocSpecDefinition> DocSpecs
        {
            get { return _docSpecDefinitions; }
            private set { _docSpecDefinitions = value; }
        }

        ///<summary>
        /// Gets and sets the pipeline instance configuration for the pipeline to be executed
        ///</summary>
        public string InstanceConfigFile { get; set; }

        ///<summary>
        /// Gets and sets the source file path for the input file to the pipeline
        ///</summary>
        public string SourceDir { get; set; }

        ///<summary>
        /// Gets and sets the source encoding
        ///</summary>
        public string SourceEncoding { get; set; }

        ///<summary>
        /// Gets and sets the search pattern for the input file
        ///</summary>
        public string SearchPattern { get; set; }

        ///<summary>
        /// Gets and sets the destination of the pipeline output
        ///</summary>
        public string Destination { get; set; }

        ///<summary>
        /// Gets and sets the directory containing the message context file for the input message
        ///</summary>
        public string InputContextDir { get; set; }

        ///<summary>
        /// Gets and sets the message context search pattern for the input message
        ///</summary>
        public string InputContextSearchPattern { get; set; }

        ///<summary>
        /// Gets and sets the file name for the message context for the output message
        ///</summary>
        public string OutputContextFile { get; set; }

        /// <summary>
        /// TestStepBase.Execute() implementation
        /// </summary>
        /// <param name='context'>The context for the test, this holds state that is passed beteen tests</param>
        public override void Execute(Context context)
        {
            // Resolve any configured document specs into CLR types up front.
            if (_docSpecDefinitions.Count > 0)
            {
                var resolved = new List<Type>(_docSpecDefinitions.Count);
                foreach (var definition in _docSpecDefinitions)
                {
                    var specAssembly = AssemblyHelper.LoadAssembly((string)definition.AssemblyPath);
                    context.LogInfo("Loading DocumentSpec {0} from location {1}.", definition.TypeName, specAssembly.Location);
                    resolved.Add(specAssembly.GetType(definition.TypeName));
                }

                _resolvedDocSpecTypes = resolved.ToArray();
            }

            context.LogInfo("Loading pipeline {0} from location {1}.", PipelineTypeName, PipelineAssemblyPath);
            var pipelineType = ObjectCreator.GetType(PipelineTypeName, PipelineAssemblyPath);
            var pipeline = PipelineFactory.CreateSendPipeline(pipelineType);

            if (!string.IsNullOrEmpty(InstanceConfigFile))
            {
                pipeline.ApplyInstanceConfig(InstanceConfigFile);
            }

            if (null != _resolvedDocSpecTypes)
            {
                foreach (var docSpecType in _resolvedDocSpecTypes)
                {
                    pipeline.AddDocSpec(docSpecType);
                }
            }

            // Optional per-message context files, matched to input files by position.
            FileInfo[] contextFiles = null;
            if (!string.IsNullOrEmpty(InputContextDir) && !string.IsNullOrEmpty(InputContextSearchPattern))
            {
                contextFiles = new DirectoryInfo(InputContextDir).GetFiles(InputContextSearchPattern);
            }

            var inputMessages = new MessageCollection();
            var position = 0;
            foreach (var sourceFile in new DirectoryInfo(SourceDir).GetFiles(SearchPattern))
            {
                // The stream is intentionally not disposed here: the pipeline reads
                // the message body from it when it executes below.
                Stream bodyStream = new FileStream(sourceFile.FullName, FileMode.Open, FileAccess.Read);
                var inputMessage = MessageHelper.CreateFromStream(bodyStream);

                if (!string.IsNullOrEmpty(SourceEncoding))
                {
                    inputMessage.BodyPart.Charset = SourceEncoding;
                }

                // Load context file, add to message context.
                if ((null != contextFiles) && (contextFiles.Length > position))
                {
                    var contextPath = contextFiles[position].FullName;
                    if (System.IO.File.Exists(contextPath))
                    {
                        MessageInfo.Deserialize(contextPath).MergeIntoMessage(inputMessage);
                    }
                }

                inputMessages.Add(inputMessage);
                position++;
            }

            var outputMessage = pipeline.Execute(inputMessages);
            PersistMessageHelper.PersistMessage(outputMessage, Destination);

            if (!string.IsNullOrEmpty(OutputContextFile))
            {
                var outputContext = BizTalkMessageInfoFactory.CreateMessageInfo(outputMessage, Destination);
                MessageInfo.Serialize(outputContext, OutputContextFile);
            }
        }

        /// <summary>
        /// TestStepBase.Validate() implementation
        /// </summary>
        /// <param name='context'>The context for the test, this holds state that is passed beteen tests</param>
        public override void Validate(Context context)
        {
            ArgumentValidation.CheckForEmptyString(PipelineTypeName, "pipelineTypeName");

            // pipelineAssemblyPath - optional

            Destination = context.SubstituteWildCards(Destination);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.ComponentModel;
using System.Diagnostics;
using System.Runtime.InteropServices;
using Gdip = System.Drawing.SafeNativeMethods.Gdip;

namespace System.Drawing.Drawing2D
{
    /// <summary>
    /// Managed wrapper over a native GDI+ line-gradient brush. Every member delegates to
    /// the flat Gdip API; native failures surface as exceptions via Gdip.CheckStatus.
    /// </summary>
    public sealed class LinearGradientBrush : Brush
    {
        // True once InterpolationColors has been explicitly assigned. GDI+ cannot
        // report Blend/InterpolationColors sensibly unless we track this ourselves
        // (see the Blend and InterpolationColors getters).
        private bool _interpolationColorsWasSet;

        public LinearGradientBrush(PointF point1, PointF point2, Color color1, Color color2)
        {
            Gdip.CheckStatus(Gdip.GdipCreateLineBrush(
                ref point1, ref point2,
                color1.ToArgb(), color2.ToArgb(),
                WrapMode.Tile,
                out IntPtr nativeBrush));
            SetNativeBrushInternal(nativeBrush);
        }

        public LinearGradientBrush(Point point1, Point point2, Color color1, Color color2)
        {
            Gdip.CheckStatus(Gdip.GdipCreateLineBrushI(
                ref point1, ref point2,
                color1.ToArgb(), color2.ToArgb(),
                WrapMode.Tile,
                out IntPtr nativeBrush));
            SetNativeBrushInternal(nativeBrush);
        }

        public LinearGradientBrush(RectangleF rect, Color color1, Color color2, LinearGradientMode linearGradientMode)
        {
            if (linearGradientMode < LinearGradientMode.Horizontal || linearGradientMode > LinearGradientMode.BackwardDiagonal)
                throw new InvalidEnumArgumentException(nameof(linearGradientMode), unchecked((int)linearGradientMode), typeof(LinearGradientMode));

            // A degenerate rectangle cannot define a gradient axis.
            if (rect.Width == 0.0 || rect.Height == 0.0)
                throw new ArgumentException(SR.Format(SR.GdiplusInvalidRectangle, rect.ToString()));

            Gdip.CheckStatus(Gdip.GdipCreateLineBrushFromRect(
                ref rect,
                color1.ToArgb(), color2.ToArgb(),
                linearGradientMode,
                WrapMode.Tile,
                out IntPtr nativeBrush));
            SetNativeBrushInternal(nativeBrush);
        }

        public LinearGradientBrush(Rectangle rect, Color color1, Color color2, LinearGradientMode linearGradientMode)
        {
            if (linearGradientMode < LinearGradientMode.Horizontal || linearGradientMode > LinearGradientMode.BackwardDiagonal)
                throw new InvalidEnumArgumentException(nameof(linearGradientMode), unchecked((int)linearGradientMode), typeof(LinearGradientMode));

            if (rect.Width == 0 || rect.Height == 0)
                throw new ArgumentException(SR.Format(SR.GdiplusInvalidRectangle, rect.ToString()));

            Gdip.CheckStatus(Gdip.GdipCreateLineBrushFromRectI(
                ref rect,
                color1.ToArgb(), color2.ToArgb(),
                linearGradientMode,
                WrapMode.Tile,
                out IntPtr nativeBrush));
            SetNativeBrushInternal(nativeBrush);
        }

        public LinearGradientBrush(RectangleF rect, Color color1, Color color2, float angle) : this(rect, color1, color2, angle, false)
        {
        }

        public LinearGradientBrush(RectangleF rect, Color color1, Color color2, float angle, bool isAngleScaleable)
        {
            if (rect.Width == 0.0 || rect.Height == 0.0)
                throw new ArgumentException(SR.Format(SR.GdiplusInvalidRectangle, rect.ToString()));

            Gdip.CheckStatus(Gdip.GdipCreateLineBrushFromRectWithAngle(
                ref rect,
                color1.ToArgb(), color2.ToArgb(),
                angle,
                isAngleScaleable,
                (int)WrapMode.Tile,
                out IntPtr nativeBrush));
            SetNativeBrushInternal(nativeBrush);
        }

        public LinearGradientBrush(Rectangle rect, Color color1, Color color2, float angle) : this(rect, color1, color2, angle, false)
        {
        }

        public LinearGradientBrush(Rectangle rect, Color color1, Color color2, float angle, bool isAngleScaleable)
        {
            if (rect.Width == 0 || rect.Height == 0)
                throw new ArgumentException(SR.Format(SR.GdiplusInvalidRectangle, rect.ToString()));

            Gdip.CheckStatus(Gdip.GdipCreateLineBrushFromRectWithAngleI(
                ref rect,
                color1.ToArgb(), color2.ToArgb(),
                angle,
                isAngleScaleable,
                WrapMode.Tile,
                out IntPtr nativeBrush));
            SetNativeBrushInternal(nativeBrush);
        }

        // Used by Clone() to wrap an already-created native brush handle.
        internal LinearGradientBrush(IntPtr nativeBrush)
        {
            Debug.Assert(nativeBrush != IntPtr.Zero, "Initializing native brush with null.");
            SetNativeBrushInternal(nativeBrush);
        }

        public override object Clone()
        {
            Gdip.CheckStatus(Gdip.GdipCloneBrush(new HandleRef(this, NativeBrush), out IntPtr clonedBrush));
            return new LinearGradientBrush(clonedBrush);
        }

        /// <summary>Gets or sets the start and end colors of the gradient (two-element array).</summary>
        public Color[] LinearColors
        {
            get
            {
                int[] colors = new int[] { 0, 0 };
                Gdip.CheckStatus(Gdip.GdipGetLineColors(new HandleRef(this, NativeBrush), colors));

                return new Color[]
                {
                    Color.FromArgb(colors[0]),
                    Color.FromArgb(colors[1])
                };
            }
            set
            {
                Gdip.CheckStatus(Gdip.GdipSetLineColors(
                    new HandleRef(this, NativeBrush),
                    value[0].ToArgb(),
                    value[1].ToArgb()));
            }
        }

        /// <summary>Gets the rectangle that defines the starting and ending points of the gradient.</summary>
        public RectangleF Rectangle
        {
            get
            {
                Gdip.CheckStatus(Gdip.GdipGetLineRect(new HandleRef(this, NativeBrush), out RectangleF rect));
                return rect;
            }
        }

        public bool GammaCorrection
        {
            get
            {
                Gdip.CheckStatus(Gdip.GdipGetLineGammaCorrection(
                    new HandleRef(this, NativeBrush),
                    out bool useGammaCorrection));

                return useGammaCorrection;
            }
            set
            {
                Gdip.CheckStatus(Gdip.GdipSetLineGammaCorrection(new HandleRef(this, NativeBrush), value));
            }
        }

        public Blend Blend
        {
            get
            {
                // Interpolation colors and blends don't work together very well. Getting the Blend when InterpolationColors
                // is set puts the Brush into an unusable state afterwards.
                // Bail out here to avoid that.
                if (_interpolationColorsWasSet)
                    return null;

                // Figure out the size of blend factor array.
                Gdip.CheckStatus(Gdip.GdipGetLineBlendCount(new HandleRef(this, NativeBrush), out int retval));

                if (retval <= 0)
                    return null;

                // Allocate a temporary native memory buffer.
                int count = retval;

                IntPtr factors = IntPtr.Zero;
                IntPtr positions = IntPtr.Zero;

                try
                {
                    // 4 == sizeof(float); both arrays are float[count].
                    int size = checked(4 * count);
                    factors = Marshal.AllocHGlobal(size);
                    positions = Marshal.AllocHGlobal(size);

                    // Retrieve horizontal blend factors.
                    Gdip.CheckStatus(Gdip.GdipGetLineBlend(new HandleRef(this, NativeBrush), factors, positions, count));

                    // Return the result in a managed array.
                    var blend = new Blend(count);

                    Marshal.Copy(factors, blend.Factors, 0, count);
                    Marshal.Copy(positions, blend.Positions, 0, count);

                    return blend;
                }
                finally
                {
                    if (factors != IntPtr.Zero)
                    {
                        Marshal.FreeHGlobal(factors);
                    }
                    if (positions != IntPtr.Zero)
                    {
                        Marshal.FreeHGlobal(positions);
                    }
                }
            }
            set
            {
                // Do explicit parameter validation here; libgdiplus does not correctly validate the arguments
                // This is the original behavior on Desktop .NET (including the NullReferenceException below).
                if (value == null || value.Factors == null)
                    throw new NullReferenceException();

                if (value.Positions == null)
                    throw new ArgumentNullException("source");

                int count = value.Factors.Length;

                if (count == 0 || value.Positions.Length == 0)
                    throw new ArgumentException(SR.BlendObjectMustHaveTwoElements);

                if (count >= 2 && count != value.Positions.Length)
                    throw new ArgumentOutOfRangeException();

                if (count >= 2 && value.Positions[0] != 0.0F)
                    throw new ArgumentException(SR.BlendObjectFirstElementInvalid);

                if (count >= 2 && value.Positions[count - 1] != 1.0F)
                    throw new ArgumentException(SR.BlendObjectLastElementInvalid);

                // Allocate temporary native memory buffer and copy input blend factors into it.
                IntPtr factors = IntPtr.Zero;
                IntPtr positions = IntPtr.Zero;

                try
                {
                    // 4 == sizeof(float).
                    int size = checked(4 * count);
                    factors = Marshal.AllocHGlobal(size);
                    positions = Marshal.AllocHGlobal(size);

                    Marshal.Copy(value.Factors, 0, factors, count);
                    Marshal.Copy(value.Positions, 0, positions, count);

                    // Set blend factors.
                    Gdip.CheckStatus(Gdip.GdipSetLineBlend(
                        new HandleRef(this, NativeBrush), new HandleRef(null, factors), new HandleRef(null, positions), count));
                }
                finally
                {
                    if (factors != IntPtr.Zero)
                    {
                        Marshal.FreeHGlobal(factors);
                    }
                    if (positions != IntPtr.Zero)
                    {
                        Marshal.FreeHGlobal(positions);
                    }
                }
            }
        }

        public void SetSigmaBellShape(float focus) => SetSigmaBellShape(focus, (float)1.0);

        public void SetSigmaBellShape(float focus, float scale)
        {
            if (focus < 0 || focus > 1)
                throw new ArgumentException(SR.GdiplusInvalidParameter, nameof(focus));
            if (scale < 0 || scale > 1)
                throw new ArgumentException(SR.GdiplusInvalidParameter, nameof(scale));

            Gdip.CheckStatus(Gdip.GdipSetLineSigmaBlend(new HandleRef(this, NativeBrush), focus, scale));
        }

        public void SetBlendTriangularShape(float focus) => SetBlendTriangularShape(focus, (float)1.0);

        public void SetBlendTriangularShape(float focus, float scale)
        {
            if (focus < 0 || focus > 1)
                throw new ArgumentException(SR.GdiplusInvalidParameter, nameof(focus));
            if (scale < 0 || scale > 1)
                throw new ArgumentException(SR.GdiplusInvalidParameter, nameof(scale));

            Gdip.CheckStatus(Gdip.GdipSetLineLinearBlend(new HandleRef(this, NativeBrush), focus, scale));

            // Setting a triangular shape overrides the explicitly set interpolation colors. libgdiplus correctly clears
            // the interpolation colors (https://github.com/mono/libgdiplus/blob/master/src/lineargradientbrush.c#L959) but
            // returns WrongState instead of ArgumentException (https://github.com/mono/libgdiplus/blob/master/src/lineargradientbrush.c#L814)
            // when calling GdipGetLinePresetBlend, so it is important we set this to false. This way, we are sure get_InterpolationColors
            // will return an ArgumentException.
            _interpolationColorsWasSet = false;
        }

        public ColorBlend InterpolationColors
        {
            get
            {
                if (!_interpolationColorsWasSet)
                    throw new ArgumentException(SR.Format(SR.InterpolationColorsCommon,
                                                SR.InterpolationColorsColorBlendNotSet, string.Empty));

                // Figure out the size of blend factor array.
                Gdip.CheckStatus(Gdip.GdipGetLinePresetBlendCount(new HandleRef(this, NativeBrush), out int retval));

                // Allocate temporary native memory buffer.
                int count = retval;

                IntPtr colors = IntPtr.Zero;
                IntPtr positions = IntPtr.Zero;

                try
                {
                    // 4 == sizeof(int) == sizeof(float).
                    int size = checked(4 * count);
                    colors = Marshal.AllocHGlobal(size);
                    positions = Marshal.AllocHGlobal(size);

                    // Retrieve horizontal blend factors.
                    Gdip.CheckStatus(Gdip.GdipGetLinePresetBlend(new HandleRef(this, NativeBrush), colors, positions, count));

                    // Return the result in a managed array.
                    var blend = new ColorBlend(count);

                    int[] argb = new int[count];
                    Marshal.Copy(colors, argb, 0, count);
                    Marshal.Copy(positions, blend.Positions, 0, count);

                    // Copy ARGB values into Color array of ColorBlend.
                    blend.Colors = new Color[argb.Length];

                    for (int i = 0; i < argb.Length; i++)
                    {
                        blend.Colors[i] = Color.FromArgb(argb[i]);
                    }

                    return blend;
                }
                finally
                {
                    if (colors != IntPtr.Zero)
                    {
                        Marshal.FreeHGlobal(colors);
                    }
                    if (positions != IntPtr.Zero)
                    {
                        Marshal.FreeHGlobal(positions);
                    }
                }
            }
            set
            {
                // Flag is set before validation, matching the original Desktop behavior.
                _interpolationColorsWasSet = true;

                if (value == null)
                {
                    throw new ArgumentException(SR.Format(SR.InterpolationColorsCommon,
                                                SR.InterpolationColorsInvalidColorBlendObject, string.Empty));
                }
                else if (value.Colors.Length < 2)
                {
                    throw new ArgumentException(SR.Format(SR.InterpolationColorsCommon,
                                                SR.InterpolationColorsInvalidColorBlendObject,
                                                SR.InterpolationColorsLength));
                }
                else if (value.Colors.Length != value.Positions.Length)
                {
                    throw new ArgumentException(SR.Format(SR.InterpolationColorsCommon,
                                                SR.InterpolationColorsInvalidColorBlendObject,
                                                SR.InterpolationColorsLengthsDiffer));
                }
                else if (value.Positions[0] != 0.0f)
                {
                    throw new ArgumentException(SR.Format(SR.InterpolationColorsCommon,
                                                SR.InterpolationColorsInvalidColorBlendObject,
                                                SR.InterpolationColorsInvalidStartPosition));
                }
                else if (value.Positions[value.Positions.Length - 1] != 1.0f)
                {
                    throw new ArgumentException(SR.Format(SR.InterpolationColorsCommon,
                                                SR.InterpolationColorsInvalidColorBlendObject,
                                                SR.InterpolationColorsInvalidEndPosition));
                }

                // Allocate a temporary native memory buffer and copy input blend factors into it.
                int count = value.Colors.Length;

                IntPtr colors = IntPtr.Zero;
                IntPtr positions = IntPtr.Zero;

                try
                {
                    // 4 == sizeof(int) == sizeof(float).
                    int size = checked(4 * count);
                    colors = Marshal.AllocHGlobal(size);
                    positions = Marshal.AllocHGlobal(size);

                    int[] argbs = new int[count];
                    for (int i = 0; i < count; i++)
                    {
                        argbs[i] = value.Colors[i].ToArgb();
                    }

                    Marshal.Copy(argbs, 0, colors, count);
                    Marshal.Copy(value.Positions, 0, positions, count);

                    // Set blend factors.
                    Gdip.CheckStatus(Gdip.GdipSetLinePresetBlend(new HandleRef(this, NativeBrush), new HandleRef(null, colors), new HandleRef(null, positions), count));
                }
                finally
                {
                    if (colors != IntPtr.Zero)
                    {
                        Marshal.FreeHGlobal(colors);
                    }
                    if (positions != IntPtr.Zero)
                    {
                        Marshal.FreeHGlobal(positions);
                    }
                }
            }
        }

        public WrapMode WrapMode
        {
            get
            {
                Gdip.CheckStatus(Gdip.GdipGetLineWrapMode(new HandleRef(this, NativeBrush), out int mode));
                return (WrapMode)mode;
            }
            set
            {
                if (value < WrapMode.Tile || value > WrapMode.Clamp)
                    throw new InvalidEnumArgumentException(nameof(value), (int)value, typeof(WrapMode));

                Gdip.CheckStatus(Gdip.GdipSetLineWrapMode(new HandleRef(this, NativeBrush), unchecked((int)value)));
            }
        }

        /// <summary>Gets or sets a copy of the brush's transformation matrix.</summary>
        public Matrix Transform
        {
            get
            {
                var matrix = new Matrix();
                Gdip.CheckStatus(Gdip.GdipGetLineTransform(new HandleRef(this, NativeBrush), new HandleRef(matrix, matrix.NativeMatrix)));
                return matrix;
            }
            set
            {
                if (value == null)
                    throw new ArgumentNullException(nameof(value));

                Gdip.CheckStatus(Gdip.GdipSetLineTransform(new HandleRef(this, NativeBrush), new HandleRef(value, value.NativeMatrix)));
            }
        }

        public void ResetTransform()
        {
            Gdip.CheckStatus(Gdip.GdipResetLineTransform(new HandleRef(this, NativeBrush)));
        }

        public void MultiplyTransform(Matrix matrix) => MultiplyTransform(matrix, MatrixOrder.Prepend);

        public void MultiplyTransform(Matrix matrix, MatrixOrder order)
        {
            if (matrix == null)
                throw new ArgumentNullException(nameof(matrix));

            // Multiplying the transform by a disposed matrix is a nop in GDI+, but throws
            // with the libgdiplus backend. Simulate a nop for compatibility with GDI+.
            if (matrix.NativeMatrix == IntPtr.Zero)
                return;

            Gdip.CheckStatus(Gdip.GdipMultiplyLineTransform(
                new HandleRef(this, NativeBrush),
                new HandleRef(matrix, matrix.NativeMatrix),
                order));
        }

        public void TranslateTransform(float dx, float dy) => TranslateTransform(dx, dy, MatrixOrder.Prepend);

        public void TranslateTransform(float dx, float dy, MatrixOrder order)
        {
            Gdip.CheckStatus(Gdip.GdipTranslateLineTransform(
                new HandleRef(this, NativeBrush),
                dx, dy, order));
        }

        public void ScaleTransform(float sx, float sy) => ScaleTransform(sx, sy, MatrixOrder.Prepend);

        public void ScaleTransform(float sx, float sy, MatrixOrder order)
        {
            Gdip.CheckStatus(Gdip.GdipScaleLineTransform(
                new HandleRef(this, NativeBrush),
                sx, sy, order));
        }

        public void RotateTransform(float angle) => RotateTransform(angle, MatrixOrder.Prepend);

        public void RotateTransform(float angle, MatrixOrder order)
        {
            Gdip.CheckStatus(Gdip.GdipRotateLineTransform(
                new HandleRef(this, NativeBrush),
                angle, order));
        }
    }
}
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!

using gagvr = Google.Ads.GoogleAds.V10.Resources;
using gax = Google.Api.Gax;
using sys = System;

namespace Google.Ads.GoogleAds.V10.Resources
{
    /// <summary>Resource name for the <c>Campaign</c> resource.</summary>
    public sealed partial class CampaignName : gax::IResourceName, sys::IEquatable<CampaignName>
    {
        /// <summary>The possible contents of <see cref="CampaignName"/>.</summary>
        public enum ResourceNameType
        {
            /// <summary>An unparsed resource name.</summary>
            Unparsed = 0,

            /// <summary>A resource name with pattern <c>customers/{customer_id}/campaigns/{campaign_id}</c>.</summary>
            CustomerCampaign = 1,
        }

        // Single shared template used for both formatting and parsing.
        private static gax::PathTemplate s_customerCampaign = new gax::PathTemplate("customers/{customer_id}/campaigns/{campaign_id}");

        /// <summary>Creates a <see cref="CampaignName"/> containing an unparsed resource name.</summary>
        /// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
        /// <returns>
        /// A new instance of <see cref="CampaignName"/> containing the provided <paramref name="unparsedResourceName"/>
        /// .
        /// </returns>
        public static CampaignName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
            new CampaignName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));

        /// <summary>
        /// Creates a <see cref="CampaignName"/> with the pattern <c>customers/{customer_id}/campaigns/{campaign_id}</c>
        /// .
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="campaignId">The <c>Campaign</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>A new instance of <see cref="CampaignName"/> constructed from the provided ids.</returns>
        public static CampaignName FromCustomerCampaign(string customerId, string campaignId) =>
            new CampaignName(ResourceNameType.CustomerCampaign, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), campaignId: gax::GaxPreconditions.CheckNotNullOrEmpty(campaignId, nameof(campaignId)));

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="CampaignName"/> with pattern
        /// <c>customers/{customer_id}/campaigns/{campaign_id}</c>.
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="campaignId">The <c>Campaign</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>
        /// The string representation of this <see cref="CampaignName"/> with pattern
        /// <c>customers/{customer_id}/campaigns/{campaign_id}</c>.
        /// </returns>
        public static string Format(string customerId, string campaignId) =>
            FormatCustomerCampaign(customerId, campaignId);

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="CampaignName"/> with pattern
        /// <c>customers/{customer_id}/campaigns/{campaign_id}</c>.
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="campaignId">The <c>Campaign</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>
        /// The string representation of this <see cref="CampaignName"/> with pattern
        /// <c>customers/{customer_id}/campaigns/{campaign_id}</c>.
        /// </returns>
        public static string FormatCustomerCampaign(string customerId, string campaignId) =>
            s_customerCampaign.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), gax::GaxPreconditions.CheckNotNullOrEmpty(campaignId, nameof(campaignId)));

        /// <summary>Parses the given resource name string into a new <see cref="CampaignName"/> instance.</summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item><description><c>customers/{customer_id}/campaigns/{campaign_id}</c></description></item>
        /// </list>
        /// </remarks>
        /// <param name="campaignName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <returns>The parsed <see cref="CampaignName"/> if successful.</returns>
        public static CampaignName Parse(string campaignName) => Parse(campaignName, false);

        /// <summary>
        /// Parses the given resource name string into a new <see cref="CampaignName"/> instance; optionally allowing an
        /// unparseable resource name.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item><description><c>customers/{customer_id}/campaigns/{campaign_id}</c></description></item>
        /// </list>
        /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="campaignName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <returns>The parsed <see cref="CampaignName"/> if successful.</returns>
        public static CampaignName Parse(string campaignName, bool allowUnparsed) =>
            TryParse(campaignName, allowUnparsed, out CampaignName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="CampaignName"/> instance.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item><description><c>customers/{customer_id}/campaigns/{campaign_id}</c></description></item>
        /// </list>
        /// </remarks>
        /// <param name="campaignName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="CampaignName"/>, or <c>null</c> if parsing failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string campaignName, out CampaignName result) =>
            TryParse(campaignName, false, out result);

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="CampaignName"/> instance; optionally
        /// allowing an unparseable resource name.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as one of the following:
        /// <list type="bullet">
        /// <item><description><c>customers/{customer_id}/campaigns/{campaign_id}</c></description></item>
        /// </list>
        /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="campaignName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="CampaignName"/>, or <c>null</c> if parsing failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string campaignName, bool allowUnparsed, out CampaignName result)
        {
            gax::GaxPreconditions.CheckNotNull(campaignName, nameof(campaignName));
            gax::TemplatedResourceName resourceName;
            // Try the known pattern first; only fall back to an unparsed name when allowed.
            if (s_customerCampaign.TryParseName(campaignName, out resourceName))
            {
                result = FromCustomerCampaign(resourceName[0], resourceName[1]);
                return true;
            }
            if (allowUnparsed)
            {
                if (gax::UnparsedResourceName.TryParse(campaignName, out gax::UnparsedResourceName unparsedResourceName))
                {
                    result = FromUnparsed(unparsedResourceName);
                    return true;
                }
            }
            result = null;
            return false;
        }

        private CampaignName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string campaignId = null, string customerId = null)
        {
            Type = type;
            UnparsedResource = unparsedResourceName;
            CampaignId = campaignId;
            CustomerId = customerId;
        }

        /// <summary>
        /// Constructs a new instance of a <see cref="CampaignName"/> class from the component parts of pattern
        /// <c>customers/{customer_id}/campaigns/{campaign_id}</c>
        /// </summary>
        /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="campaignId">The <c>Campaign</c> ID. Must not be <c>null</c> or empty.</param>
        public CampaignName(string customerId, string campaignId) : this(ResourceNameType.CustomerCampaign, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), campaignId: gax::GaxPreconditions.CheckNotNullOrEmpty(campaignId, nameof(campaignId)))
        {
        }

        /// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
        public ResourceNameType Type { get; }

        /// <summary>
        /// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
        /// unparsed resource name.
        /// </summary>
        public gax::UnparsedResourceName UnparsedResource { get; }

        /// <summary>
        /// The <c>Campaign</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string CampaignId { get; }

        /// <summary>
        /// The <c>Customer</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string CustomerId { get; }

        /// <summary>Whether this instance contains a resource name with a known pattern.</summary>
        public bool IsKnownPattern => Type != ResourceNameType.Unparsed;

        /// <summary>The string representation of the resource name.</summary>
        /// <returns>The string representation of the resource name.</returns>
        public override string ToString()
        {
            switch (Type)
            {
                case ResourceNameType.Unparsed: return UnparsedResource.ToString();
                case ResourceNameType.CustomerCampaign: return s_customerCampaign.Expand(CustomerId, CampaignId);
                default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
            }
        }

        /// <summary>Returns a hash code for this resource name.</summary>
        // Equality and hashing are both defined in terms of the canonical string form.
        public override int GetHashCode() => ToString().GetHashCode();

        /// <inheritdoc/>
        public override bool Equals(object obj) => Equals(obj as CampaignName);

        /// <inheritdoc/>
        public bool Equals(CampaignName other) => ToString() == other?.ToString();

        /// <inheritdoc/>
        // Null-safe: two null references compare equal via ReferenceEquals.
        public static bool operator ==(CampaignName a, CampaignName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);

        /// <inheritdoc/>
        public static bool operator !=(CampaignName a, CampaignName b) => !(a == b);
    }

    public partial class Campaign
    {
        /// <summary>
        /// <see cref="gagvr::CampaignName"/>-typed view over the <see cref="ResourceName"/> resource name property.
        /// </summary>
        internal CampaignName ResourceNameAsCampaignName
        {
            get => string.IsNullOrEmpty(ResourceName) ? null : gagvr::CampaignName.Parse(ResourceName, allowUnparsed: true);
            set => ResourceName = value?.ToString() ?? "";
        }

        /// <summary>
        /// <see cref="gagvr::CampaignName"/>-typed view over the <see cref="BaseCampaign"/> resource name property.
        /// </summary>
        internal CampaignName BaseCampaignAsCampaignName
        {
            get => string.IsNullOrEmpty(BaseCampaign) ? null : gagvr::CampaignName.Parse(BaseCampaign, allowUnparsed: true);
            set => BaseCampaign = value?.ToString() ?? "";
        }

        /// <summary>
        /// <see cref="gagvr::CampaignName"/>-typed view over the <see cref="Name"/> resource name property.
        /// </summary>
        internal CampaignName CampaignName
        {
            get => string.IsNullOrEmpty(Name) ? null : gagvr::CampaignName.Parse(Name, allowUnparsed: true);
            set => Name = value?.ToString() ?? "";
        }

        /// <summary>
        /// <see cref="CampaignLabelName"/>-typed view over the <see cref="Labels"/> resource name property.
        /// </summary>
        internal gax::ResourceNameList<CampaignLabelName> LabelsAsCampaignLabelNames
        {
            get => new gax::ResourceNameList<CampaignLabelName>(Labels, s => string.IsNullOrEmpty(s) ? null : CampaignLabelName.Parse(s, allowUnparsed: true));
        }

        /// <summary>
        /// <see cref="CampaignBudgetName"/>-typed view over the <see cref="CampaignBudget"/> resource name property.
        /// </summary>
        internal CampaignBudgetName CampaignBudgetAsCampaignBudgetName
        {
            get => string.IsNullOrEmpty(CampaignBudget) ? null : CampaignBudgetName.Parse(CampaignBudget, allowUnparsed: true);
            set => CampaignBudget = value?.ToString() ?? "";
        }

        /// <summary>
        /// <see cref="BiddingStrategyName"/>-typed view over the <see cref="BiddingStrategy"/> resource name property.
        /// </summary>
        internal BiddingStrategyName BiddingStrategyAsBiddingStrategyName
        {
            get => string.IsNullOrEmpty(BiddingStrategy) ? null : BiddingStrategyName.Parse(BiddingStrategy, allowUnparsed: true);
            set => BiddingStrategy = value?.ToString() ?? "";
        }

        /// <summary>
        /// <see cref="AccessibleBiddingStrategyName"/>-typed view over the <see cref="AccessibleBiddingStrategy"/>
        /// resource name property.
        /// </summary>
        internal AccessibleBiddingStrategyName AccessibleBiddingStrategyAsAccessibleBiddingStrategyName
        {
            get => string.IsNullOrEmpty(AccessibleBiddingStrategy) ? null : AccessibleBiddingStrategyName.Parse(AccessibleBiddingStrategy, allowUnparsed: true);
            set => AccessibleBiddingStrategy = value?.ToString() ?? "";
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using Signum.Utilities.Reflection;
using System.Text.RegularExpressions;
using System.Globalization;
using System.Reflection;
using System.Collections.Concurrent;
using System.Collections;
using Signum.Utilities.ExpressionTrees;

namespace Signum.Utilities
{
    /// <summary>
    /// Reflection-based CSV serializer/deserializer. Columns are taken from the public
    /// fields/properties of <typeparamref name="T"/> (in declaration order, see
    /// <c>CsvMemberCache&lt;T&gt;</c>); the list separator and number/date formatting come
    /// from the chosen <see cref="CultureInfo"/>.
    /// </summary>
    public static class Csv
    {
        // Default changed since Excel exports not to UTF8 and https://stackoverflow.com/questions/49215791/vs-code-c-sharp-system-notsupportedexception-no-data-is-available-for-encodin
        public static Encoding DefaultEncoding => Encoding.UTF8;

        // Process-wide default culture; null means "fall back to CultureInfo.CurrentCulture" at call time.
        public static CultureInfo? DefaultCulture = null;

        /// <summary>
        /// Writes <paramref name="collection"/> as CSV to <paramref name="fileName"/> and returns the file name.
        /// </summary>
        /// <param name="append">When true the rows are appended to an existing file (headers are still written if <paramref name="writeHeaders"/> is true — TODO confirm that is intended for append mode).</param>
        public static string ToCsvFile<T>(this IEnumerable<T> collection, string fileName, Encoding? encoding = null, CultureInfo? culture = null, bool writeHeaders = true, bool autoFlush = false, bool append = false,
            Func<CsvMemberInfo<T>, CultureInfo, Func<object?, string?>>? toStringFactory = null)
        {
            using (FileStream fs = append ? new FileStream(fileName, FileMode.Append, FileAccess.Write) : File.Create(fileName))
                ToCsv<T>(collection, fs, encoding, culture, writeHeaders, autoFlush, toStringFactory);

            return fileName;
        }

        /// <summary>Serializes <paramref name="collection"/> to CSV and returns the encoded bytes.</summary>
        public static byte[] ToCsvBytes<T>(this IEnumerable<T> collection, Encoding? encoding = null, CultureInfo? culture = null, bool writeHeaders = true, bool autoFlush = false,
            Func<CsvMemberInfo<T>, CultureInfo, Func<object?, string?>>? toStringFactory = null)
        {
            using (MemoryStream ms = new MemoryStream())
            {
                collection.ToCsv(ms, encoding, culture, writeHeaders, autoFlush, toStringFactory);
                return ms.ToArray();
            }
        }

        /// <summary>
        /// Core CSV writer. Two modes:
        /// - if T itself is an IList, each element is treated as a raw row of cells (no headers, no member reflection);
        /// - otherwise one column per reflected member, with an optional trailing IList member expanded into a
        ///   variable number of cells (CsvMemberCache enforces that only the last member may be a collection).
        /// </summary>
        public static void ToCsv<T>(this IEnumerable<T> collection, Stream stream, Encoding? encoding = null, CultureInfo? culture = null, bool writeHeaders = true, bool autoFlush = false,
            Func<CsvMemberInfo<T>, CultureInfo, Func<object?, string?>>? toStringFactory = null)
        {
            var defEncoding = encoding ?? DefaultEncoding;
            var defCulture = culture ?? DefaultCulture ?? CultureInfo.CurrentCulture;
            // The cell separator is culture-dependent (e.g. ';' in many European locales).
            string separator = defCulture.TextInfo.ListSeparator;

            if (typeof(IList).IsAssignableFrom(typeof(T)))
            {
                // Raw mode: each item is already a list of cells.
                using (StreamWriter sw = new StreamWriter(stream, defEncoding) { AutoFlush = autoFlush })
                {
                    foreach (IList? row in collection)
                    {
                        for (int i = 0; i < row!.Count; i++)
                        {
                            var obj = row![i];
                            var str = EncodeCsv(ConvertToString(obj, null, defCulture), defCulture);
                            sw.Write(str);
                            if (i < row!.Count - 1)
                                sw.Write(separator);
                            else
                                sw.WriteLine();
                        }
                    }
                }
            }
            else
            {
                // Reflection mode: one cell per member, resolved through the per-type member cache.
                var members = CsvMemberCache<T>.Members;
                var toString = members.Select(c => GetToString(defCulture, c, toStringFactory)).ToList();
                using (StreamWriter sw = new StreamWriter(stream, defEncoding) { AutoFlush = autoFlush })
                {
                    if (writeHeaders)
                        sw.WriteLine(members.ToString(m => HandleSpaces(m.MemberInfo.Name), separator));

                    foreach (var item in collection)
                    {
                        for (int i = 0; i < members.Count; i++)
                        {
                            var member = members[i];
                            var toStr = toString[i];
                            if (!member.IsCollection)
                            {
                                if (i != 0)
                                    sw.Write(separator);
                                var obj = member.MemberEntry.Getter!(item);
                                var str = EncodeCsv(toStr(obj), defCulture);
                                sw.Write(str);
                            }
                            else
                            {
                                // Last member is a collection: expand every element into its own cell.
                                var list = (IList?)member.MemberEntry.Getter!(item);
                                for (int j = 0; j < list!.Count; j++)
                                {
                                    if (!(i == 0 && j == 0))
                                        sw.Write(separator);
                                    var str = EncodeCsv(toStr(list[j]), defCulture);
                                    sw.Write(str);
                                }
                            }
                        }
                        sw.WriteLine();
                    }
                }
            }
        }

        // Quotes a cell (RFC-4180 style) when it contains the separator, a quote, or a line break;
        // embedded quotes are doubled. Null stays null (written as an empty cell).
        static string? EncodeCsv(string? p, CultureInfo culture)
        {
            if (p == null)
                return null;

            string separator = culture.TextInfo.ListSeparator;

            if (p.Contains(separator) || p.Contains("\"") || p.Contains("\r") || p.Contains("\n"))
            {
                return "\"" + p.Replace("\"", "\"\"") + "\"";
            }
            return p;
        }

        // Resolves the cell formatter for a column: the user-supplied factory wins; otherwise
        // fall back to culture-aware ConvertToString using the member's [Format] attribute.
        private static Func<object?, string?> GetToString<T>(CultureInfo culture, CsvMemberInfo<T> column,
            Func<CsvMemberInfo<T>, CultureInfo, Func<object?, string?>>? toStringFactory)
        {
            if (toStringFactory != null)
            {
                var result = toStringFactory(column, culture);
                if (result != null)
                    return result;
            }

            return obj => ConvertToString(obj, column.Format, culture);
        }

        // Default value-to-cell conversion: IFormattable types honor the format string and culture.
        static string ConvertToString(object? obj, string? format, CultureInfo culture)
        {
            if (obj == null)
                return "";

            if (obj is IFormattable f)
                return f.ToString(format, culture);
            else
                return obj!.ToString()!;
        }

        // Header naming: "__" becomes a literal "_", single "_" becomes a space
        // ("^" is used as a temporary placeholder during the swap).
        static string HandleSpaces(string p)
        {
            return p.Replace("__", "^").Replace("_", " ").Replace("^", "_");
        }

        /// <summary>Reads a CSV file into a list of <typeparamref name="T"/>. <paramref name="skipLines"/> defaults to 1 (header row).</summary>
        public static List<T> ReadFile<T>(string fileName, Encoding? encoding = null, CultureInfo? culture = null, int skipLines = 1, CsvReadOptions<T>? options = null)
            where T : class, new()
        {
            encoding = encoding ?? DefaultEncoding;
            culture = culture ?? DefaultCulture ?? CultureInfo.CurrentCulture;

            using (FileStream fs = File.OpenRead(fileName))
                return ReadStream<T>(fs, encoding, culture, skipLines, options).ToList();
        }

        /// <summary>Reads CSV from an in-memory byte buffer into a list of <typeparamref name="T"/>.</summary>
        public static List<T> ReadBytes<T>(byte[] data, Encoding? encoding = null, CultureInfo? culture = null, int skipLines = 1, CsvReadOptions<T>? options = null)
            where T : class, new()
        {
            using (MemoryStream ms = new MemoryStream(data))
                return ReadStream<T>(ms, encoding, culture, skipLines, options).ToList();
        }

        /// <summary>
        /// Lazily reads CSV records from <paramref name="stream"/>. Two strategies:
        /// - AsumeSingleLine: read line by line (faster, but a quoted cell cannot contain a newline);
        /// - otherwise: read the whole stream and let the multiline regex split records
        ///   (supports embedded line breaks inside quoted cells).
        /// Parse failures are wrapped in <see cref="ParseCsvException"/> unless options.SkipError elects to skip the row.
        /// </summary>
        public static IEnumerable<T> ReadStream<T>(Stream stream, Encoding? encoding = null, CultureInfo? culture = null, int skipLines = 1, CsvReadOptions<T>? options = null)
            where T : class, new()
        {
            encoding = encoding ?? DefaultEncoding;
            var defCulture = culture ?? DefaultCulture ?? CultureInfo.CurrentCulture;
            var defOptions = options ?? new CsvReadOptions<T>();

            var members = CsvMemberCache<T>.Members;
            var parsers = members.Select(m => GetParser(defCulture, m, defOptions.ParserFactory)).ToList();

            Regex regex = GetRegex(defCulture, defOptions.RegexTimeout);

            if (defOptions.AsumeSingleLine)
            {
                using (StreamReader sr = new StreamReader(stream, encoding))
                {
                    for (int i = 0; i < skipLines; i++)
                        sr.ReadLine();

                    var line = skipLines;
                    while (true)
                    {
                        string? csvLine = sr.ReadLine();

                        if (csvLine == null)
                            yield break;

                        Match? m = null;
                        T? t = null;
                        try
                        {
                            m = regex.Match(csvLine);
                            if (m.Length > 0)
                            {
                                t = ReadObject<T>(m, members, parsers);
                            }
                        }
                        catch (Exception e)
                        {
                            // Attach the row number for ParseCsvException, then either skip or rethrow wrapped.
                            e.Data["row"] = line;
                            if (defOptions.SkipError == null || !defOptions.SkipError(e, m))
                                throw new ParseCsvException(e);
                        }
                        // yield outside the try block (C# forbids yield inside try/catch).
                        if (t != null)
                            yield return t;
                    }
                }
            }
            else
            {
                using (StreamReader sr = new StreamReader(stream, encoding))
                {
                    string str = sr.ReadToEnd();

                    var matches = regex.Matches(str).Cast<Match>();

                    if (skipLines > 0)
                        matches = matches.Skip(skipLines);

                    int line = skipLines;
                    foreach (var m in matches)
                    {
                        if (m.Length > 0)
                        {
                            T? t = null;
                            try
                            {
                                t = ReadObject<T>(m, members, parsers);
                            }
                            catch (Exception e)
                            {
                                e.Data["row"] = line;
                                if (defOptions.SkipError == null || !defOptions.SkipError(e, m))
                                    throw new ParseCsvException(e);
                            }

                            if (t != null)
                                yield return t;
                        }
                        line++;
                    }
                }
            }
        }

        /// <summary>Parses a single CSV line into a <typeparamref name="T"/>.</summary>
        public static T ReadLine<T>(string csvLine, CultureInfo? culture = null, CsvReadOptions<T>? options = null)
            where T : class, new()
        {
            var defOptions = options ?? new CsvReadOptions<T>();
            var defCulture = culture ?? DefaultCulture ?? CultureInfo.CurrentCulture;

            Regex regex = GetRegex(defCulture, defOptions.RegexTimeout);

            Match m = regex.Match(csvLine);

            var members = CsvMemberCache<T>.Members;

            return ReadObject<T>(m, members, members.Select(c => GetParser(defCulture, c, defOptions.ParserFactory)).ToList());
        }

        // Resolves the cell parser for a column: user factory first, else culture-aware ConvertTo.
        // For the trailing collection column, parse cells as the list's element type.
        private static Func<string, object?> GetParser<T>(CultureInfo culture, CsvMemberInfo<T> column,
            Func<CsvMemberInfo<T>, CultureInfo, Func<string, object?>?>? parserFactory)
        {
            if (parserFactory != null)
            {
                var result = parserFactory(column, culture);
                if (result != null)
                    return result;
            }

            var type = column.IsCollection ? column.MemberInfo.ReturningType().ElementType()! : column.MemberInfo.ReturningType();
            return str => ConvertTo(str, type, culture, column.Format);
        }

        // Materializes one T from a regex match. The "val" group captures one cell per capture;
        // a trailing collection member absorbs all remaining captures. Failures get the offending
        // value/member attached to Exception.Data before rethrowing (picked up by ParseCsvException).
        static T ReadObject<T>(Match m, List<CsvMemberInfo<T>> members, List<Func<string, object?>> parsers)
            where T : new()
        {
            var vals = m.Groups["val"].Captures;

            if (vals.Count < members.Count)
                throw new FormatException("Only {0} columns found (instead of {1}) in line: {2}".FormatWith(vals.Count, members.Count, m.Value));

            T t = new T();
            for (int i = 0; i < members.Count; i++)
            {
                var member = members[i];
                var parser = parsers[i];
                string? str = null;
                try
                {
                    if (!member.IsCollection)
                    {
                        str = DecodeCsv(vals[i].Value);
                        object? val = parser(str);
                        member.MemberEntry.Setter!(t, val);
                    }
                    else
                    {
                        // Last member: collect every remaining cell into a new list instance.
                        var list = (IList)Activator.CreateInstance(member.MemberInfo.ReturningType())!;
                        for (int j = i; j < vals.Count; j++)
                        {
                            str = DecodeCsv(vals[j].Value);
                            object? val = parser(str);
                            list.Add(val);
                        }
                        member.MemberEntry.Setter!(t, list);
                    }
                }
                catch (Exception e)
                {
                    e.Data["value"] = str;
                    e.Data["member"] = members[i].MemberInfo.Name;
                    throw;
                }
            }
            return t;
        }

        // One compiled Regex per separator char (cultures sharing a separator share the entry).
        static ConcurrentDictionary<char, Regex> regexCache = new ConcurrentDictionary<char, Regex>();

        // Template record pattern; ' stands in for " and ; for the separator — both are
        // substituted in GetRegex so the template itself needs no escaping gymnastics.
        const string BaseRegex = @"^((?<val>'(?:[^']+|'')*'|[^;\r\n]*))?((?!($|\r\n));(?<val>'(?:[^']+|'')*'|[^;\r\n]*))*($|\r\n)";

        static Regex GetRegex(CultureInfo culture, TimeSpan timeout)
        {
            // NOTE: assumes the culture's ListSeparator is a single character (SingleEx throws otherwise).
            char separator = culture.TextInfo.ListSeparator.SingleEx();
            return regexCache.GetOrAdd(separator, s =>
                new Regex(BaseRegex.Replace('\'', '"').Replace(';', s), RegexOptions.Multiline | RegexOptions.ExplicitCapture, timeout));
        }

        // Per-type cache of the reflected CSV columns; built once in the static constructor.
        static class CsvMemberCache<T>
        {
            static CsvMemberCache()
            {
                var memberEntries = MemberEntryFactory.GenerateList<T>(MemberOptions.Fields | MemberOptions.Properties | MemberOptions.Typed | MemberOptions.Setters | MemberOptions.Getter);
                Members = memberEntries.Select((me, i) =>
                {
                    var type = me.MemberInfo.ReturningType();
                    var isCollection = typeof(IList).IsAssignableFrom(type);
                    if (isCollection)
                    {
                        // Arrays can't be grown cell-by-cell, and a variable-length column
                        // only makes sense in the last position.
                        if (type.IsArray)
                            throw new InvalidOperationException($"{me.MemberInfo.Name} is an array, use a List<T> instead");
                        if (i != memberEntries.Count - 1)
                            throw new InvalidOperationException($"{me.MemberInfo.Name} is of {type} but is not the last member");
                    }
                    return new CsvMemberInfo<T>(i, me, me.MemberInfo.GetCustomAttribute<FormatAttribute>()?.Format, isCollection);
                }).ToList();
            }

            public static List<CsvMemberInfo<T>> Members;
        }

        // Unquotes a quoted cell, un-doubles embedded quotes, and normalizes bare \n to \r\n.
        static string DecodeCsv(string s)
        {
            if (s.StartsWith("\"") && s.EndsWith("\""))
            {
                string str = s.Substring(1, s.Length - 2).Replace("\"\"", "\"");
                return Regex.Replace(str, "(?<!\r)\n", "\r\n");
            }
            return s;
        }

        // Default cell-to-value conversion: handles Nullable<T> (empty cell -> null), enums,
        // DateTime (with optional exact format), then falls back to Convert.ChangeType.
        static object? ConvertTo(string s, Type type, CultureInfo culture, string? format)
        {
            Type? baseType = Nullable.GetUnderlyingType(type);
            if (baseType != null)
            {
                if (!s.HasText())
                    return null;
                type = baseType;
            }

            if (type.IsEnum)
                return Enum.Parse(type, s);

            if (type == typeof(DateTime))
                if (format == null)
                    return DateTime.Parse(s, culture);
                else
                    return DateTime.ParseExact(s, format, culture);

            return Convert.ChangeType(s, type, culture);
        }
    }

    /// <summary>Optional knobs for the CSV readers.</summary>
    public class CsvReadOptions<T> where T : class
    {
        // Per-column custom parser; return null to fall back to the default parser.
        public Func<CsvMemberInfo<T>, CultureInfo, Func<string, object?>?>? ParserFactory;
        // When true, records are assumed to be single physical lines (no newlines inside quoted cells).
        // NOTE(review): "Asume" is a typo of "Assume", but the name is public API — renaming would break callers.
        public bool AsumeSingleLine = false;
        // Return true to skip the offending row instead of throwing ParseCsvException.
        public Func<Exception, Match?, bool>? SkipError;
        public TimeSpan RegexTimeout = Regex.InfiniteMatchTimeout;
    }

    /// <summary>Describes one CSV column: its position, reflected member, optional [Format] string, and whether it is the trailing collection column.</summary>
    public class CsvMemberInfo<T>
    {
        public readonly int Index;
        public readonly MemberEntry<T> MemberEntry;
        public readonly string? Format;
        public readonly bool IsCollection;

        public MemberInfo MemberInfo
        {
            get { return this.MemberEntry.MemberInfo; }
        }

        internal CsvMemberInfo(int index, MemberEntry<T> memberEntry, string? format, bool isCollection)
        {
            this.Index = index;
            this.MemberEntry = memberEntry;
            this.Format = format;
            this.IsCollection = isCollection;
        }
    }

    /// <summary>
    /// Wraps a parse failure with the row/member/value context that the readers stash
    /// into the inner exception's Data dictionary.
    /// </summary>
    [Serializable]
    public class ParseCsvException : Exception
    {
        public int? Row { get; set; }
        public string Member { get; set; }
        public string Value { get; set; }

        public ParseCsvException(Exception inner) : base(inner.Message, inner)
        {
            this.Row = (int?)inner.Data["row"];
            this.Value = (string)inner.Data["value"]!;
            this.Member = (string)inner.Data["member"]!;
        }

        public override string Message
        {
            get
            {
                // NOTE(review): the trailing ')' after {base.Message} looks like an unbalanced
                // parenthesis in the message text — confirm before changing (message text is behavior).
                return $"(Row: {this.Row}, Member: {this.Member}, Value: '{this.Value}') {base.Message})";
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Diagnostics;
using CocosSharp;

namespace tests
{
    /// <summary>
    /// Demo: pauses only this layer's scheduled selectors. tick1/tick2 fire every 0.5s
    /// until pause() runs at the 3 second mark and pauses this target.
    /// </summary>
    public class SchedulerPauseResume : SchedulerTestLayer
    {
        public override void OnEnter()
        {
            base.OnEnter();

            Schedule(tick1, 0.5f);
            Schedule(tick2, 0.5f);
            Schedule(pause, 3.0f);
        }

        public override string title()
        {
            return "Pause / Resume";
        }

        public override string subtitle()
        {
            return "Scheduler should be paused after 3 seconds. See console";
        }

        public void tick1(float dt)
        {
            CCLog.Log("tick1");
        }

        public void tick2(float dt)
        {
            CCLog.Log("tick2");
        }

        // Pauses this layer as a scheduler target; tick1/tick2 stop firing after this.
        public void pause(float dt)
        {
            Application.Scheduler.PauseTarget(this);
        }
    }

    /// <summary>
    /// Demo: pauses ALL scheduler targets after 3s and resumes them via a menu button.
    /// A rotating sprite visualizes that actions are paused/resumed along with the timers.
    /// </summary>
    public class SchedulerPauseResumeAll : SchedulerTestLayer
    {
        private static CCRotateBy rotateBy = new CCRotateBy(3.0f, 360);

        public override void OnEnter()
        {
            base.OnEnter();

            var s = Layer.VisibleBoundsWorldspace.Size;

            var sprite = new CCSprite("Images/grossinis_sister1.png");
            sprite.Position = VisibleBoundsWorldspace.Center;
            AddChild(sprite);
            sprite.RepeatForever(rotateBy);

            // Add a menu item to resume the Scheduled actions.
            CCMenuItemFont.FontSize = 32;
            CCMenuItemFont.FontName = "MarkerFelt";
            var menu = new CCMenu(new CCMenuItemFont("Resume", resume));
            menu.AlignItemsVertically(4);
            menu.Position = new CCPoint(s.Width / 2, s.Height / 4);
            AddChild(menu);

            Schedule();                     // schedules the per-frame Update
            Schedule(tick1, 0.5f);
            Schedule(tick2, 1.0f);
            Schedule(pause, 3.0f, 0, 0);    // fire once after a 3s delay
        }

        private int times;

        public override void Update(float dt)
        {
            // Create a counter so that we can actually see the console output
            times++;
            if (times < 20)
                // base.Update (dt); // Do nothing
                CCLog.Log("Update {0}", dt);
        }

        public override void OnExit()
        {
            // Safety: if the layer exits while targets are still paused, resume them so the
            // global scheduler isn't left frozen for subsequent tests.
            if (pausedTargets != null && pausedTargets.Count > 0)
            {
                Application.Scheduler.Resume(pausedTargets);
                pausedTargets.Clear();
            }
            base.OnExit();
        }

        public override string title()
        {
            return "Pause / Resume";
        }

        public override string subtitle()
        {
            return "Everything will pause after 3s, then resume at 5s. See console";
        }

        public void tick1(float dt)
        {
            CCLog.Log("tick1");
        }

        public void tick2(float dt)
        {
            CCLog.Log("tick2");
        }

        // Targets paused by pause(); resumed from the menu or on exit.
        List<ICCUpdatable> pausedTargets;

        public void pause(float dt)
        {
            CCLog.Log("Pausing");
            pausedTargets = Application.Scheduler.PauseAllTargets();
            // should have only 2 items: ActionManager, self
            Debug.Assert(pausedTargets.Count == 2, "Error: pausedTargets should have only 2 items");
        }

        // public void resume(float dt)
        // Menu callback: resumes everything paused by pause().
        public void resume(object pSender)
        {
            times = 0;
            CCLog.Log("Resuming");
            Application.Scheduler.Resume(pausedTargets);
            pausedTargets.Clear();
        }
    }

    /// <summary>
    /// Same as SchedulerPauseResumeAll but pause() only suspends targets at user priority
    /// (CCSchedulePriority.User), leaving system-priority targets running.
    /// </summary>
    public class SchedulerPauseResumeUser : SchedulerTestLayer
    {
        private static CCRotateBy rotateBy = new CCRotateBy(3.0f, 360);

        public override void OnEnter()
        {
            base.OnEnter();

            var s = Layer.VisibleBoundsWorldspace.Size;

            var sprite = new CCSprite("Images/grossinis_sister1.png");
            sprite.Position = VisibleBoundsWorldspace.Center;
            AddChild(sprite);
            sprite.RepeatForever(rotateBy);

            // Add a menu item to resume the Scheduled actions.
            // NOTE(review): these two locals are never used — the sibling class assigns
            // CCMenuItemFont.FontSize / CCMenuItemFont.FontName instead. Looks like a
            // copy-paste slip; confirm intent before changing (it affects menu rendering).
            uint fontSize = 32;
            string fontName = "MarkerFelt";
            var menu = new CCMenu(new CCMenuItemFont("Resume", resume));
            menu.AlignItemsVertically(4);
            menu.Position = new CCPoint(s.Width / 2, s.Height / 4);
            AddChild(menu);

            Schedule();                     // schedules the per-frame Update
            Schedule(tick1, 0.5f);
            Schedule(tick2, 1.0f);
            Schedule(pause, 3.0f, 0, 0);    // fire once after a 3s delay
        }

        private int times;

        public override void Update(float dt)
        {
            // Create a counter so that we can actually see the console output
            times++;
            if (times < 20)
                // base.Update (dt); // Do nothing
                CCLog.Log("Update {0}", dt);
        }

        public override void OnExit()
        {
            // Safety: resume anything still paused so later tests see a live scheduler.
            if (pausedTargets != null && pausedTargets.Count > 0)
            {
                Application.Scheduler.Resume(pausedTargets);
                pausedTargets.Clear();
            }
            base.OnExit();
        }

        public override string title()
        {
            return "Pause / Resume";
        }

        public override string subtitle()
        {
            return "Everything will pause after 3s, then resume at 5s. See console";
        }

        public void tick1(float dt)
        {
            CCLog.Log("tick1");
        }

        public void tick2(float dt)
        {
            CCLog.Log("tick2");
        }

        List<ICCUpdatable> pausedTargets;

        public void pause(float dt)
        {
            CCLog.Log("Pausing");
            pausedTargets = Application.Scheduler.PauseAllTargets(CCSchedulePriority.User);
        }

        // public void resume(float dt)
        // Menu callback: resumes the user-priority targets paused by pause().
        public void resume(object pSender)
        {
            times = 0;
            CCLog.Log("Resuming");
            Application.Scheduler.Resume(pausedTargets);
            pausedTargets.Clear();
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
/*============================================================
**
**
**
** Purpose: Defines the lock that implements
** single-writer/multiple-reader semantics
**
**
===========================================================*/
#if FEATURE_RWLOCK
namespace System.Threading {
    using System.Threading;
    using System.Security.Permissions;
    using System.Runtime.Remoting;
    using System;
    using System.Runtime.CompilerServices;
    using System.Runtime.InteropServices;
    using System.Runtime.ConstrainedExecution;
    using System.Runtime.Versioning;
    using System.Diagnostics.Contracts;

    // Managed facade over the CLR's native reader/writer lock. Nearly every operation is an
    // InternalCall into the VM; this class only holds the state the VM manipulates.
    // CriticalFinalizerObject guarantees the finalizer runs so native resources are released.
    [HostProtection(Synchronization=true, ExternalThreading=true)]
    [ComVisible(true)]
    public sealed class ReaderWriterLock: CriticalFinalizerObject
    {
        /*
         * Constructor
         */
        [System.Security.SecuritySafeCritical]  // auto-generated
        public ReaderWriterLock()
        {
            PrivateInitialize();
        }

        /*
         * Destructor
         */
        [System.Security.SecuritySafeCritical]  // auto-generated
        ~ReaderWriterLock()
        {
            PrivateDestruct();
        }

        /*
         * Property that returns TRUE if the reader lock is held
         * by the current thread
         */
        public bool IsReaderLockHeld {
            [System.Security.SecuritySafeCritical]  // auto-generated
            [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
            get {
                return(PrivateGetIsReaderLockHeld());
            }
        }

        /*
         * Property that returns TRUE if the writer lock is held
         * by the current thread
         */
        public bool IsWriterLockHeld {
            [System.Security.SecuritySafeCritical]  // auto-generated
            [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
            get {
                return(PrivateGetIsWriterLockHeld());
            }
        }

        /*
         * Property that returns the current writer sequence number.
         * The caller should be a reader or writer for getting
         * meaningful results
         */
        public int WriterSeqNum {
            [System.Security.SecuritySafeCritical]  // auto-generated
            get {
                return(PrivateGetWriterSeqNum());
            }
        }

        /*
         * Acquires reader lock. The thread will block if a different
         * thread has writer lock.
         */
        [System.Security.SecurityCritical]  // auto-generated
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        private extern void AcquireReaderLockInternal(int millisecondsTimeout);

        [System.Security.SecuritySafeCritical]  // auto-generated
        public void AcquireReaderLock(int millisecondsTimeout)
        {
            AcquireReaderLockInternal(millisecondsTimeout);
        }

        [System.Security.SecuritySafeCritical]  // auto-generated
        public void AcquireReaderLock(TimeSpan timeout)
        {
            // -1 (Timeout.Infinite) is the only negative value allowed.
            long tm = (long)timeout.TotalMilliseconds;
            if (tm < -1 || tm > (long) Int32.MaxValue)
                throw new ArgumentOutOfRangeException("timeout", Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegOrNegative1"));
            AcquireReaderLockInternal((int)tm);
        }

        /*
         * Acquires writer lock. The thread will block if a different
         * thread has reader lock. It will deadlock if this thread
         * has reader lock. Use UpgradeToWriterLock when you are not
         * sure if the thread has reader lock
         */
        [System.Security.SecurityCritical]  // auto-generated
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        private extern void AcquireWriterLockInternal(int millisecondsTimeout);

        [System.Security.SecuritySafeCritical]  // auto-generated
        public void AcquireWriterLock(int millisecondsTimeout)
        {
            AcquireWriterLockInternal(millisecondsTimeout);
        }

        [System.Security.SecuritySafeCritical]  // auto-generated
        public void AcquireWriterLock(TimeSpan timeout)
        {
            // -1 (Timeout.Infinite) is the only negative value allowed.
            long tm = (long)timeout.TotalMilliseconds;
            if (tm < -1 || tm > (long) Int32.MaxValue)
                throw new ArgumentOutOfRangeException("timeout", Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegOrNegative1"));
            AcquireWriterLockInternal((int)tm);
        }

        /*
         * Releases reader lock.
         */
        [System.Security.SecurityCritical]  // auto-generated
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
        private extern void ReleaseReaderLockInternal();

        [System.Security.SecuritySafeCritical]  // auto-generated
        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
        public void ReleaseReaderLock()
        {
            ReleaseReaderLockInternal();
        }

        /*
         * Releases writer lock.
         */
        [System.Security.SecurityCritical]  // auto-generated
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
        private extern void ReleaseWriterLockInternal();

        [System.Security.SecuritySafeCritical]  // auto-generated
        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
        public void ReleaseWriterLock()
        {
            ReleaseWriterLockInternal();
        }

        /*
         * Upgrades the thread to a writer. If the thread is a
         * reader, it is possible that the reader lock was
         * released before writer lock was acquired.
         */
        [System.Security.SecuritySafeCritical]  // auto-generated
        public LockCookie UpgradeToWriterLock(int millisecondsTimeout)
        {
            // The cookie records the pre-upgrade state so DowngradeFromWriterLock can restore it.
            LockCookie result = new LockCookie ();
            FCallUpgradeToWriterLock (ref result, millisecondsTimeout);
            return result;
        }

        [System.Security.SecurityCritical]  // auto-generated
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        private extern void FCallUpgradeToWriterLock(ref LockCookie result, int millisecondsTimeout);

        public LockCookie UpgradeToWriterLock(TimeSpan timeout)
        {
            long tm = (long)timeout.TotalMilliseconds;
            if (tm < -1 || tm > (long) Int32.MaxValue)
                throw new ArgumentOutOfRangeException("timeout", Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegOrNegative1"));
            return UpgradeToWriterLock((int)tm);
        }

        /*
         * Restores the lock status of the thread to the one it was
         * in when it called UpgradeToWriterLock.
         */
        [System.Security.SecurityCritical]  // auto-generated
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        private extern void DowngradeFromWriterLockInternal(ref LockCookie lockCookie);

        [System.Security.SecuritySafeCritical]  // auto-generated
        public void DowngradeFromWriterLock(ref LockCookie lockCookie)
        {
            DowngradeFromWriterLockInternal(ref lockCookie);
        }

        /*
         * Releases the lock irrespective of the number of times the thread
         * acquired the lock
         */
        [System.Security.SecuritySafeCritical]  // auto-generated
        public LockCookie ReleaseLock()
        {
            // The cookie captures the full recursion state so RestoreLock can reinstate it.
            LockCookie result = new LockCookie ();
            FCallReleaseLock (ref result);
            return result;
        }

        [System.Security.SecurityCritical]  // auto-generated
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        private extern void FCallReleaseLock(ref LockCookie result);

        /*
         * Restores the lock status of the thread to the one it was
         * in when it called ReleaseLock.
         */
        [System.Security.SecurityCritical]  // auto-generated
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        private extern void RestoreLockInternal(ref LockCookie lockCookie);

        [System.Security.SecuritySafeCritical]  // auto-generated
        public void RestoreLock(ref LockCookie lockCookie)
        {
            RestoreLockInternal(ref lockCookie);
        }

        /*
         * Internal helper that returns TRUE if the reader lock is held
         * by the current thread
         */
        [System.Security.SecurityCritical]  // auto-generated
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
        private extern bool PrivateGetIsReaderLockHeld();

        /*
         * Internal helper that returns TRUE if the writer lock is held
         * by the current thread
         */
        [System.Security.SecurityCritical]  // auto-generated
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        [ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
        private extern bool PrivateGetIsWriterLockHeld();

        /*
         * Internal helper that returns the current writer sequence
         * number. The caller should be a reader or writer for getting
         * meaningful results
         */
        [System.Security.SecurityCritical]  // auto-generated
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        private extern int PrivateGetWriterSeqNum();

        /*
         * Returns true if there were intermediate writes since the
         * sequence number was obtained. The caller should be
         * a reader or writer for getting meaningful results
         */
        [System.Security.SecuritySafeCritical]  // auto-generated
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        public extern bool AnyWritersSince(int seqNum);

        // Initialize state kept inside the lock
        [System.Security.SecurityCritical]  // auto-generated
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        private extern void PrivateInitialize();

        // Destruct resource associated with the lock
        [System.Security.SecurityCritical]  // auto-generated
        [MethodImplAttribute(MethodImplOptions.InternalCall)]
        private extern void PrivateDestruct();

        // State
        // WARNING: these fields mirror the native layout the VM reads/writes directly —
        // do not rename, reorder, or remove them.
#pragma warning disable 169
#pragma warning disable 414  // These fields are not used from managed.
        private IntPtr    _hWriterEvent;
        private IntPtr    _hReaderEvent;
        private IntPtr    _hObjectHandle;
        private int       _dwState = 0;
        private int       _dwULockID = 0;
        private int       _dwLLockID = 0;
        private int       _dwWriterID = 0;
        private int       _dwWriterSeqNum = 0;
        private short     _wWriterLevel;
#if RWLOCK_STATISTICS
        // WARNING: You must explicitly #define RWLOCK_STATISTICS when you
        // build in both the VM and BCL directories if you want this.
        private int       _dwReaderEntryCount = 0;
        private int       _dwReaderContentionCount = 0;
        private int       _dwWriterEntryCount = 0;
        private int       _dwWriterContentionCount = 0;
        private int       _dwEventsReleasedCount = 0;
#endif // RWLOCK_STATISTICS
#pragma warning restore 414
#pragma warning restore 169
    }
}
#endif //FEATURE_RWLOCK
// Licensed to the .NET Foundation under one or more agreements.
// See the LICENSE file in the project root for more information.
//
// System.Net.HttpEndPointManager
//
// Author:
//	Gonzalo Paniagua Javier (gonzalo@ximian.com)
//
// Copyright (c) 2005 Novell, Inc. (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//

using System.Collections;
using System.Collections.Generic;
using System.Net.Sockets;

namespace System.Net
{
    /// <summary>
    /// Process-wide registry that maps (IPAddress, port) to an HttpEndPointListener and
    /// routes HttpListener prefixes to the right listener. All mutation happens under
    /// the SyncRoot of the shared s_ipEndPoints dictionary.
    /// </summary>
    internal sealed class HttpEndPointManager
    {
        // address -> (port -> listener); shared by every HttpListener in the process.
        private static Dictionary<IPAddress, Dictionary<int, HttpEndPointListener>> s_ipEndPoints = new Dictionary<IPAddress, Dictionary<int, HttpEndPointListener>>();

        private HttpEndPointManager()
        {
        }

        /// <summary>
        /// Registers every prefix of <paramref name="listener"/>. On failure the prefixes
        /// added so far are rolled back before the exception is rethrown.
        /// </summary>
        public static void AddListener(HttpListener listener)
        {
            List<string> added = new List<string>();
            try
            {
                lock ((s_ipEndPoints as ICollection).SyncRoot)
                {
                    foreach (string prefix in listener.Prefixes)
                    {
                        AddPrefixInternal(prefix, listener);
                        added.Add(prefix);
                    }
                }
            }
            catch
            {
                // Partial failure: undo the prefixes that did register, then propagate.
                foreach (string prefix in added)
                {
                    RemovePrefix(prefix, listener);
                }
                throw;
            }
        }

        /// <summary>Registers a single prefix for <paramref name="listener"/> under the global lock.</summary>
        public static void AddPrefix(string prefix, HttpListener listener)
        {
            lock ((s_ipEndPoints as ICollection).SyncRoot)
            {
                AddPrefixInternal(prefix, listener);
            }
        }

        // Validates a prefix string ("scheme://host:port/path/") and binds it to the
        // endpoint listener for its host/port. Caller must hold the global lock.
        private static void AddPrefixInternal(string p, HttpListener listener)
        {
            int start = p.IndexOf(':') + 3;   // skip past "://"
            int colon = p.IndexOf(':', start);
            if (colon != -1)
            {
                // root can't be -1 here, since we've already checked for ending '/' in ListenerPrefix.
                int root = p.IndexOf('/', colon, p.Length - colon);
                string portString = p.Substring(colon + 1, root - colon - 1);

                int port;
                if (!int.TryParse(portString, out port) || port <= 0 || port >= 65536)
                {
                    throw new HttpListenerException((int)HttpStatusCode.BadRequest, SR.net_invalid_port);
                }
            }

            ListenerPrefix lp = new ListenerPrefix(p);
            if (lp.Host != "*" && lp.Host != "+" && Uri.CheckHostName(lp.Host) == UriHostNameType.Unknown)
                throw new HttpListenerException((int)HttpStatusCode.BadRequest, SR.net_listener_host);

            if (lp.Path.IndexOf('%') != -1)
                throw new HttpListenerException((int)HttpStatusCode.BadRequest, SR.net_invalid_path);

            if (lp.Path.IndexOf("//", StringComparison.Ordinal) != -1)
                throw new HttpListenerException((int)HttpStatusCode.BadRequest, SR.net_invalid_path);

            // listens on all the interfaces if host name cannot be parsed by IPAddress.
            HttpEndPointListener epl = GetEPListener(lp.Host, lp.Port, listener, lp.Secure, out bool alreadyExists);
            if (alreadyExists)
            {
                // 98 = EADDRINUSE-style error code used for "prefix already registered".
                throw new HttpListenerException(98, SR.Format(SR.net_listener_already, p));
            }
            epl.AddPrefix(lp, listener);
        }

        // Resolves (host, port) to an HttpEndPointListener, creating one (and its socket)
        // on first use. "*" and unresolvable hosts fall back to IPAddress.Any.
        // alreadyExists reports whether a listener for this endpoint was already registered.
        private static HttpEndPointListener GetEPListener(string host, int port, HttpListener listener, bool secure, out bool alreadyExists)
        {
            alreadyExists = false;

            IPAddress addr;
            if (host == "*")
                addr = IPAddress.Any;
            else if (IPAddress.TryParse(host, out addr) == false)
            {
                try
                {
                    // Best-effort DNS resolution; any failure means "listen on all interfaces".
                    IPHostEntry iphost = Dns.GetHostEntry(host);
                    if (iphost != null)
                        addr = iphost.AddressList[0];
                    else
                        addr = IPAddress.Any;
                }
                catch
                {
                    addr = IPAddress.Any;
                }
            }

            Dictionary<int, HttpEndPointListener> p = null;
            if (s_ipEndPoints.ContainsKey(addr))
            {
                p = s_ipEndPoints[addr];
            }
            else
            {
                p = new Dictionary<int, HttpEndPointListener>();
                s_ipEndPoints[addr] = p;
            }

            HttpEndPointListener epl = null;
            if (p.ContainsKey(port))
            {
                alreadyExists = true;
                epl = p[port];
            }
            else
            {
                try
                {
                    epl = new HttpEndPointListener(listener, addr, port, secure);
                }
                catch (SocketException ex)
                {
                    // Surface bind/listen failures as HttpListenerException, as callers expect.
                    throw new HttpListenerException(ex.ErrorCode, ex.Message);
                }
                p[port] = epl;
            }

            return epl;
        }

        /// <summary>
        /// Unregisters and closes the listener bound to <paramref name="ep"/>; prunes the
        /// address entry when its last port goes away.
        /// </summary>
        public static void RemoveEndPoint(HttpEndPointListener epl, IPEndPoint ep)
        {
            lock ((s_ipEndPoints as ICollection).SyncRoot)
            {
                Dictionary<int, HttpEndPointListener> p = null;
                p = s_ipEndPoints[ep.Address];
                p.Remove(ep.Port);
                if (p.Count == 0)
                {
                    s_ipEndPoints.Remove(ep.Address);
                }
                epl.Close();
            }
        }

        /// <summary>Unregisters every prefix of <paramref name="listener"/>.</summary>
        public static void RemoveListener(HttpListener listener)
        {
            lock ((s_ipEndPoints as ICollection).SyncRoot)
            {
                foreach (string prefix in listener.Prefixes)
                {
                    RemovePrefixInternal(prefix, listener);
                }
            }
        }

        /// <summary>Unregisters a single prefix under the global lock.</summary>
        public static void RemovePrefix(string prefix, HttpListener listener)
        {
            lock ((s_ipEndPoints as ICollection).SyncRoot)
            {
                RemovePrefixInternal(prefix, listener);
            }
        }

        // Mirror of AddPrefixInternal: prefixes that would have been rejected at add time
        // are silently ignored here. Caller must hold the global lock.
        private static void RemovePrefixInternal(string prefix, HttpListener listener)
        {
            ListenerPrefix lp = new ListenerPrefix(prefix);
            if (lp.Path.IndexOf('%') != -1)
                return;

            if (lp.Path.IndexOf("//", StringComparison.Ordinal) != -1)
                return;

            HttpEndPointListener epl = GetEPListener(lp.Host, lp.Port, listener, lp.Secure, out bool ignored);
            epl.RemovePrefix(lp, listener);
        }
    }
}
using UnityEngine;
using UnityEditor;

/// <summary>
/// This editor helper class makes it easy to create and show a context menu.
/// It ensures that it's possible to add multiple items with the same name.
/// </summary>

public static class NGUIContextMenu
{
    // --- Static [MenuItem] handlers: these register entries in Unity's menus and
    // --- open the matching NGUI help topic (or copy/paste widget settings).

    [MenuItem("Help/NGUI Documentation")]
    static void ShowHelp0 (MenuCommand command) { NGUIHelp.Show(); }

    [MenuItem("CONTEXT/UIWidget/Copy Widget")]
    static void CopyStyle (MenuCommand command) { NGUISettings.CopyWidget(command.context as UIWidget); }

    [MenuItem("CONTEXT/UIWidget/Paste Widget Values")]
    static void PasteStyle (MenuCommand command) { NGUISettings.PasteWidget(command.context as UIWidget, true); }

    [MenuItem("CONTEXT/UIWidget/Paste Widget Style")]
    static void PasteStyle2 (MenuCommand command) { NGUISettings.PasteWidget(command.context as UIWidget, false); }

    [MenuItem("CONTEXT/UIWidget/Help")]
    static void ShowHelp1 (MenuCommand command) { NGUIHelp.Show(command.context); }

    [MenuItem("CONTEXT/UIButton/Help")]
    static void ShowHelp2 (MenuCommand command) { NGUIHelp.Show(typeof(UIButton)); }

    [MenuItem("CONTEXT/UIToggle/Help")]
    static void ShowHelp3 (MenuCommand command) { NGUIHelp.Show(typeof(UIToggle)); }

    [MenuItem("CONTEXT/UIRoot/Help")]
    static void ShowHelp4 (MenuCommand command) { NGUIHelp.Show(typeof(UIRoot)); }

    [MenuItem("CONTEXT/UICamera/Help")]
    static void ShowHelp5 (MenuCommand command) { NGUIHelp.Show(typeof(UICamera)); }

    [MenuItem("CONTEXT/UIAnchor/Help")]
    static void ShowHelp6 (MenuCommand command) { NGUIHelp.Show(typeof(UIAnchor)); }

    [MenuItem("CONTEXT/UIStretch/Help")]
    static void ShowHelp7 (MenuCommand command) { NGUIHelp.Show(typeof(UIStretch)); }

    [MenuItem("CONTEXT/UISlider/Help")]
    static void ShowHelp8 (MenuCommand command) { NGUIHelp.Show(typeof(UISlider)); }

#if !UNITY_3_5 && !UNITY_4_0 && !UNITY_4_1 && !UNITY_4_2
    [MenuItem("CONTEXT/UI2DSprite/Help")]
    static void ShowHelp9 (MenuCommand command) { NGUIHelp.Show(typeof(UI2DSprite)); }
#endif

    [MenuItem("CONTEXT/UIScrollBar/Help")]
    static void ShowHelp10 (MenuCommand command) { NGUIHelp.Show(typeof(UIScrollBar)); }

    [MenuItem("CONTEXT/UIProgressBar/Help")]
    static void ShowHelp11 (MenuCommand command) { NGUIHelp.Show(typeof(UIProgressBar)); }

    [MenuItem("CONTEXT/UIPopupList/Help")]
    static void ShowHelp12 (MenuCommand command) { NGUIHelp.Show(typeof(UIPopupList)); }

    [MenuItem("CONTEXT/UIInput/Help")]
    static void ShowHelp13 (MenuCommand command) { NGUIHelp.Show(typeof(UIInput)); }

    [MenuItem("CONTEXT/UIKeyBinding/Help")]
    static void ShowHelp14 (MenuCommand command) { NGUIHelp.Show(typeof(UIKeyBinding)); }

    [MenuItem("CONTEXT/UIGrid/Help")]
    static void ShowHelp15 (MenuCommand command) { NGUIHelp.Show(typeof(UIGrid)); }

    [MenuItem("CONTEXT/UITable/Help")]
    static void ShowHelp16 (MenuCommand command) { NGUIHelp.Show(typeof(UITable)); }

    [MenuItem("CONTEXT/UIPlayTween/Help")]
    static void ShowHelp17 (MenuCommand command) { NGUIHelp.Show(typeof(UIPlayTween)); }

    [MenuItem("CONTEXT/UIPlayAnimation/Help")]
    static void ShowHelp18 (MenuCommand command) { NGUIHelp.Show(typeof(UIPlayAnimation)); }

    [MenuItem("CONTEXT/UIPlaySound/Help")]
    static void ShowHelp19 (MenuCommand command) { NGUIHelp.Show(typeof(UIPlaySound)); }

    [MenuItem("CONTEXT/UIScrollView/Help")]
    static void ShowHelp20 (MenuCommand command) { NGUIHelp.Show(typeof(UIScrollView)); }

    [MenuItem("CONTEXT/UIDragScrollView/Help")]
    static void ShowHelp21 (MenuCommand command) { NGUIHelp.Show(typeof(UIDragScrollView)); }

    [MenuItem("CONTEXT/UICenterOnChild/Help")]
    static void ShowHelp22 (MenuCommand command) { NGUIHelp.Show(typeof(UICenterOnChild)); }

    [MenuItem("CONTEXT/UICenterOnClick/Help")]
    static void ShowHelp23 (MenuCommand command) { NGUIHelp.Show(typeof(UICenterOnClick)); }

    // NOTE(review): several menu paths below duplicate ones registered above
    // (e.g. "CONTEXT/UIPlayTween/Help" also appears on ShowHelp17, "CONTEXT/UIPlayAnimation/Help"
    // on ShowHelp18, and "CONTEXT/UIScrollView(/UIDragScrollView)/Help" on ShowHelp20/21).
    // Unity typically warns about duplicate [MenuItem] registrations — confirm this is intended.
    [MenuItem("CONTEXT/UITweener/Help")]
    [MenuItem("CONTEXT/UIPlayTween/Help")]
    static void ShowHelp24 (MenuCommand command) { NGUIHelp.Show(typeof(UITweener)); }

    // NOTE(review): the "ActiveAnimation" context entry opens the UIPlayAnimation topic —
    // verify that is the intended help page for ActiveAnimation.
    [MenuItem("CONTEXT/ActiveAnimation/Help")]
    [MenuItem("CONTEXT/UIPlayAnimation/Help")]
    static void ShowHelp25 (MenuCommand command) { NGUIHelp.Show(typeof(UIPlayAnimation)); }

    [MenuItem("CONTEXT/UIScrollView/Help")]
    [MenuItem("CONTEXT/UIDragScrollView/Help")]
    static void ShowHelp26 (MenuCommand command) { NGUIHelp.Show(typeof(UIScrollView)); }

    [MenuItem("CONTEXT/UIPanel/Help")]
    static void ShowHelp27 (MenuCommand command) { NGUIHelp.Show(typeof(UIPanel)); }

    // Callback signature used by the "Create/..." entries: creates a widget under 'go'
    // and returns it so the selection can be moved to it.
    public delegate UIWidget AddFunc (GameObject go);

    // Raw item names added so far (used to disambiguate repeated names with " [n]" suffixes).
    static BetterList<string> mEntries = new BetterList<string>();
    // The menu being built; lazily created, reset after Show()/Clear().
    static GenericMenu mMenu;

    /// <summary>
    /// Clear the context menu list.
    /// </summary>

    static public void Clear ()
    {
        mEntries.Clear();
        mMenu = null;
    }

    /// <summary>
    /// Add a new context menu entry.
    /// </summary>

    static public void AddItem (string item, bool isChecked, GenericMenu.MenuFunction2 callback, object param)
    {
        if (callback != null)
        {
            if (mMenu == null) mMenu = new GenericMenu();
            int count = 0;

            // Count prior entries with the same name so duplicates get a " [n]" suffix.
            for (int i = 0; i < mEntries.size; ++i)
            {
                string str = mEntries[i];
                if (str == item) ++count;
            }
            mEntries.Add(item);

            if (count > 0) item += " [" + count + "]";
            mMenu.AddItem(new GUIContent(item), isChecked, callback, param);
        }
        else AddDisabledItem(item);
    }

    /// <summary>
    /// Wrapper function called by the menu that in turn calls the correct callback.
    /// </summary>

    static void AddChild (object obj)
    {
        AddFunc func = obj as AddFunc;
        UIWidget widget = func(Selection.activeGameObject);
        if (widget != null) Selection.activeGameObject = widget.gameObject;
    }

    /// <summary>
    /// Add a new context menu entry.
    /// </summary>

    static void AddChildWidget (string item, bool isChecked, AddFunc callback)
    {
        if (callback != null)
        {
            if (mMenu == null) mMenu = new GenericMenu();
            int count = 0;

            // Same duplicate-name handling as AddItem.
            for (int i = 0; i < mEntries.size; ++i)
            {
                string str = mEntries[i];
                if (str == item) ++count;
            }
            mEntries.Add(item);

            if (count > 0) item += " [" + count + "]";
            mMenu.AddItem(new GUIContent(item), isChecked, AddChild, callback);
        }
        else AddDisabledItem(item);
    }

    /// <summary>
    /// Wrapper function called by the menu that in turn calls the correct callback.
    /// </summary>

    static void AddSibling (object obj)
    {
        AddFunc func = obj as AddFunc;
        // Creates the widget under the selection's parent, making it a sibling.
        UIWidget widget = func(Selection.activeTransform.parent.gameObject);
        if (widget != null) Selection.activeGameObject = widget.gameObject;
    }

    /// <summary>
    /// Add a new context menu entry.
    /// </summary>

    static void AddSiblingWidget (string item, bool isChecked, AddFunc callback)
    {
        if (callback != null)
        {
            if (mMenu == null) mMenu = new GenericMenu();
            int count = 0;

            // Same duplicate-name handling as AddItem.
            for (int i = 0; i < mEntries.size; ++i)
            {
                string str = mEntries[i];
                if (str == item) ++count;
            }
            mEntries.Add(item);

            if (count > 0) item += " [" + count + "]";
            mMenu.AddItem(new GUIContent(item), isChecked, AddSibling, callback);
        }
        else AddDisabledItem(item);
    }

    /// <summary>
    /// Add commonly NGUI context menu options.
    /// </summary>

    static public void AddCommonItems (GameObject target)
    {
        if (target != null)
        {
            UIWidget widget = target.GetComponent<UIWidget>();
            string myName = string.Format("Selected {0}", (widget != null) ? NGUITools.GetTypeName(widget) : "Object");

            // Depth / ordering operations on the current selection.
            AddItem(myName + "/Bring to Front", false, delegate(object obj) { NGUITools.BringForward(Selection.activeGameObject); }, null);
            AddItem(myName + "/Push to Back", false, delegate(object obj) { NGUITools.PushBack(Selection.activeGameObject); }, null);
            AddItem(myName + "/Nudge Forward", false, delegate(object obj) { NGUITools.AdjustDepth(Selection.activeGameObject, 1); }, null);
            AddItem(myName + "/Nudge Back", false, delegate(object obj) { NGUITools.AdjustDepth(Selection.activeGameObject, -1); }, null);

            if (widget != null)
            {
                NGUIContextMenu.AddSeparator(myName + "/");
                AddItem(myName + "/Make Pixel-Perfect", false, OnMakePixelPerfect, Selection.activeTransform);
                if (target.GetComponent<BoxCollider>() != null)
                {
                    AddItem(myName + "/Reset Collider Size", false, OnBoxCollider, target);
                }
            }

            NGUIContextMenu.AddSeparator(myName + "/");
            AddItem(myName + "/Delete", false, OnDelete, target);
            NGUIContextMenu.AddSeparator("");

            // Widgets with a parent offer both "Child" and "Sibling" creation entries;
            // everything else only offers child creation.
            if (Selection.activeTransform.parent != null && widget != null)
            {
                AddChildWidget("Create/Sprite/Child", false, NGUISettings.AddSprite);
                AddChildWidget("Create/Label/Child", false, NGUISettings.AddLabel);
                AddChildWidget("Create/Invisible Widget/Child", false, NGUISettings.AddWidget);
                AddChildWidget("Create/Simple Texture/Child", false, NGUISettings.AddTexture);
#if !UNITY_3_5 && !UNITY_4_0 && !UNITY_4_1 && !UNITY_4_2
                AddChildWidget("Create/Unity 2D Sprite/Child", false, NGUISettings.Add2DSprite);
#endif
                AddSiblingWidget("Create/Sprite/Sibling", false, NGUISettings.AddSprite);
                AddSiblingWidget("Create/Label/Sibling", false, NGUISettings.AddLabel);
                AddSiblingWidget("Create/Invisible Widget/Sibling", false, NGUISettings.AddWidget);
                AddSiblingWidget("Create/Simple Texture/Sibling", false, NGUISettings.AddTexture);
#if !UNITY_3_5 && !UNITY_4_0 && !UNITY_4_1 && !UNITY_4_2
                AddSiblingWidget("Create/Unity 2D Sprite/Sibling", false, NGUISettings.Add2DSprite);
#endif
            }
            else
            {
                AddChildWidget("Create/Sprite", false, NGUISettings.AddSprite);
                AddChildWidget("Create/Label", false, NGUISettings.AddLabel);
                AddChildWidget("Create/Invisible Widget", false, NGUISettings.AddWidget);
                AddChildWidget("Create/Simple Texture", false, NGUISettings.AddTexture);
#if !UNITY_3_5 && !UNITY_4_0 && !UNITY_4_1 && !UNITY_4_2
                AddChildWidget("Create/Unity 2D Sprite", false, NGUISettings.Add2DSprite);
#endif
            }

            NGUIContextMenu.AddSeparator("Create/");
            AddItem("Create/Panel", false, AddPanel, target);
            AddItem("Create/Scroll View", false, AddScrollView, target);
            AddItem("Create/Grid", false, AddChild<UIGrid>, target);
            AddItem("Create/Table", false, AddChild<UITable>, target);
            AddItem("Create/Anchor (Legacy)", false, AddChild<UIAnchor>, target);

            if (target.GetComponent<UIPanel>() != null)
            {
                if (target.GetComponent<UIScrollView>() == null)
                {
                    AddItem("Attach/Scroll View", false, delegate(object obj) { target.AddComponent<UIScrollView>(); }, null);
                    NGUIContextMenu.AddSeparator("Attach/");
                }
            }
            else if (target.collider == null)
            {
                AddItem("Attach/Box Collider", false, delegate(object obj) { NGUITools.AddWidgetCollider(target); }, null);
                NGUIContextMenu.AddSeparator("Attach/");
            }

            bool header = false;
            UIScrollView scrollView = NGUITools.FindInParents<UIScrollView>(target);

            if (scrollView != null)
            {
                if (scrollView.GetComponentInChildren<UICenterOnChild>() == null)
                {
                    AddItem("Attach/Center Scroll View on Child", false, delegate(object obj) { target.AddComponent<UICenterOnChild>(); }, null);
                    header = true;
                }
            }

            // Interaction scripts only make sense on objects with a collider.
            if (target.collider != null)
            {
                if (scrollView != null)
                {
                    if (target.GetComponent<UIDragScrollView>() == null)
                    {
                        AddItem("Attach/Drag Scroll View", false, delegate(object obj) { target.AddComponent<UIDragScrollView>(); }, null);
                        header = true;
                    }

                    if (target.GetComponent<UICenterOnClick>() == null && NGUITools.FindInParents<UICenterOnChild>(target) != null)
                    {
                        AddItem("Attach/Center Scroll View on Click", false, delegate(object obj) { target.AddComponent<UICenterOnClick>(); }, null);
                        header = true;
                    }
                }

                if (header) NGUIContextMenu.AddSeparator("Attach/");
                AddItem("Attach/Button Script", false, delegate(object obj) { target.AddComponent<UIButton>(); }, null);
                AddItem("Attach/Toggle Script", false, delegate(object obj) { target.AddComponent<UIToggle>(); }, null);
                AddItem("Attach/Slider Script", false, delegate(object obj) { target.AddComponent<UISlider>(); }, null);
                AddItem("Attach/Scroll Bar Script", false, delegate(object obj) { target.AddComponent<UIScrollBar>(); }, null);
                // NOTE(review): "Progress Bar Script" attaches UISlider, not UIProgressBar — confirm intended.
                AddItem("Attach/Progress Bar Script", false, delegate(object obj) { target.AddComponent<UISlider>(); }, null);
                AddItem("Attach/Popup List Script", false, delegate(object obj) { target.AddComponent<UIPopupList>(); }, null);
                AddItem("Attach/Input Field Script", false, delegate(object obj) { target.AddComponent<UIInput>(); }, null);
                NGUIContextMenu.AddSeparator("Attach/");

                if (target.GetComponent<UIDragResize>() == null)
                    AddItem("Attach/Drag Resize Script", false, delegate(object obj) { target.AddComponent<UIDragResize>(); }, null);

                if (target.GetComponent<UIDragScrollView>() == null)
                {
                    // Offer to wire a Drag Scroll View against the first clipped panel that has one.
                    for (int i = 0; i < UIPanel.list.size; ++i)
                    {
                        UIPanel pan = UIPanel.list[i];
                        if (pan.clipping == UIDrawCall.Clipping.None) continue;

                        UIScrollView dr = pan.GetComponent<UIScrollView>();
                        if (dr == null) continue;

                        // 'dr' is declared inside the loop, so the delegate captures this iteration's value.
                        AddItem("Attach/Drag Scroll View", false, delegate(object obj) { target.AddComponent<UIDragScrollView>().scrollView = dr; }, null);
                        header = true;
                        break;
                    }
                }

                AddItem("Attach/Key Binding Script", false, delegate(object obj) { target.AddComponent<UIKeyBinding>(); }, null);
                NGUIContextMenu.AddSeparator("Attach/");
                AddItem("Attach/Play Tween Script", false, delegate(object obj) { target.AddComponent<UIPlayTween>(); }, null);
                AddItem("Attach/Play Animation Script", false, delegate(object obj) { target.AddComponent<UIPlayAnimation>(); }, null);
                AddItem("Attach/Play Sound Script", false, delegate(object obj) { target.AddComponent<UIPlaySound>(); }, null);
            }

            // Tween entries: widgets get the widget-specific tweens; bare panels only alpha.
            if (widget != null)
            {
                AddMissingItem<TweenAlpha>(target, "Tween/Alpha");
                AddMissingItem<TweenColor>(target, "Tween/Color");
                AddMissingItem<TweenWidth>(target, "Tween/Width");
                AddMissingItem<TweenHeight>(target, "Tween/Height");
            }
            else if (target.GetComponent<UIPanel>() != null)
            {
                AddMissingItem<TweenAlpha>(target, "Tween/Alpha");
            }

            NGUIContextMenu.AddSeparator("Tween/");
            AddMissingItem<TweenPosition>(target, "Tween/Position");
            AddMissingItem<TweenRotation>(target, "Tween/Rotation");
            AddMissingItem<TweenScale>(target, "Tween/Scale");
            AddMissingItem<TweenTransform>(target, "Tween/Transform");

            if (target.GetComponent<AudioSource>() != null) AddMissingItem<TweenVolume>(target, "Tween/Volume");

            if (target.GetComponent<Camera>() != null)
            {
                AddMissingItem<TweenFOV>(target, "Tween/Field of View");
                AddMissingItem<TweenOrthoSize>(target, "Tween/Orthographic Size");
            }
        }
    }

    /// <summary>
    /// Helper function. Adds the menu item only when the component is not already attached.
    /// </summary>

    static void AddMissingItem<T> (GameObject target, string name) where T : MonoBehaviour
    {
        if (target.GetComponent<T>() == null)
            AddItem(name, false, delegate(object obj) { target.AddComponent<T>(); }, null);
    }

    /// <summary>
    /// Helper function for menu creation.
    /// </summary>

    static void AddChild<T> (object obj) where T : MonoBehaviour
    {
        GameObject go = obj as GameObject;
        T t = NGUITools.AddChild<T>(go);
        Selection.activeGameObject = t.gameObject;
    }

    /// <summary>
    /// Helper function for menu creation.
    /// </summary>

    static void AddPanel (object obj)
    {
        GameObject go = obj as GameObject;
        // Panels go alongside widgets, not inside them.
        if (go.GetComponent<UIWidget>() != null) go = go.transform.parent.gameObject;
        UIPanel panel = NGUISettings.AddPanel(go);
        Selection.activeGameObject = panel.gameObject;
    }

    /// <summary>
    /// Helper function for menu creation.
    /// </summary>

    static void AddScrollView (object obj)
    {
        GameObject go = obj as GameObject;
        // Scroll views go alongside widgets, not inside them.
        if (go.GetComponent<UIWidget>() != null) go = go.transform.parent.gameObject;
        UIPanel panel = NGUISettings.AddPanel(go);
        panel.clipping = UIDrawCall.Clipping.SoftClip;
        panel.gameObject.AddComponent<UIScrollView>();
        panel.name = "Scroll View";
        Selection.activeGameObject = panel.gameObject;
    }

    /// <summary>
    /// Add help options based on the components present on the specified game object.
    /// </summary>

    static public void AddHelp (GameObject go, bool addSeparator)
    {
        // NOTE(review): iterates Selection.activeGameObject's components, not 'go' — confirm intended.
        MonoBehaviour[] comps = Selection.activeGameObject.GetComponents<MonoBehaviour>();

        bool addedSomething = false;

        for (int i = 0; i < comps.Length; ++i)
        {
            System.Type type = comps[i].GetType();
            string url = NGUIHelp.GetHelpURL(type);

            if (url != null)
            {
                if (addSeparator)
                {
                    // Only emit the leading separator once, before the first help entry.
                    addSeparator = false;
                    AddSeparator("");
                }
                AddItem("Help/" + type, false, delegate(object obj) { Application.OpenURL(url); }, null);
                addedSomething = true;
            }
        }
        if (addedSomething) AddSeparator("Help/");
        AddItem("Help/All Topics", false, delegate(object obj) { NGUIHelp.Show(); }, null);
    }

    // Menu callback: show help for the clicked object.
    static void OnHelp (object obj) { NGUIHelp.Show(obj); }
    // Menu callback: snap the selected transform to pixel-perfect dimensions.
    static void OnMakePixelPerfect (object obj) { NGUITools.MakePixelPerfect(obj as Transform); }
    // Menu callback: resize the box collider to match the widget.
    static void OnBoxCollider (object obj) { NGUITools.AddWidgetCollider(obj as GameObject); }

    static void OnDelete (object obj)
    {
        GameObject go = obj as GameObject;
        // Move the selection to the parent before destroying the object.
        Selection.activeGameObject = go.transform.parent.gameObject;
#if UNITY_3_5 || UNITY_4_0 || UNITY_4_1 || UNITY_4_2
        NGUITools.Destroy(go);
#else
        // Newer Unity versions support undoable destruction.
        Undo.DestroyObjectImmediate(go);
#endif
    }

    /// <summary>
    /// Add a new disabled context menu entry.
    /// </summary>

    static public void AddDisabledItem (string item)
    {
        if (mMenu == null) mMenu = new GenericMenu();
        mMenu.AddDisabledItem(new GUIContent(item));
    }

    /// <summary>
    /// Add a separator to the menu.
    /// </summary>

    static public void AddSeparator (string path)
    {
        if (mMenu == null) mMenu = new GenericMenu();

        // For some weird reason adding separators on OSX causes the entire menu to be disabled. Wtf?
        if (Application.platform != RuntimePlatform.OSXEditor)
            mMenu.AddSeparator(path);
    }

    /// <summary>
    /// Show the context menu with all the added items.
    /// </summary>

    static public void Show ()
    {
        if (mMenu != null)
        {
            mMenu.ShowAsContext();
            // Builder state is single-use: reset after showing.
            mMenu = null;
            mEntries.Clear();
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.ComponentModel.Composition;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.CodeFixes;
using Microsoft.CodeAnalysis.CodeFixes.Suppression;
using Microsoft.CodeAnalysis.CodeRefactorings;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Editor.Shared.Extensions;
using Microsoft.CodeAnalysis.Editor.Shared.Options;
using Microsoft.CodeAnalysis.Editor.Shared.SuggestionSupport;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.CodeAnalysis.Text;
using Microsoft.VisualStudio.Language.Intellisense;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Editor;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.Editor.Implementation.Suggestions
{
    /// <summary>
    /// MEF-exported provider that creates a light-bulb <see cref="ISuggestedActionsSource"/>
    /// per text view/buffer, surfacing Roslyn code fixes and refactorings to the editor.
    /// </summary>
    [Export(typeof(ISuggestedActionsSourceProvider))]
    [VisualStudio.Utilities.ContentType(ContentTypeNames.RoslynContentType)]
    [VisualStudio.Utilities.Name("Roslyn Code Fix")]
    [VisualStudio.Utilities.Order]
    internal class SuggestedActionsSourceProvider : ISuggestedActionsSourceProvider
    {
        // Per-language telemetry ids reported via Source.TryGetTelemetryId.
        private static readonly Guid s_CSharpSourceGuid = new Guid("b967fea8-e2c3-4984-87d4-71a38f49e16a");
        private static readonly Guid s_visualBasicSourceGuid = new Guid("4de30e93-3e0c-40c2-a4ba-1124da4539f6");

        // Sentinel meaning "no solution version has been reported yet".
        private const int InvalidSolutionVersion = -1;

        private readonly ICodeRefactoringService _codeRefactoringService;
        private readonly IDiagnosticAnalyzerService _diagnosticService;
        private readonly ICodeFixService _codeFixService;
        private readonly ICodeActionEditHandlerService _editHandler;
        private readonly IAsynchronousOperationListener _listener;

        [ImportingConstructor]
        public SuggestedActionsSourceProvider(
            ICodeRefactoringService codeRefactoringService,
            IDiagnosticAnalyzerService diagnosticService,
            ICodeFixService codeFixService,
            ICodeActionEditHandlerService editHandler,
            [ImportMany] IEnumerable<Lazy<IAsynchronousOperationListener, FeatureMetadata>> asyncListeners)
        {
            _codeRefactoringService = codeRefactoringService;
            _diagnosticService = diagnosticService;
            _codeFixService = codeFixService;
            _editHandler = editHandler;
            // Aggregate all LightBulb async-operation listeners (used by test hooks to track pending work).
            _listener = new AggregateAsynchronousOperationListener(asyncListeners, FeatureAttribute.LightBulb);
        }

        /// <summary>Creates a new per-view suggested-actions source.</summary>
        public ISuggestedActionsSource CreateSuggestedActionsSource(ITextView textView, ITextBuffer textBuffer)
        {
            Contract.ThrowIfNull(textView);
            Contract.ThrowIfNull(textBuffer);

            return new Source(this, textView, textBuffer);
        }

        private class Source : ForegroundThreadAffinitizedObject, ISuggestedActionsSource
        {
            // state that will be only reset when source is disposed.
            private SuggestedActionsSourceProvider _owner;
            private ITextView _textView;
            private ITextBuffer _subjectBuffer;
            private WorkspaceRegistration _registration;

            // mutable state
            private Workspace _workspace;
            private int _lastSolutionVersionReported;

            public Source(SuggestedActionsSourceProvider owner, ITextView textView, ITextBuffer textBuffer)
            {
                _owner = owner;
                _textView = textView;
                _textView.Closed += OnTextViewClosed;
                _subjectBuffer = textBuffer;
                _registration = Workspace.GetWorkspaceRegistration(textBuffer.AsTextContainer());

                _lastSolutionVersionReported = InvalidSolutionVersion;
                // Listen for diagnostic updates so SuggestedActionsChanged can be raised.
                var updateSource = (IDiagnosticUpdateSource)_owner._diagnosticService;
                updateSource.DiagnosticsUpdated += OnDiagnosticsUpdated;

                if (_registration.Workspace != null)
                {
                    _workspace = _registration.Workspace;
                    _workspace.DocumentActiveContextChanged += OnActiveContextChanged;
                }

                _registration.WorkspaceChanged += OnWorkspaceChanged;
            }

            public event EventHandler<EventArgs> SuggestedActionsChanged;

            /// <summary>
            /// Maps the buffer's current project language to a fixed telemetry guid
            /// (C# / VB only); returns false for anything else or when no document is available.
            /// </summary>
            public bool TryGetTelemetryId(out Guid telemetryId)
            {
                telemetryId = default(Guid);

                var workspace = _workspace;
                if (workspace == null || _subjectBuffer == null)
                {
                    return false;
                }

                var documentId = workspace.GetDocumentIdInCurrentContext(_subjectBuffer.AsTextContainer());
                if (documentId == null)
                {
                    return false;
                }

                var project = workspace.CurrentSolution.GetProject(documentId.ProjectId);
                if (project == null)
                {
                    return false;
                }

                switch (project.Language)
                {
                    case LanguageNames.CSharp:
                        telemetryId = s_CSharpSourceGuid;
                        return true;
                    case LanguageNames.VisualBasic:
                        telemetryId = s_visualBasicSourceGuid;
                        return true;
                    default:
                        return false;
                }
            }

            /// <summary>
            /// Computes the suggested action sets (fixes then refactorings) for the given span.
            /// Must run on the foreground thread; blocks on the underlying async services.
            /// </summary>
            public IEnumerable<SuggestedActionSet> GetSuggestedActions(ISuggestedActionCategorySet requestedActionCategories, SnapshotSpan range, CancellationToken cancellationToken)
            {
                AssertIsForeground();

                using (Logger.LogBlock(FunctionId.SuggestedActions_GetSuggestedActions, cancellationToken))
                {
                    var documentAndSnapshot = GetMatchingDocumentAndSnapshotAsync(range.Snapshot, cancellationToken).WaitAndGetResult(cancellationToken);
                    if (!documentAndSnapshot.HasValue)
                    {
                        // this is here to fail test and see why it is failed.
                        Trace.WriteLine("given range is not current");
                        return null;
                    }

                    var document = documentAndSnapshot.Value.Item1;
                    var workspace = document.Project.Solution.Workspace;
                    var supportSuggestion = workspace.Services.GetService<IDocumentSupportsSuggestionService>();

                    var fixes = GetCodeFixes(supportSuggestion, requestedActionCategories, workspace, document, range, cancellationToken);
                    var refactorings = GetRefactorings(supportSuggestion, requestedActionCategories, workspace, document, range, cancellationToken);

                    // Either list may be null; concatenate only when both exist.
                    return (fixes == null) ? refactorings : (refactorings == null) ? fixes : fixes.Concat(refactorings);
                }
            }

            /// <summary>
            /// Gets code-fix action sets for the span, or null when fixes are not
            /// supported/requested for this document.
            /// </summary>
            private IEnumerable<SuggestedActionSet> GetCodeFixes(
                IDocumentSupportsSuggestionService supportSuggestion,
                ISuggestedActionCategorySet requestedActionCategories,
                Workspace workspace,
                Document document,
                SnapshotSpan range,
                CancellationToken cancellationToken)
            {
                if (_owner._codeFixService != null && supportSuggestion.SupportsCodeFixes(document) &&
                    requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.CodeFix))
                {
                    // We only include suppressions if lightbulb is asking for everything.
                    // If the light bulb is only asking for code fixes, then we don't include suppressions.
                    var includeSuppressionFixes = requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Any);

                    var fixes = Task.Run(
                        async () => await _owner._codeFixService.GetFixesAsync(
                            document, range.Span.ToTextSpan(), includeSuppressionFixes, cancellationToken).ConfigureAwait(false),
                        cancellationToken).WaitAndGetResult(cancellationToken);

                    return OrganizeFixes(workspace, fixes, hasSuppressionFixes: includeSuppressionFixes);
                }

                return null;
            }

            /// <summary>
            /// Arrange fixes into groups based on the issue (diagnostic being fixed) and prioritize these groups.
            /// </summary>
            private IEnumerable<SuggestedActionSet> OrganizeFixes(Workspace workspace, IEnumerable<CodeFixCollection> fixCollections, bool hasSuppressionFixes)
            {
                var map = ImmutableDictionary.CreateBuilder<Diagnostic, IList<SuggestedAction>>();
                var order = ImmutableArray.CreateBuilder<Diagnostic>();

                // First group fixes by issue (diagnostic).
                GroupFixes(workspace, fixCollections, map, order, hasSuppressionFixes);

                // Then prioritize between the groups.
                return PrioritizeFixGroups(map.ToImmutable(), order.ToImmutable());
            }

            /// <summary>
            /// Groups fixes by the diagnostic being addressed by each fix.
            /// </summary>
            private void GroupFixes(Workspace workspace, IEnumerable<CodeFixCollection> fixCollections, IDictionary<Diagnostic, IList<SuggestedAction>> map, IList<Diagnostic> order, bool hasSuppressionFixes)
            {
                foreach (var fixCollection in fixCollections)
                {
                    var fixes = fixCollection.Fixes;
                    var fixCount = fixes.Length;

                    // Factory for the nested "Fix All" action set attached to each fix.
                    Func<CodeAction, SuggestedActionSet> getFixAllSuggestedActionSet =
                        codeAction => CodeFixSuggestedAction.GetFixAllSuggestedActionSet(codeAction, fixCount, fixCollection.FixAllContext, workspace, _subjectBuffer, _owner._editHandler);

                    foreach (var fix in fixes)
                    {
                        // Suppression fixes are handled below.
                        if (!(fix.Action is SuppressionCodeAction))
                        {
                            var suggestedAction = new CodeFixSuggestedAction(workspace, _subjectBuffer, _owner._editHandler, fix, fixCollection.Provider, getFixAllSuggestedActionSet(fix.Action));
                            AddFix(fix, suggestedAction, map, order);
                        }
                    }

                    if (hasSuppressionFixes)
                    {
                        // Add suppression fixes to the end of a given SuggestedActionSet so that they always show up last in a group.
                        foreach (var fix in fixes)
                        {
                            if (fix.Action is SuppressionCodeAction)
                            {
                                var suggestedAction = new SuppressionSuggestedAction(workspace, _subjectBuffer, _owner._editHandler, fix, fixCollection.Provider, getFixAllSuggestedActionSet);
                                AddFix(fix, suggestedAction, map, order);
                            }
                        }
                    }
                }
            }

            /// <summary>Adds a fix's action to the per-diagnostic group, creating the group on first use.</summary>
            private static void AddFix(CodeFix fix, SuggestedAction suggestedAction, IDictionary<Diagnostic, IList<SuggestedAction>> map, IList<Diagnostic> order)
            {
                var diag = fix.PrimaryDiagnostic;
                if (!map.ContainsKey(diag))
                {
                    // Remember the order of the keys for the 'map' dictionary.
                    order.Add(diag);
                    map[diag] = ImmutableArray.CreateBuilder<SuggestedAction>();
                }

                map[diag].Add(suggestedAction);
            }

            /// <summary>
            /// Return prioritized set of fix groups such that fix group for suppression always show up at the bottom of the list.
            /// </summary>
            /// <remarks>
            /// Fix groups are returned in priority order determined based on <see cref="ExtensionOrderAttribute"/>.
            /// Priority for all <see cref="SuggestedActionSet"/>s containing fixes is set to <see cref="SuggestedActionSetPriority.Medium"/> by default.
            /// The only exception is the case where a <see cref="SuggestedActionSet"/> only contains suppression fixes -
            /// the priority of such <see cref="SuggestedActionSet"/>s is set to <see cref="SuggestedActionSetPriority.None"/> so that suppression fixes
            /// always show up last after all other fixes (and refactorings) for the selected line of code.
            /// </remarks>
            private static IEnumerable<SuggestedActionSet> PrioritizeFixGroups(IDictionary<Diagnostic, IList<SuggestedAction>> map, IList<Diagnostic> order)
            {
                var sets = ImmutableArray.CreateBuilder<SuggestedActionSet>();
                foreach (var diag in order)
                {
                    var fixes = map[diag];
                    // Suppression-only groups sink to the bottom via priority None.
                    var priority = fixes.All(s => s is SuppressionSuggestedAction) ? SuggestedActionSetPriority.None : SuggestedActionSetPriority.Medium;
                    var applicableToSpan = new Span(diag.Location.SourceSpan.Start, diag.Location.SourceSpan.Length);
                    sets.Add(new SuggestedActionSet(fixes, priority, applicableToSpan));
                }

                return sets.ToImmutable();
            }

            /// <summary>
            /// Gets refactoring action sets for the current selection, or null when refactorings
            /// are disabled, unsupported, not requested, or the selection is not usable.
            /// </summary>
            private IEnumerable<SuggestedActionSet> GetRefactorings(
                IDocumentSupportsSuggestionService supportSuggestion,
                ISuggestedActionCategorySet requestedActionCategories,
                Workspace workspace,
                Document document,
                SnapshotSpan range,
                CancellationToken cancellationToken)
            {
                var optionService = workspace.Services.GetService<IOptionService>();
                if (optionService.GetOption(EditorComponentOnOffOptions.CodeRefactorings) &&
                    _owner._codeRefactoringService != null &&
                    supportSuggestion.SupportsRefactorings(document) &&
                    requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Refactoring))
                {
                    // Get the selection while on the UI thread.
                    var selection = TryGetCodeRefactoringSelection(_subjectBuffer, _textView, range);
                    if (!selection.HasValue)
                    {
                        // this is here to fail test and see why it is failed.
                        Trace.WriteLine("given range is not current");
                        return null;
                    }

                    var refactorings = Task.Run(
                        async () => await _owner._codeRefactoringService.GetRefactoringsAsync(
                            document, selection.Value, cancellationToken).ConfigureAwait(false),
                        cancellationToken).WaitAndGetResult(cancellationToken);

                    return refactorings.Select(r => OrganizeRefactorings(workspace, r));
                }

                return null;
            }

            /// <summary>
            /// Arrange refactorings into groups.
            /// </summary>
            /// <remarks>
            /// Refactorings are returned in priority order determined based on <see cref="ExtensionOrderAttribute"/>.
            /// Priority for all <see cref="SuggestedActionSet"/>s containing refactorings is set to <see cref="SuggestedActionSetPriority.Low"/>
            /// and should show up after fixes but before suppression fixes in the light bulb menu.
            /// </remarks>
            private SuggestedActionSet OrganizeRefactorings(Workspace workspace, CodeRefactoring refactoring)
            {
                var refactoringSuggestedActions = ImmutableArray.CreateBuilder<SuggestedAction>();
                foreach (var a in refactoring.Actions)
                {
                    refactoringSuggestedActions.Add(
                        new CodeRefactoringSuggestedAction(
                            workspace, _subjectBuffer, _owner._editHandler, a, refactoring.Provider));
                }

                return new SuggestedActionSet(refactoringSuggestedActions.ToImmutable(), SuggestedActionSetPriority.Low);
            }

            /// <summary>
            /// Cheap asynchronous check used by the editor to decide whether to show the light bulb:
            /// true when the span has any applicable fix or refactoring.
            /// </summary>
            public async Task<bool> HasSuggestedActionsAsync(ISuggestedActionCategorySet requestedActionCategories, SnapshotSpan range, CancellationToken cancellationToken)
            {
                // Explicitly hold onto below fields in locals and use these locals throughout this code path to avoid crashes
                // if these fields happen to be cleared by Dispose() below. This is required since this code path involves
                // code that can run asynchronously from background thread.
                var view = _textView;
                var buffer = _subjectBuffer;
                var provider = _owner;

                if (view == null || buffer == null || provider == null)
                {
                    return false;
                }

                using (var asyncToken = provider._listener.BeginAsyncOperation("HasSuggestedActionsAsync"))
                {
                    var documentAndSnapshot = await GetMatchingDocumentAndSnapshotAsync(range.Snapshot, cancellationToken).ConfigureAwait(false);
                    if (!documentAndSnapshot.HasValue)
                    {
                        // this is here to fail test and see why it is failed.
                        Trace.WriteLine("given range is not current");
                        return false;
                    }

                    var document = documentAndSnapshot.Value.Item1;
                    var workspace = document.Project.Solution.Workspace;
                    var supportSuggestion = workspace.Services.GetService<IDocumentSupportsSuggestionService>();

                    return
                        await HasFixesAsync(
                            supportSuggestion, requestedActionCategories, provider, document, range,
                            cancellationToken).ConfigureAwait(false) ||
                        await HasRefactoringsAsync(
                            supportSuggestion, requestedActionCategories, provider, document, buffer, view, range,
                            cancellationToken).ConfigureAwait(false);
                }
            }

            /// <summary>True when at least one diagnostic in the span has a fix.</summary>
            private async Task<bool> HasFixesAsync(
                IDocumentSupportsSuggestionService supportSuggestion,
                ISuggestedActionCategorySet requestedActionCategories,
                SuggestedActionsSourceProvider provider,
                Document document,
                SnapshotSpan range,
                CancellationToken cancellationToken)
            {
                if (provider._codeFixService != null && supportSuggestion.SupportsCodeFixes(document) &&
                    requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.CodeFix))
                {
                    // We only consider suppressions if lightbulb is asking for everything.
                    // If the light bulb is only asking for code fixes, then we don't consider suppressions.
                    var considerSuppressionFixes = requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Any);
                    var result = await Task.Run(
                        async () => await provider._codeFixService.GetFirstDiagnosticWithFixAsync(
                            document, range.Span.ToTextSpan(), considerSuppressionFixes, cancellationToken).ConfigureAwait(false),
                        cancellationToken).ConfigureAwait(false);

                    if (result.HasFix)
                    {
                        Logger.Log(FunctionId.SuggestedActions_HasSuggestedActionsAsync);
                        return true;
                    }

                    if (result.PartialResult)
                    {
                        // reset solution version number so that we can raise suggested action changed event
                        Volatile.Write(ref _lastSolutionVersionReported, InvalidSolutionVersion);
                        return false;
                    }
                }

                return false;
            }

            /// <summary>
            /// True when the current single selection has an applicable refactoring.
            /// Reads the selection on the UI thread (marshaling there when needed).
            /// </summary>
            private async Task<bool> HasRefactoringsAsync(
                IDocumentSupportsSuggestionService supportSuggestion,
                ISuggestedActionCategorySet requestedActionCategories,
                SuggestedActionsSourceProvider provider,
                Document document,
                ITextBuffer buffer,
                ITextView view,
                SnapshotSpan range,
                CancellationToken cancellationToken)
            {
                var optionService = document.Project.Solution.Workspace.Services.GetService<IOptionService>();
                if (optionService.GetOption(EditorComponentOnOffOptions.CodeRefactorings) &&
                    provider._codeRefactoringService != null &&
                    supportSuggestion.SupportsRefactorings(document) &&
                    requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Refactoring))
                {
                    TextSpan? selection = null;
                    if (IsForeground())
                    {
                        // This operation needs to happen on UI thread because it needs to access textView.Selection.
                        selection = TryGetCodeRefactoringSelection(buffer, view, range);
                    }
                    else
                    {
                        await InvokeBelowInputPriority(() =>
                        {
                            // This operation needs to happen on UI thread because it needs to access textView.Selection.
                            selection = TryGetCodeRefactoringSelection(buffer, view, range);
                        }).ConfigureAwait(false);
                    }

                    if (!selection.HasValue)
                    {
                        // this is here to fail test and see why it is failed.
                        Trace.WriteLine("given range is not current");
                        return false;
                    }

                    return await Task.Run(
                        async () => await provider._codeRefactoringService.HasRefactoringsAsync(
                            document, selection.Value, cancellationToken).ConfigureAwait(false),
                        cancellationToken).ConfigureAwait(false);
                }

                return false;
            }

            /// <summary>
            /// Maps the view's selection down to the subject buffer and returns it as a TextSpan,
            /// or null when there is not exactly one writable selection intersecting the light-bulb span.
            /// </summary>
            private static TextSpan? TryGetCodeRefactoringSelection(ITextBuffer buffer, ITextView view, SnapshotSpan range)
            {
                var selectedSpans = view.Selection.SelectedSpans
                    .SelectMany(ss => view.BufferGraph.MapDownToBuffer(ss, SpanTrackingMode.EdgeExclusive, buffer))
                    .Where(ss => !view.IsReadOnlyOnSurfaceBuffer(ss))
                    .ToList();

                // We only support refactorings when there is a single selection in the document.
                if (selectedSpans.Count != 1)
                {
                    return null;
                }

                var translatedSpan = selectedSpans[0].TranslateTo(range.Snapshot, SpanTrackingMode.EdgeInclusive);

                // We only support refactorings when selected span intersects with the span that the light bulb is asking for.
                if (!translatedSpan.IntersectsWith(range))
                {
                    return null;
                }

                return translatedSpan.Span.ToTextSpan();
            }

            /// <summary>
            /// Resolves the Roslyn document corresponding to the given editor snapshot,
            /// returning null when the snapshot no longer matches the workspace's current text.
            /// </summary>
            private static async Task<ValueTuple<Document, ITextSnapshot>?> GetMatchingDocumentAndSnapshotAsync(ITextSnapshot givenSnapshot, CancellationToken cancellationToken)
            {
                var buffer = givenSnapshot.TextBuffer;
                if (buffer == null)
                {
                    return null;
                }

                var workspace = buffer.GetWorkspace();
                if (workspace == null)
                {
                    return null;
                }

                var documentId = workspace.GetDocumentIdInCurrentContext(buffer.AsTextContainer());
                if (documentId == null)
                {
                    return null;
                }

                var document = workspace.CurrentSolution.GetDocument(documentId);
                if (document == null)
                {
                    return null;
                }

                var sourceText = await document.GetTextAsync(cancellationToken).ConfigureAwait(false);
                cancellationToken.ThrowIfCancellationRequested();

                var snapshot = sourceText.FindCorrespondingEditorTextSnapshot();
                // ReiteratedVersionNumber mismatch means the buffer has changed since this snapshot.
                if (snapshot == null || snapshot.Version.ReiteratedVersionNumber != givenSnapshot.Version.ReiteratedVersionNumber)
                {
                    return null;
                }

                return ValueTuple.Create(document, snapshot);
            }

            // NOTE(review): the following member is truncated in this chunk; its body continues below.
            private void 
OnTextViewClosed(object sender, EventArgs e) { Dispose(); } private void OnWorkspaceChanged(object sender, EventArgs e) { // REVIEW: this event should give both old and new workspace as argument so that // one doesnt need to hold onto workspace in field. // remove existing event registration if (_workspace != null) { _workspace.DocumentActiveContextChanged -= OnActiveContextChanged; } // REVIEW: why one need to get new workspace from registration? why not just pass in the new workspace? // add new event registration _workspace = _registration.Workspace; if (_workspace != null) { _workspace.DocumentActiveContextChanged += OnActiveContextChanged; } } private void OnActiveContextChanged(object sender, DocumentEventArgs e) { // REVIEW: it would be nice for changed event to pass in both old and new document. OnSuggestedActionsChanged(e.Document.Project.Solution.Workspace, e.Document.Id, e.Document.Project.Solution.WorkspaceVersion); } private void OnDiagnosticsUpdated(object sender, DiagnosticsUpdatedArgs e) { // document removed case. no reason to raise event if (e.Solution == null) { return; } OnSuggestedActionsChanged(e.Workspace, e.DocumentId, e.Solution.WorkspaceVersion); } private void OnSuggestedActionsChanged(Workspace currentWorkspace, DocumentId currentDocumentId, int solutionVersion, DiagnosticsUpdatedArgs args = null) { // Explicitly hold onto the _subjectBuffer field in a local and use this local in this function to avoid crashes // if this field happens to be cleared by Dispose() below. This is required since this code path involves code // that can run on background thread. var buffer = _subjectBuffer; if (buffer == null) { return; } var workspace = buffer.GetWorkspace(); // workspace is not ready, nothing to do. 
if (workspace == null || workspace != currentWorkspace) { return; } if (currentDocumentId != workspace.GetDocumentIdInCurrentContext(buffer.AsTextContainer()) || solutionVersion == Volatile.Read(ref _lastSolutionVersionReported)) { return; } // make sure we only raise event once for same solution version. // light bulb controller will call us back to find out new information var changed = this.SuggestedActionsChanged; if (changed != null) { changed(this, EventArgs.Empty); } Volatile.Write(ref _lastSolutionVersionReported, solutionVersion); } public void Dispose() { if (_owner != null) { var updateSource = (IDiagnosticUpdateSource)_owner._diagnosticService; updateSource.DiagnosticsUpdated -= OnDiagnosticsUpdated; _owner = null; } if (_workspace != null) { _workspace.DocumentActiveContextChanged -= OnActiveContextChanged; _workspace = null; } if (_registration != null) { _registration.WorkspaceChanged -= OnWorkspaceChanged; _registration = null; } if (_textView != null) { _textView.Closed -= OnTextViewClosed; _textView = null; } if (_subjectBuffer != null) { _subjectBuffer = null; } } } } }
using System;
using NUnit.Framework;
using StructureMap.Configuration.DSL;
using StructureMap.Interceptors;
using StructureMap.Testing.Widget3;

namespace StructureMap.Testing.Configuration.DSL
{
    /// <summary>
    /// Verifies that interception policies registered against a plugin type
    /// (InterceptWith / EnrichWith / OnCreation) are applied to every instance
    /// of that type, regardless of how the instance was registered.
    /// </summary>
    [TestFixture]
    public class InterceptAllInstancesOfPluginTypeTester : Registry
    {
        #region Setup/Teardown

        [SetUp]
        public void SetUp()
        {
            _lastService = null;
            _manager = null;

            // Baseline registrations shared by every test: four named IService instances
            // created through four different registration styles.
            _defaultRegistry = (registry =>
            {
                registry.ForRequestedType<IService>().AddInstances(x =>
                {
                    x.OfConcreteType<ColorService>().WithName("Red").WithProperty("color").EqualTo("Red");

                    x.Object(new ColorService("Yellow")).WithName("Yellow");

                    x.ConstructedBy(() => new ColorService("Purple")).WithName("Purple");

                    x.OfConcreteType<ColorService>().WithName("Decorated").WithProperty("color").EqualTo("Orange");
                });
            });
        }

        #endregion

        private IService _lastService;
        private IContainer _manager;
        private Action<Registry> _defaultRegistry;

        /// <summary>
        /// Lazily builds the container (baseline + test-specific registrations)
        /// and resolves the named IService instance from it.
        /// </summary>
        private IService getService(string name, Action<Registry> action)
        {
            if (_manager == null)
            {
                _manager = new Container(registry =>
                {
                    _defaultRegistry(registry);
                    action(registry);
                });
            }

            return _manager.GetInstance<IService>(name);
        }

        /// <summary>
        /// Records the last object that passed through interception so tests can inspect it.
        /// </summary>
        public class MockInterceptor : InstanceInterceptor
        {
            public object Target { get; set; }

            public object Process(object target, IContext context)
            {
                Target = target;
                return target;
            }
        }

        [Test]
        public void custom_interceptor_for_all()
        {
            var interceptor = new MockInterceptor();
            IService service = getService("Green", r =>
            {
                r.ForRequestedType<IService>().InterceptWith(interceptor)
                    .AddInstances(x => { x.ConstructedBy(() => new ColorService("Green")).WithName("Green"); });
            });

            // The interceptor passes the instance through, so it must have seen the resolved object itself.
            interceptor.Target.ShouldBeTheSameAs(service);
        }

        [Test]
        public void EnrichForAll()
        {
            IService green = getService("Green", r =>
            {
                r.ForRequestedType<IService>().EnrichWith(s => new DecoratorService(s))
                    .AddInstances(x => { x.ConstructedBy(() => new ColorService("Green")).WithName("Green"); });
            });

            // EnrichWith should wrap the original ColorService in a DecoratorService.
            green.ShouldBeOfType<DecoratorService>()
                .Inner.ShouldBeOfType<ColorService>().Color.ShouldEqual("Green");
        }

        [Test]
        public void OnStartupForAll()
        {
            Action<Registry> action = registry =>
            {
                registry.ForRequestedType<IService>().OnCreation(s => _lastService = s)
                    .AddInstances(x => { x.ConstructedBy(() => new ColorService("Green")).WithName("Green"); });
            };

            IService red = getService("Red", action);
            Assert.AreSame(red, _lastService);

            IService purple = getService("Purple", action);
            Assert.AreSame(purple, _lastService);

            IService green = getService("Green", action);
            Assert.AreSame(green, _lastService);

            // FIX: was Assert.AreEqual — the OnCreation callback stores the created instance
            // itself, so reference identity is the correct (and consistent) assertion.
            IService yellow = getService("Yellow", action);
            Assert.AreSame(yellow, _lastService);
        }
    }

    /// <summary>
    /// Same interception coverage as above, but with the baseline instances
    /// registered via the SmartInstance API (WithCtorArg instead of WithProperty).
    /// </summary>
    [TestFixture]
    public class InterceptAllInstancesOfPluginTypeTester_with_SmartInstance : Registry
    {
        #region Setup/Teardown

        [SetUp]
        public void SetUp()
        {
            _lastService = null;
            _manager = null;

            _defaultRegistry = (registry =>
                registry.ForRequestedType<IService>().AddInstances(x =>
                {
                    x.OfConcreteType<ColorService>().WithName("Red")
                        .WithCtorArg("color").EqualTo("Red");

                    x.Object(new ColorService("Yellow")).WithName("Yellow");

                    x.ConstructedBy(() => new ColorService("Purple")).WithName("Purple");

                    x.OfConcreteType<ColorService>().WithName("Decorated").WithCtorArg("color").EqualTo(
                        "Orange");
                }));
        }

        #endregion

        private IService _lastService;
        private IContainer _manager;
        private Action<Registry> _defaultRegistry;

        /// <summary>
        /// Lazily builds the container (baseline + test-specific registrations)
        /// and resolves the named IService instance from it.
        /// </summary>
        private IService getService(Action<Registry> action, string name)
        {
            if (_manager == null)
            {
                _manager = new Container(registry =>
                {
                    _defaultRegistry(registry);
                    action(registry);
                });
            }

            return _manager.GetInstance<IService>(name);
        }

        [Test]
        public void EnrichForAll()
        {
            Action<Registry> action = r =>
            {
                r.ForRequestedType<IService>().EnrichWith(s => new DecoratorService(s))
                    .AddInstances(x => { x.ConstructedBy(() => new ColorService("Green")).WithName("Green"); });
            };

            IService green = getService(action, "Green");

            var decoratorService = (DecoratorService) green;
            var innerService = (ColorService) decoratorService.Inner;
            Assert.AreEqual("Green", innerService.Color);
        }

        [Test]
        public void OnStartupForAll()
        {
            Action<Registry> action = r =>
            {
                r.ForRequestedType<IService>().OnCreation(s => _lastService = s)
                    .AddInstances(x => { x.ConstructedBy(() => new ColorService("Green")).WithName("Green"); });
            };

            IService red = getService(action, "Red");
            Assert.AreSame(red, _lastService);

            IService purple = getService(action, "Purple");
            Assert.AreSame(purple, _lastService);

            IService green = getService(action, "Green");
            Assert.AreSame(green, _lastService);

            // FIX: was Assert.AreEqual — consistent reference-identity assertion (see sibling fixture).
            IService yellow = getService(action, "Yellow");
            Assert.AreSame(yellow, _lastService);
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Chronos.Controls;
using UnityEditor;
using UnityEngine;
using UnityObject = UnityEngine.Object;

namespace Chronos.Reflection
{
    // Base property drawer for UnityMember fields: renders a popup of reflected members
    // (on the target GameObject's components or a ScriptableObject) and writes the chosen
    // component/name pair back into the serialized property.
    [CustomPropertyDrawer(typeof(UnityMember))]
    public abstract class UnityMemberDrawer<TMember> : TargetedDrawer where TMember : UnityMember
    {
        #region Fields

        /// <summary>
        /// The filter attribute on the inspected field.
        /// </summary>
        protected FilterAttribute filter;

        /// <summary>
        /// The inspected property, of type UnityMember.
        /// </summary>
        protected SerializedProperty property;

        /// <summary>
        /// The UnityMember.component of the inspected property, of type string.
        /// </summary>
        protected SerializedProperty componentProperty;

        /// <summary>
        /// The UnityMember.name of the inspected property, of type string.
        /// </summary>
        protected SerializedProperty nameProperty;

        /// <summary>
        /// The targeted Unity Objects.
        /// </summary>
        protected UnityObject[] targets;

        /// <summary>
        /// The type of targeted objects.
        /// </summary>
        protected UnityObjectType targetType;

        #endregion

        /// <inheritdoc />
        protected override void Update(SerializedProperty property)
        {
            // Update the targeted drawer
            base.Update(property);

            // Assign the property and sub-properties
            this.property = property;
            componentProperty = property.FindPropertyRelative("_component");
            nameProperty = property.FindPropertyRelative("_name");

            // Fetch the filter
            filter = (FilterAttribute)fieldInfo.GetCustomAttributes(typeof(FilterAttribute), true).FirstOrDefault() ?? DefaultFilter();

            // Find the targets
            targets = FindTargets();
            targetType = DetermineTargetType();
        }

        /// <inheritdoc />
        protected override void RenderMemberControl(Rect position)
        {
            // Other Targets
            // Some Unity Objects, like Assets, are not supported by the drawer.
            // Just display an error message to let the user change their target.
            if (targetType == UnityObjectType.Other)
            {
                EditorGUI.HelpBox(position, "Unsupported Unity Object type.", MessageType.None);
                return;
            }

            // Display a list of all available reflected members in a popup.
            var options = new List<PopupOption<TMember>>();

            TMember value = GetValue();

            PopupOption<TMember> selectedOption = null;
            PopupOption<TMember> noneOption = new PopupOption<TMember>(null, string.Format("No {0}", memberLabel));

            if (targetType == UnityObjectType.GameObject)
            {
                // Check if all targets have a GameObject (none are empty).
                // If they do, display all members of the GameObject type.
                if (HasSharedGameObject())
                {
                    var gameObjectOptions = GetMemberOptions(typeof(GameObject));

                    foreach (var gameObjectOption in gameObjectOptions)
                    {
                        // Prefix label by GameObject for popup clarity.
                        gameObjectOption.label = string.Format("GameObject/{0}", gameObjectOption.label);
                        options.Add(gameObjectOption);
                    }
                }

                // Find all shared component types across targets.
                // Display all members of each one found.
                foreach (Type componentType in GetSharedComponentTypes())
                {
                    var componentOptions = GetMemberOptions(componentType, componentType.Name);

                    foreach (var componentOption in componentOptions)
                    {
                        // Prefix label and option by component type for clear distinction.
                        componentOption.label = string.Format("{0}/{1}", componentType.Name, componentOption.label);
                        options.Add(componentOption);
                    }
                }

                // Determine which option is currently selected.
                if (value != null)
                {
                    string label;

                    if (value.component == null)
                    {
                        label = string.Format("GameObject.{0}", value.name);
                    }
                    else
                    {
                        label = string.Format("{0}.{1}", value.component, value.name);
                    }

                    // For methods, append the parameter list to disambiguate overloads.
                    UnityMethod method = value as UnityMethod;

                    if (method != null)
                    {
                        string parameterString = string.Join(", ", method.parameterTypes.Select(t => t.PrettyName()).ToArray());
                        label += string.Format(" ({0})", parameterString);
                    }

                    TMember valueInOptions = options.Select(option => option.value).FirstOrDefault(member => member.Corresponds(value));

                    if (valueInOptions != null)
                    {
                        selectedOption = new PopupOption<TMember>(valueInOptions, label);
                    }
                    else
                    {
                        // Current value is not among the discoverable options (e.g. missing component);
                        // still show it so the user sees what is configured.
                        selectedOption = new PopupOption<TMember>(value, label);
                    }
                }
            }
            else if (targetType == UnityObjectType.ScriptableObject)
            {
                // ScriptableObject Target
                // Make sure all targets share the same ScriptableObject Type.
                // If they do, display all members of that type.
                Type scriptableObjectType = GetSharedScriptableObjectType();

                if (scriptableObjectType != null)
                {
                    options.AddRange(GetMemberOptions(scriptableObjectType));

                    // Determine which option is currently selected.
                    if (value != null)
                    {
                        selectedOption = options.Find(o => o.value.Corresponds(value));

                        if (selectedOption == null)
                        {
                            selectedOption = new PopupOption<TMember>(value, value.name);
                        }
                    }
                }
            }

            // Make sure the callback uses the property of this drawer, not at its later value.
            var propertyNow = property;

            bool enabled = targetType != UnityObjectType.None;

            if (!enabled) EditorGUI.BeginDisabledGroup(true);

            PopupGUI<TMember>.Render
            (
                position,
                newValue =>
                {
                    // Re-sync drawer state before writing, then persist the chosen member.
                    Update(propertyNow);
                    SetValue(newValue);
                    propertyNow.serializedObject.ApplyModifiedProperties();
                },
                options,
                selectedOption,
                noneOption,
                hasMultipleDifferentValues
            );

            if (!enabled) EditorGUI.EndDisabledGroup();
        }

        #region Value

        /// <summary>
        /// Constructs a new instance of the member from the specified component and name.
        /// </summary>
        protected abstract TMember BuildValue(string component, string name);

        /// <summary>
        /// Returns a member constructed from the current parameter values.
        /// </summary>
        /// <returns></returns>
        protected TMember GetValue()
        {
            if (hasMultipleDifferentValues || string.IsNullOrEmpty(nameProperty.stringValue))
            {
                return null;
            }

            string component = componentProperty.stringValue;
            string name = nameProperty.stringValue;

            // Normalize empty strings to null so BuildValue sees a clear "not set" signal.
            if (component == string.Empty) component = null;
            if (name == string.Empty) name = null;

            return BuildValue(component, name);
        }

        /// <summary>
        /// Assigns the property values from a specified member.
        /// </summary>
        protected virtual void SetValue(TMember value)
        {
            if (value != null)
            {
                componentProperty.stringValue = value.component;
                nameProperty.stringValue = value.name;
            }
            else
            {
                // Null member clears both sub-properties.
                componentProperty.stringValue = null;
                nameProperty.stringValue = null;
            }
        }

        /// <summary>
        /// Indicated whether the property has multiple different values.
        /// </summary>
        protected virtual bool hasMultipleDifferentValues
        {
            get
            {
                return componentProperty.hasMultipleDifferentValues || nameProperty.hasMultipleDifferentValues;
            }
        }

        #endregion

        #region Targeting

        /// <summary>
        /// Get the list of targets on the inspected objects.
        /// </summary>
        protected UnityObject[] FindTargets()
        {
            if (isSelfTargeted)
            {
                // In self targeting mode, the targets are the inspected objects themselves.
                return property.serializedObject.targetObjects;
            }
            else
            {
                // In manual targeting mode, the targets the values of each target property.
                return targetProperty.Multiple().Select(p => p.objectReferenceValue).ToArray();
            }
        }

        /// <summary>
        /// Determine the Unity type of the targets.
        /// </summary>
        protected UnityObjectType DetermineTargetType()
        {
            UnityObjectType unityObjectType = UnityObjectType.None;

            foreach (UnityObject targetObject in targets)
            {
                // Null (non-specified) targets don't affect the type
                // If no non-null target is specified, the type will be None
                // as the loop will simply exit.
                if (targetObject == null)
                {
                    continue;
                }

                if (targetObject is GameObject || targetObject is Component)
                {
                    // For GameObjects and Components, the target is either the
                    // GameObject itself, or the one to which the Component belongs.

                    // If a ScriptableObject target was previously found,
                    // return that the targets are of mixed types.
                    if (unityObjectType == UnityObjectType.ScriptableObject)
                    {
                        return UnityObjectType.Mixed;
                    }

                    unityObjectType = UnityObjectType.GameObject;
                }
                else if (targetObject is ScriptableObject)
                {
                    // For ScriptableObjects, the target is simply the
                    // ScriptableObject itself.

                    // If a GameObject target was previously found,
                    // return that the targets are of mixed types.
                    if (unityObjectType == UnityObjectType.GameObject)
                    {
                        return UnityObjectType.Mixed;
                    }

                    unityObjectType = UnityObjectType.ScriptableObject;
                }
                else
                {
                    // Other target types
                    return UnityObjectType.Other;
                }
            }

            return unityObjectType;
        }

        /// <summary>
        /// Determines if the targets all share a GameObject.
        /// </summary>
        public bool HasSharedGameObject()
        {
            return !targets.Contains(null);
        }

        /// <summary>
        /// Determines which types of Components are shared on all GameObject targets.
        /// </summary>
        protected IEnumerable<Type> GetSharedComponentTypes()
        {
            if (targets.Contains(null))
            {
                return Enumerable.Empty<Type>();
            }

            // Collect component types both from GameObject targets and from Component targets' siblings.
            var childrenComponents = targets.OfType<GameObject>().Select(gameObject => gameObject.GetComponents<Component>().Where(c => c != null));
            var siblingComponents = targets.OfType<Component>().Select(component => component.GetComponents<Component>().Where(c => c != null));

            // Keep only the component types present on every target.
            return childrenComponents.Concat(siblingComponents)
                .Select(components => components.Select(component => component.GetType()))
                .IntersectAll()
                .Distinct();
        }

        /// <summary>
        /// Determines which type of ScriptableObject is shared across targets.
        /// Returns null if none are shared.
        /// </summary>
        protected Type GetSharedScriptableObjectType()
        {
            if (targets.Contains(null))
            {
                return null;
            }

            return targets
                .OfType<ScriptableObject>()
                .Select(scriptableObject => scriptableObject.GetType())
                .Distinct()
                .SingleOrDefault(); // Null (default) if multiple or zero
        }

        #endregion

        #region Reflection

        /// <summary>
        /// Gets the list of members available on a type as popup options.
        /// </summary>
        protected List<PopupOption<TMember>> GetMemberOptions(Type type, string component = null)
        {
            return type
                .GetMembers(validBindingFlags)
                .Where(member => validMemberTypes.HasFlag(member.MemberType))
                .Where(ValidateMember)
                .Select(member => GetMemberOption(member, component))
                .ToList();
        }

        // Maps one reflected member to a popup option; subclasses decide how to build the member wrapper.
        protected abstract PopupOption<TMember> GetMemberOption(MemberInfo member, string component);

        #endregion

        #region Filtering

        /// <summary>
        /// The label of a member, displayed in options.
        /// </summary>
        protected virtual string memberLabel
        {
            get
            {
                return "Member";
            }
        }

        /// <summary>
        /// The default applied filter attribute if none is specified.
        /// </summary>
        protected virtual FilterAttribute DefaultFilter()
        {
            return new FilterAttribute();
        }

        /// <summary>
        /// The valid BindingFlags when looking for reflected members.
        /// </summary>
        protected virtual BindingFlags validBindingFlags
        {
            get
            {
                // Build the flags from the filter attribute
                BindingFlags flags = (BindingFlags)0;

                if (filter.Public) flags |= BindingFlags.Public;
                if (filter.NonPublic) flags |= BindingFlags.NonPublic;
                if (filter.Instance) flags |= BindingFlags.Instance;
                if (filter.Static) flags |= BindingFlags.Static;
                if (!filter.Inherited) flags |= BindingFlags.DeclaredOnly;
                if (filter.Static && filter.Inherited) flags |= BindingFlags.FlattenHierarchy;

                return flags;
            }
        }

        /// <summary>
        /// The valid MemberTypes when looking for reflected members.
        /// </summary>
        protected virtual MemberTypes validMemberTypes
        {
            get
            {
                return MemberTypes.All;
            }
        }

        /// <summary>
        /// Determines whether a given MemberInfo should be included in the options.
        /// This check follows the BindingFlags and MemberTypes filtering.
        /// </summary>
        protected virtual bool ValidateMember(MemberInfo member)
        {
            return true;
        }

        /// <summary>
        /// Determines whether a MemberInfo of the given type should be included in the options.
        /// </summary>
        protected virtual bool ValidateMemberType(Type type)
        {
            bool validFamily = false;
            bool validType;

            // Allow type families based on the filter attribute
            TypeFamily families = filter.TypeFamilies;

            if (families.HasFlag(TypeFamily.Array)) validFamily |= type.IsArray;
            if (families.HasFlag(TypeFamily.Class)) validFamily |= type.IsClass;
            if (families.HasFlag(TypeFamily.Enum)) validFamily |= type.IsEnum;
            if (families.HasFlag(TypeFamily.Interface)) validFamily |= type.IsInterface;
            if (families.HasFlag(TypeFamily.Primitive)) validFamily |= type.IsPrimitive;
            if (families.HasFlag(TypeFamily.Reference)) validFamily |= !type.IsValueType;
            if (families.HasFlag(TypeFamily.Value)) validFamily |= (type.IsValueType && type != typeof(void));
            if (families.HasFlag(TypeFamily.Void)) validFamily |= type == typeof(void);

            // Allow types based on the filter attribute
            // If no filter types are specified, all types are allowed.
            if (filter.Types.Count > 0)
            {
                validType = false;

                foreach (Type allowedType in filter.Types)
                {
                    if (allowedType.IsAssignableFrom(type))
                    {
                        validType = true;
                        break;
                    }
                }
            }
            else
            {
                validType = true;
            }

            return validFamily && validType;
        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.IdentityModel.Tokens;
using System.ServiceModel;
using Microsoft.Xml;

namespace System.IdentityModel.Selectors
{
    /// <summary>
    /// SecurityTokenSerializer is responsible for writing and reading SecurityKeyIdentifiers, SecurityKeyIdentifierClauses and SecurityTokens.
    /// In order to read SecurityTokens the SecurityTokenSerializer may need to resolve token references using the SecurityTokenResolvers that get passed in.
    /// The SecurityTokenSerializer is stateless
    /// Exceptions: XmlException, SecurityTokenException, NotSupportedException, InvalidOperationException, ArgumentException
    /// </summary>
    public abstract class SecurityTokenSerializer
    {
        // public methods
        //
        // Each public method is a thin guard wrapper: it null-checks its arguments and then
        // delegates to the corresponding protected *Core method implemented by derived serializers.

        public bool CanReadToken(XmlReader reader)
        {
            if (reader == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("reader");
            }
            return CanReadTokenCore(reader);
        }

        public bool CanWriteToken(SecurityToken token)
        {
            if (token == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("token");
            }
            return CanWriteTokenCore(token);
        }

        public bool CanReadKeyIdentifier(XmlReader reader)
        {
            if (reader == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("reader");
            }
            return CanReadKeyIdentifierCore(reader);
        }

        public bool CanWriteKeyIdentifier(SecurityKeyIdentifier keyIdentifier)
        {
            if (keyIdentifier == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("keyIdentifier");
            }
            return CanWriteKeyIdentifierCore(keyIdentifier);
        }

        public bool CanReadKeyIdentifierClause(XmlReader reader)
        {
            if (reader == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("reader");
            }
            return CanReadKeyIdentifierClauseCore(reader);
        }

        public bool CanWriteKeyIdentifierClause(SecurityKeyIdentifierClause keyIdentifierClause)
        {
            if (keyIdentifierClause == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("keyIdentifierClause");
            }
            return CanWriteKeyIdentifierClauseCore(keyIdentifierClause);
        }

        public SecurityToken ReadToken(XmlReader reader, SecurityTokenResolver tokenResolver)
        {
            if (reader == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("reader");
            }
            return ReadTokenCore(reader, tokenResolver);
        }

        public void WriteToken(XmlWriter writer, SecurityToken token)
        {
            if (writer == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("writer");
            }
            if (token == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("token");
            }
            WriteTokenCore(writer, token);
        }

        public SecurityKeyIdentifier ReadKeyIdentifier(XmlReader reader)
        {
            if (reader == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("reader");
            }
            return ReadKeyIdentifierCore(reader);
        }

        public void WriteKeyIdentifier(XmlWriter writer, SecurityKeyIdentifier keyIdentifier)
        {
            if (writer == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("writer");
            }
            if (keyIdentifier == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("keyIdentifier");
            }
            WriteKeyIdentifierCore(writer, keyIdentifier);
        }

        public SecurityKeyIdentifierClause ReadKeyIdentifierClause(XmlReader reader)
        {
            if (reader == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("reader");
            }
            return ReadKeyIdentifierClauseCore(reader);
        }

        public void WriteKeyIdentifierClause(XmlWriter writer, SecurityKeyIdentifierClause keyIdentifierClause)
        {
            if (writer == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("writer");
            }
            if (keyIdentifierClause == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("keyIdentifierClause");
            }
            WriteKeyIdentifierClauseCore(writer, keyIdentifierClause);
        }

        // protected abstract methods
        protected abstract bool CanReadTokenCore(XmlReader reader);
        protected abstract bool CanWriteTokenCore(SecurityToken token);
        protected abstract bool CanReadKeyIdentifierCore(XmlReader reader);
        protected abstract bool CanWriteKeyIdentifierCore(SecurityKeyIdentifier keyIdentifier);
        protected abstract bool CanReadKeyIdentifierClauseCore(XmlReader reader);
        protected abstract bool CanWriteKeyIdentifierClauseCore(SecurityKeyIdentifierClause keyIdentifierClause);
        protected abstract SecurityToken ReadTokenCore(XmlReader reader, SecurityTokenResolver tokenResolver);
        protected abstract void WriteTokenCore(XmlWriter writer, SecurityToken token);
        protected abstract SecurityKeyIdentifier ReadKeyIdentifierCore(XmlReader reader);
        protected abstract void WriteKeyIdentifierCore(XmlWriter writer, SecurityKeyIdentifier keyIdentifier);
        protected abstract SecurityKeyIdentifierClause ReadKeyIdentifierClauseCore(XmlReader reader);
        protected abstract void WriteKeyIdentifierClauseCore(XmlWriter writer, SecurityKeyIdentifierClause keyIdentifierClause);

        // Extension point: one entry per supported key identifier clause element
        // (matched by XML local name + namespace).
        internal abstract class KeyIdentifierClauseEntry
        {
            protected abstract XmlDictionaryString LocalName { get; }
            protected abstract XmlDictionaryString NamespaceUri { get; }

            public virtual bool CanReadKeyIdentifierClauseCore(XmlDictionaryReader reader)
            {
                return reader.IsStartElement(this.LocalName, this.NamespaceUri);
            }

            public abstract SecurityKeyIdentifierClause ReadKeyIdentifierClauseCore(XmlDictionaryReader reader);

            public abstract bool SupportsCore(SecurityKeyIdentifierClause keyIdentifierClause);

            public abstract void WriteKeyIdentifierClauseCore(XmlDictionaryWriter writer, SecurityKeyIdentifierClause keyIdentifierClause);
        }

        // Extension point: one entry per supported security token reference (STR) style.
        internal abstract class StrEntry
        {
            public abstract string GetTokenTypeUri();
            public abstract Type GetTokenType(SecurityKeyIdentifierClause clause);
            public abstract bool CanReadClause(XmlDictionaryReader reader, string tokenType);
            public abstract SecurityKeyIdentifierClause ReadClause(XmlDictionaryReader reader, byte[] derivationNonce, int derivationLength, string tokenType);
            public abstract bool SupportsCore(SecurityKeyIdentifierClause clause);
            public abstract void WriteContent(XmlDictionaryWriter writer, SecurityKeyIdentifierClause clause);
        }

        // Aggregates the entry lists a concrete serializer contributes; all Populate* methods
        // are no-ops by default so implementations only override the lists they support.
        internal abstract class SerializerEntries
        {
            public virtual void PopulateTokenEntries(IList<TokenEntry> tokenEntries) { }
            public virtual void PopulateKeyIdentifierEntries(IList<KeyIdentifierEntry> keyIdentifierEntries) { }
            public virtual void PopulateKeyIdentifierClauseEntries(IList<KeyIdentifierClauseEntry> keyIdentifierClauseEntries) { }
            public virtual void PopulateStrEntries(IList<StrEntry> strEntries) { }
        }

        // Extension point: one entry per supported key identifier element
        // (matched by XML local name + namespace).
        internal abstract class KeyIdentifierEntry
        {
            protected abstract XmlDictionaryString LocalName { get; }
            protected abstract XmlDictionaryString NamespaceUri { get; }

            public virtual bool CanReadKeyIdentifierCore(XmlDictionaryReader reader)
            {
                return reader.IsStartElement(this.LocalName, this.NamespaceUri);
            }

            public abstract SecurityKeyIdentifier ReadKeyIdentifierCore(XmlDictionaryReader reader);

            public abstract bool SupportsCore(SecurityKeyIdentifier keyIdentifier);

            public abstract void WriteKeyIdentifierCore(XmlDictionaryWriter writer, SecurityKeyIdentifier keyIdentifier);
        }

        // Extension point: one entry per supported token kind, mapping CLR token types
        // to their XML element and token-type/value-type URIs.
        internal abstract class TokenEntry
        {
            // Lazily-populated cache of the CLR types this entry handles.
            private Type[] _tokenTypes = null;

            protected abstract XmlDictionaryString LocalName { get; }
            protected abstract XmlDictionaryString NamespaceUri { get; }

            public Type TokenType { get { return GetTokenTypes()[0]; } }

            public abstract string TokenTypeUri { get; }

            protected abstract string ValueTypeUri { get; }

            public bool SupportsCore(Type tokenType)
            {
                Type[] tokenTypes = GetTokenTypes();
                for (int i = 0; i < tokenTypes.Length; ++i)
                {
                    if (tokenTypes[i].IsAssignableFrom(tokenType))
                        return true;
                }
                return false;
            }

            protected abstract Type[] GetTokenTypesCore();

            public Type[] GetTokenTypes()
            {
                if (_tokenTypes == null)
                    _tokenTypes = GetTokenTypesCore();
                return _tokenTypes;
            }

            public virtual bool SupportsTokenTypeUri(string tokenTypeUri)
            {
                return (this.TokenTypeUri == tokenTypeUri);
            }
        }
    }
}
/* * Copyright (c) 2015, InWorldz Halcyon Developers * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of halcyon nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ using System; using System.Collections.Generic; using System.Linq; using System.Text; using OpenSim.Framework; using System.IO; using OpenMetaverse; using InWorldz.Phlox.VM; using log4net; using System.Reflection; using System.Diagnostics; using InWorldz.Phlox.Serialization; using OpenSim.Region.ScriptEngine.Shared.Api; namespace InWorldz.Phlox.Engine { /// <summary> /// Loads a script. 
/// Searches for it from the most efficient source (bytecode already in memory)
    /// to the least efficient source (asset request and compilation)
    /// </summary>
    internal class ScriptLoader
    {
        private static readonly ILog _log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        private IAssetCache _assetCache;
        private ExecutionScheduler _exeScheduler;
        private StateManager _stateManager;

        public StateManager StateManager
        {
            get { return _stateManager; }
            set { _stateManager = value; }
        }

        /// <summary>
        /// A script that is loaded into memory with a reference count
        /// </summary>
        private class LoadedScript
        {
            public CompiledScript Script;
            public int RefCount;
        }

        /// <summary>
        /// Scripts loaded with the script asset UUID as the key.
        /// Only touched from the engine thread, so no lock is taken.
        /// </summary>
        private Dictionary<UUID, LoadedScript> _loadedScripts = new Dictionary<UUID, LoadedScript>();

        /// <summary>
        /// Scripts waiting to be loaded or unloaded, accessing this list must be done with a lock as requests
        /// come from outside of the engine thread
        /// </summary>
        private LinkedList<LoadUnloadRequest> _outstandingLoadUnloadRequests = new LinkedList<LoadUnloadRequest>();

        /// <summary>
        /// Locks the _waitingForAssetServer list and the _waitingForCompilation queue
        /// </summary>
        private object _assetAndCompileLock = new object();

        /// <summary>
        /// Load requests waiting on the asset server for the script, keyed by asset id.
        /// Multiple requests for the same asset share one entry (see AddAssetWait).
        /// </summary>
        private Dictionary<UUID, List<LoadUnloadRequest>> _waitingForAssetServer = new Dictionary<UUID, List<LoadUnloadRequest>>();

        /// <summary>
        /// Called after an asset has been retrieved from the asset server to wake up the master scheduler
        /// </summary>
        private WorkArrivedDelegate _workArrived;

        /// <summary>
        /// A script asset retrieved from the asset server plus every request waiting on it.
        /// </summary>
        private class LoadedAsset
        {
            public uint LocalId;
            public UUID ItemId;
            public UUID AssetId;
            public string ScriptText;
            public List<LoadUnloadRequest> Requests;
        }

        /// <summary>
        /// List of assets waiting for compilation
        /// </summary>
        private Queue<LoadedAsset> _waitingForCompilation = new Queue<LoadedAsset>();

        /// <summary>
        /// Used to measure compilation time
        /// </summary>
        private Stopwatch _stopwatch = new Stopwatch();

        /// <summary>
        /// The parent engine interface
        /// </summary>
        private EngineInterface _engineInterface;

        /// <summary>
        /// Requests for loaded script bytecode get queued here. This list must use locking because
        /// the requests come from outside of the engine thread
        /// </summary>
        private Queue<RetrieveBytecodeRequest> _outstandingBytecodeRequests = new Queue<RetrieveBytecodeRequest>();

        public ScriptLoader(IAssetCache assetCache, ExecutionScheduler exeScheduler,
            WorkArrivedDelegate workArrived, EngineInterface engineInterface)
        {
            _assetCache = assetCache;
            _exeScheduler = exeScheduler;
            _workArrived = workArrived;
            _engineInterface = engineInterface;

            this.CreateDirectoryStructure();
        }

        /// <summary>
        /// Ensures the on-disk bytecode cache directory exists.
        /// </summary>
        private void CreateDirectoryStructure()
        {
            Directory.CreateDirectory(PhloxConstants.COMPILE_CACHE_DIR);
        }

        /// <summary>
        /// Queues a load/unload request (from any thread) and wakes the scheduler.
        /// </summary>
        public void PostLoadUnloadRequest(LoadUnloadRequest req)
        {
            lock (_outstandingLoadUnloadRequests)
            {
                _outstandingLoadUnloadRequests.AddFirst(req);
            }

            _workArrived();
        }

        /// <summary>
        /// Engine-thread pump: services one load/unload request, one pending
        /// compilation, and any bytecode retrieval requests.
        /// </summary>
        public WorkStatus DoWork()
        {
            //do we have outstanding unload requests?
            bool lrqStatus = CheckAndPerformLoadUnloadRequest();

            //do we have outstanding asset responses?
            bool compStatus = CheckAndCompileScript();

            //how about requests for compiled script assets?
            bool byteCodeReqStatus = CheckAndRetrieveBytecodes();

            return new WorkStatus
            {
                WorkWasDone = lrqStatus || compStatus || byteCodeReqStatus,
                WorkIsPending = this.WorkIsPending(),
                NextWakeUpTime = UInt64.MaxValue
            };
        }

        // Returns true if it found the script loaded and decremented the refcount.
        private bool PerformUnloadRequest(LoadUnloadRequest unloadReq)
        {
            bool rc = false;

            // Find based on the item ID
            VM.Interpreter loadedScript = _exeScheduler.FindScript(unloadReq.ItemId);
            if (loadedScript != null)
            {
                LoadedScript reffedScript;

                //tell the scheduler the script needs to be pulled
                _exeScheduler.DoUnload(unloadReq.ItemId, unloadReq);

                //tell the async command manager that it needs to remove this script
                AsyncCommandManager.RemoveScript(_engineInterface, unloadReq.LocalId, unloadReq.ItemId);

                //tell the state manager to remove this script
                _stateManager.ScriptUnloaded(unloadReq.ItemId);

                //decref and unload if refcount is 0 based on the Asset ID
                if (_loadedScripts.TryGetValue(loadedScript.Script.AssetId, out reffedScript))
                {
                    if (--reffedScript.RefCount == 0)
                    {
                        _loadedScripts.Remove(loadedScript.Script.AssetId);
                        //_log.InfoFormat("[Phlox]: Script {0} unloaded", loadedScript.Script.AssetId);
                    }

                    rc = true;
                }

                // Callback here because if the Item ID was not found, the callback would be meaningless
                if (unloadReq.PostUnloadCallback != null)
                {
                    // Now call the completion callback (e.g. now that it is safe for the script to be removed in the delete case).
                    unloadReq.PostUnloadCallback(unloadReq.Prim, unloadReq.ItemId,
                        unloadReq.CallbackParams.AllowedDrop, unloadReq.CallbackParams.FireEvents,
                        unloadReq.CallbackParams.ReplaceArgs);
                }
            }

            return rc;
        }

        /// <summary>
        /// Dequeues one asset waiting for compilation, compiles it, caches the
        /// bytecode and starts every request that was waiting on it.
        /// Returns true when an item was dequeued and compiled (or failed to compile);
        /// false when the queue was empty or the compiler threw.
        /// </summary>
        private bool CheckAndCompileScript()
        {
            LoadedAsset loadedScript;
            lock (_assetAndCompileLock)
            {
                if (_waitingForCompilation.Count == 0)
                {
                    return false;
                }

                loadedScript = _waitingForCompilation.Dequeue();
            }

            //fan compiler output out to every requester that supplied a listener
            List<Types.ILSLListener> subListeners = new List<Types.ILSLListener>();
            foreach (LoadUnloadRequest request in loadedScript.Requests)
            {
                if (request.Listener != null)
                {
                    subListeners.Add(new CompilationListenerAdaptor(request.Listener));
                }
            }

            //perform compilation
            Glue.CompilerFrontend frontend;
            if (subListeners.Count == 0)
            {
                frontend = new Glue.CompilerFrontend(new LogOutputListener(loadedScript.Requests), ".");
            }
            else
            {
                frontend = new Glue.CompilerFrontend(new MulticastCompilerListener(subListeners), ".");
            }

            try
            {
                _stopwatch.Start();
                CompiledScript comp = frontend.Compile(loadedScript.ScriptText);
                _stopwatch.Stop();

                if (comp != null)
                {
                    comp.AssetId = loadedScript.AssetId;

                    //save the script
                    this.CacheCompiledScript(comp);

                    _log.InfoFormat("[Phlox]: Compiled script {0} ({1}ms) in {2}", loadedScript.AssetId,
                        _stopwatch.ElapsedMilliseconds, loadedScript.LocalId);

                    foreach (LoadUnloadRequest request in loadedScript.Requests)
                    {
                        this.BeginScriptRun(request, comp);
                    }

                    //all waiting requests share one LoadedScript entry; refcount = number of waiters
                    _loadedScripts[comp.AssetId] = new LoadedScript { Script = comp, RefCount = loadedScript.Requests.Count };
                }
                else
                {
                    _log.ErrorFormat("[Phlox]: Compilation failed for {0} item {1} in {2}",
                        loadedScript.AssetId, loadedScript.ItemId, loadedScript.LocalId);
                }

                return true;
            }
            catch (Exception e)
            {
                _log.ErrorFormat("[Phlox]: Exception while compiling {0} item {1} in {2}: {3}",
                    loadedScript.AssetId, loadedScript.ItemId, loadedScript.LocalId, e);
            }
            finally
            {
                frontend.Listener.CompilationFinished();
                _stopwatch.Reset();
            }

            return false;
        }

        /// <summary>
        /// Serializes the compiled script to the on-disk bytecode cache
        /// (overwriting any existing cached copy).
        /// </summary>
        private void CacheCompiledScript(CompiledScript comp)
        {
            string scriptCacheDir = this.LookupDirectoryFromId(PhloxConstants.COMPILE_CACHE_DIR, comp.AssetId);
            Directory.CreateDirectory(scriptCacheDir);

            string scriptPath = Path.Combine(scriptCacheDir, comp.AssetId.ToString() + PhloxConstants.COMPILED_SCRIPT_EXTENSION);
            SerializedScript script = SerializedScript.FromCompiledScript(comp);

            using (FileStream f = File.Open(scriptPath, FileMode.Create))
            {
                ProtoBuf.Serializer.Serialize(f, script);
                f.Close();
            }
        }

        /// <summary>
        /// Pops one queued load/unload/reload request and dispatches it.
        /// Returns false when the queue is empty.
        /// </summary>
        private bool CheckAndPerformLoadUnloadRequest()
        {
            LinkedListNode<LoadUnloadRequest> lrq;
            lock (_outstandingLoadUnloadRequests)
            {
                if (_outstandingLoadUnloadRequests.Count > 0)
                {
                    lrq = _outstandingLoadUnloadRequests.First;
                    _outstandingLoadUnloadRequests.RemoveFirst();
                }
                else
                {
                    return false;
                }
            }

            switch (lrq.Value.RequestType)
            {
                case LoadUnloadRequest.LUType.Load:
                    return this.PerformLoadRequest(lrq.Value);

                case LoadUnloadRequest.LUType.Unload:
                    return this.PerformUnloadRequest(lrq.Value);

                case LoadUnloadRequest.LUType.Reload:
                    return this.PerformReloadRequest(lrq.Value);
            }

            return false;
        }

        //a reload follows the same path as a load; BeginScriptRun unloads first
        //when it sees RequestType == Reload
        private bool PerformReloadRequest(LoadUnloadRequest loadUnloadRequest)
        {
            return this.PerformLoadRequest(loadUnloadRequest);
        }

        /// <summary>
        /// Starts a script from the cheapest available source, falling back to an
        /// asynchronous asset fetch + compile when nothing local is available.
        /// </summary>
        private bool PerformLoadRequest(LoadUnloadRequest lrq)
        {
            //look up the asset id
            UUID scriptAssetId = this.FindAssetId(lrq);
            if (scriptAssetId != UUID.Zero)
            {
                try
                {
                    //we try to load a script the most efficient way possible.
                    //these if statements are ordered from most efficient to least
                    if (TryStartSharedScript(scriptAssetId, lrq))
                    {
                        return true;
                    }
                    else if (TryStartScriptFromSerializedData(scriptAssetId, lrq))
                    {
                        return true;
                    }
                    else if (TryStartCachedScript(scriptAssetId, lrq))
                    {
                        return true;
                    }
                    else
                    {
                        SubmitAssetLoadRequest(lrq);
                        return true;
                    }
                }
                catch (LoaderException e)
                {
                    //FIX: e.Message was previously concatenated into the format string;
                    //a '{' or '}' in the message would make ErrorFormat throw FormatException.
                    _log.ErrorFormat("[Phlox]: Could not load script: {0}", e.Message);
                }
            }

            return false;
        }

        /// <summary>
        /// Attempts to start a script from bytecode that arrived serialized inside the
        /// prim (e.g. from a region crossing). Clears the prim data on deserialization failure.
        /// </summary>
        private bool TryStartScriptFromSerializedData(UUID scriptAssetId, LoadUnloadRequest lrq)
        {
            if (lrq.Prim.SerializedScriptByteCode == null) return false;

            byte[] serializedCompiledScript;
            if (lrq.Prim.SerializedScriptByteCode.TryGetValue(scriptAssetId, out serializedCompiledScript))
            {
                //deserialize and load
                using (MemoryStream ms = new MemoryStream(serializedCompiledScript))
                {
                    Serialization.SerializedScript script = ProtoBuf.Serializer.Deserialize<Serialization.SerializedScript>(ms);
                    if (script == null)
                    {
                        _log.ErrorFormat("[Phlox]: LOADER: Script data contained in prim failed to deserialize");
                        ClearSerializedScriptData(lrq, scriptAssetId);
                        return false;
                    }
                    else
                    {
                        CompiledScript compiledScript = script.ToCompiledScript();

                        _log.InfoFormat("[Phlox]: Starting contained script {0} in item {1} group {2} part {3}",
                            scriptAssetId, lrq.ItemId, lrq.Prim.ParentGroup.LocalId, lrq.Prim.LocalId);

                        BeginScriptRun(lrq, compiledScript);
                        _loadedScripts[scriptAssetId] = new LoadedScript { Script = compiledScript, RefCount = 1 };
                        return true;
                    }
                }
            }

            return false;
        }

        /// <summary>
        /// Requests the script source from the asset server, unless a request for the
        /// same asset is already in flight (in which case this request just joins the wait).
        /// </summary>
        private bool SubmitAssetLoadRequest(LoadUnloadRequest lrq)
        {
            UUID scriptAssetId = this.FindAssetId(lrq);
            if (scriptAssetId != UUID.Zero)
            {
                if (AddAssetWait(scriptAssetId, lrq))
                {
                    _assetCache.GetAsset(scriptAssetId,
                        delegate(UUID i, AssetBase a) { this.AssetReceived(lrq.Prim.LocalId, lrq.ItemId, i, a); },
                        AssetRequestInfo.InternalRequest());
                }

                return true;
            }

            return false;
        }

        /// <summary>
        /// Tracks a wait on the asset server
        /// </summary>
        /// <param name="scriptAssetId"></param>
        /// <param name="lrq"></param>
        /// <returns>True if a request should be sent, false if not (already in progress)</returns>
        private bool AddAssetWait(UUID scriptAssetId, LoadUnloadRequest lrq)
        {
            lock (_assetAndCompileLock)
            {
                List<LoadUnloadRequest> waitingRequests;
                if (_waitingForAssetServer.TryGetValue(scriptAssetId, out waitingRequests))
                {
                    //we already have another script waiting for load with the same UUID,
                    //add this one to the waiting list
                    waitingRequests.Add(lrq);
                    return false;
                }
                else
                {
                    //no one waiting for this asset yet, create a new entry
                    _waitingForAssetServer.Add(scriptAssetId, new List<LoadUnloadRequest>() { lrq });
                    return true;
                }
            }
        }

        /// <summary>
        /// Asset-server callback: validates the asset and hands its text to the
        /// compilation queue, waking the scheduler. Runs on the asset cache's thread.
        /// </summary>
        private void AssetReceived(uint localId, UUID itemId, UUID assetId, AssetBase asset)
        {
            lock (_assetAndCompileLock)
            {
                List<LoadUnloadRequest> waitingRequests;
                if (_waitingForAssetServer.TryGetValue(assetId, out waitingRequests))
                {
                    _waitingForAssetServer.Remove(assetId);

                    if (asset == null)
                    {
                        //NOTE(review): the waiting requests are dropped silently here;
                        //no retry is attempted for a missing asset.
                        _log.ErrorFormat("[Phlox]: Asset not found for script {0}", assetId);
                        return;
                    }

                    //we have the asset, verify it, and signal that work has arrived
                    if (asset.Type != (sbyte)AssetType.LSLText)
                    {
                        _log.ErrorFormat("[Phlox]: Invalid asset type received from asset server. " +
                            "Expected LSLText, got {0}", asset.Type);
                        return;
                    }

                    string scriptText = OpenMetaverse.Utils.BytesToString(asset.Data);
                    _waitingForCompilation.Enqueue(
                        new LoadedAsset
                        {
                            LocalId = localId,
                            ItemId = itemId,
                            AssetId = assetId,
                            Requests = waitingRequests,
                            ScriptText = scriptText
                        });

                    _workArrived();
                }
                else
                {
                    _log.ErrorFormat("[Phlox]: Received an asset for a script im not waiting for {0}", assetId);
                }
            }
        }

        /// <summary>
        /// Try to load a script from disk and start it up
        /// </summary>
        /// <param name="scriptAssetId"></param>
        /// <param name="lrq"></param>
        /// <returns></returns>
        private bool TryStartCachedScript(UUID scriptAssetId, LoadUnloadRequest lrq)
        {
            //check in the cache directory for compiled scripts
            if (ScriptIsCached(scriptAssetId))
            {
                CompiledScript script = LoadScriptFromDisk(scriptAssetId);

                _log.InfoFormat("[Phlox]: Starting cached script {0} in item {1} owner {2} part {3}",
                    scriptAssetId, lrq.ItemId, lrq.Prim.ParentGroup.OwnerID, lrq.Prim.LocalId);

                BeginScriptRun(lrq, script);
                _loadedScripts[scriptAssetId] = new LoadedScript { Script = script, RefCount = 1 };
                return true;
            }

            return false;
        }

        /// <summary>
        /// Restores any saved runtime state, unloads first on a reload, then hands the
        /// script to the execution scheduler.
        /// </summary>
        private void BeginScriptRun(LoadUnloadRequest lrq, CompiledScript script)
        {
            RuntimeState state = this.TryLoadState(lrq);

            //if this is a reload, we unload first
            if (lrq.RequestType == LoadUnloadRequest.LUType.Reload)
            {
                this.PerformUnloadRequest(lrq);
            }

            try
            {
                _exeScheduler.FinishedLoading(lrq, script, state);
            }
            catch (Exception e)
            {
                _log.ErrorFormat("[Phlox]: Error when informing scheduler of script load. Script: {0} Item: {1} Group: {2} Part: {3}. {4}",
                    script.AssetId, lrq.ItemId, lrq.Prim.ParentGroup.LocalId, lrq.Prim.LocalId, e);
                throw;
            }
        }

        /// <summary>
        /// Deserializes cached bytecode for the given asset from disk.
        /// </summary>
        /// <exception cref="LoaderException">When the file is missing or fails to deserialize.</exception>
        private CompiledScript LoadScriptFromDisk(UUID scriptAssetId)
        {
            string cacheDir = this.LookupDirectoryFromId(PhloxConstants.COMPILE_CACHE_DIR, scriptAssetId);
            string scriptPath = Path.Combine(cacheDir, scriptAssetId.ToString() + PhloxConstants.COMPILED_SCRIPT_EXTENSION);

            if (File.Exists(scriptPath))
            {
                SerializedScript serScript;
                using (var file = File.OpenRead(scriptPath))
                {
                    serScript = ProtoBuf.Serializer.Deserialize<SerializedScript>(file);
                    if (serScript == null)
                    {
                        throw new LoaderException(String.Format("Script {0} failed to deserialize from source at {1}", scriptAssetId, scriptPath));
                    }
                }

                return serScript.ToCompiledScript();
            }
            else
            {
                throw new LoaderException(String.Format("Script {0} could not be found at {1}", scriptAssetId, scriptPath));
            }
        }

        //cache files are sharded into subdirectories by the first CACHE_PREFIX_LEN
        //characters of the asset id to keep directory sizes manageable
        private string LookupDirectoryFromId(string baseDir, UUID id)
        {
            return Path.Combine(baseDir, id.ToString().Substring(0, PhloxConstants.CACHE_PREFIX_LEN));
        }

        /// <summary>
        /// True when compiled bytecode for this asset exists in the disk cache.
        /// </summary>
        private bool ScriptIsCached(UUID scriptAssetId)
        {
            string cacheDir = this.LookupDirectoryFromId(PhloxConstants.COMPILE_CACHE_DIR, scriptAssetId);
            string scriptPath = Path.Combine(cacheDir, scriptAssetId.ToString() + PhloxConstants.COMPILED_SCRIPT_EXTENSION);

            if (File.Exists(scriptPath))
            {
                return true;
            }

            return false;
        }

        /// <summary>
        /// If the script asset is found cached, we start a new instance of it
        /// </summary>
        /// <param name="scriptAssetId"></param>
        /// <returns></returns>
        private bool TryStartSharedScript(UUID scriptAssetId, LoadUnloadRequest loadRequest)
        {
            LoadedScript script;
            if (_loadedScripts.TryGetValue(scriptAssetId, out script))
            {
                //only adjust ref counts if this is not a reload
                if (loadRequest.RequestType != LoadUnloadRequest.LUType.Reload)
                {
                    script.RefCount++;
                }

                //check the part in the load request for this script.
                //even though we're not using the passed in script asset,
                //we should still do cleanup
                ClearSerializedScriptData(loadRequest, scriptAssetId);

                _log.InfoFormat("[Phlox]: Starting shared script {0} in item {1} owner {2} part {3}",
                    scriptAssetId, loadRequest.ItemId, loadRequest.Prim.ParentGroup.OwnerID, loadRequest.Prim.LocalId);

                BeginScriptRun(loadRequest, script.Script);
                return true;
            }

            return false;
        }

        /// <summary>
        /// Removes serialized bytecode for the given asset from the prim, nulling the
        /// whole collection when it becomes empty.
        /// </summary>
        private void ClearSerializedScriptData(LoadUnloadRequest loadRequest, UUID scriptAssetId)
        {
            Dictionary<UUID, byte[]> dictionary = loadRequest.Prim.SerializedScriptByteCode;
            if (dictionary == null) return;

            if (dictionary.ContainsKey(scriptAssetId))
            {
                dictionary.Remove(scriptAssetId);
            }

            if (dictionary.Count == 0)
            {
                loadRequest.Prim.SerializedScriptByteCode = null;
            }
        }

        /// <summary>
        /// Attempt to load state from the correct source
        /// </summary>
        /// <param name="loadRequest">The request that sparked the script load</param>
        /// <returns>The runtime state or null</returns>
        private RuntimeState TryLoadState(LoadUnloadRequest loadRequest)
        {
            try
            {
                switch (loadRequest.StateSource)
                {
                    case OpenSim.Region.Framework.ScriptStateSource.RegionLocalDisk:
                        return _stateManager.LoadStateFromDisk(loadRequest.ItemId);

                    case OpenSim.Region.Framework.ScriptStateSource.PrimData:
                        return _stateManager.LoadStateFromPrim(loadRequest.ItemId, loadRequest.OldItemId, loadRequest.Prim);
                }

                return null;
            }
            catch (Exception e)
            {
                //state load failures are non-fatal: the script simply starts fresh
                _log.ErrorFormat("[Phlox]: Loading script state failed for {0}, {1}", loadRequest.ItemId, e);
            }

            return null;
        }

        /// <summary>
        /// True when any load/unload, compilation, or bytecode-retrieval work is queued.
        /// </summary>
        private bool WorkIsPending()
        {
            lock (_outstandingLoadUnloadRequests)
            {
                //NOTE(review): _waitingForCompilation is guarded by _assetAndCompileLock
                //elsewhere, but is read here under _outstandingLoadUnloadRequests —
                //preserved as-is; worst case is a stale pending flag for one cycle.
                if (_outstandingLoadUnloadRequests.Count > 0 || _waitingForCompilation.Count > 0)
                {
                    return true;
                }
            }

            lock (_outstandingBytecodeRequests)
            {
                if (_outstandingBytecodeRequests.Count > 0)
                {
                    return true;
                }
            }

            return false;
        }

        /// <summary>
        /// Resolves the request's inventory item to its script asset id,
        /// or UUID.Zero when the item no longer exists.
        /// </summary>
        private UUID FindAssetId(LoadUnloadRequest lrq)
        {
            TaskInventoryItem item = lrq.Prim.Inventory.GetInventoryItem(lrq.ItemId);
            if (item == null)
            {
                _log.ErrorFormat("[Phlox]: Could not find inventory item {0} in primitive {1} ({2}) to start script",
                    lrq.ItemId, lrq.Prim.Name, lrq.Prim.UUID);
                return UUID.Zero;
            }
            else
            {
                return item.AssetID;
            }
        }

        internal void Stop()
        {
        }

        /// <summary>
        /// Queues a request (from any thread) for the serialized bytecode of loaded scripts.
        /// </summary>
        internal void PostRetrieveByteCodeRequest(RetrieveBytecodeRequest rbRequest)
        {
            lock (_outstandingBytecodeRequests)
            {
                _outstandingBytecodeRequests.Enqueue(rbRequest);
            }

            _workArrived();
        }

        /// <summary>
        /// Drains the bytecode request queue, reserializing each requested loaded
        /// script and signaling the requester when its data is ready.
        /// </summary>
        private bool CheckAndRetrieveBytecodes()
        {
            List<RetrieveBytecodeRequest> reqs;
            lock (_outstandingBytecodeRequests)
            {
                if (_outstandingBytecodeRequests.Count == 0) return false;

                reqs = new List<RetrieveBytecodeRequest>(_outstandingBytecodeRequests);
                _outstandingBytecodeRequests.Clear();
            }

            foreach (var req in reqs)
            {
                Dictionary<UUID, byte[]> bytecodes = new Dictionary<UUID, byte[]>();
                foreach (UUID id in req.ScriptIds)
                {
                    if (!bytecodes.ContainsKey(id))
                    {
                        LoadedScript script;
                        if (_loadedScripts.TryGetValue(id, out script))
                        {
                            byte[] serializedScript = ReserializeScript(script.Script);
                            bytecodes.Add(id, serializedScript);
                        }
                    }
                }

                req.Bytecodes = bytecodes;
                req.SignalDataReady();
            }

            return true;
        }

        /// <summary>
        /// Reserializes a compiled script into a form again usable to pass over the wire or write to disk
        /// </summary>
        /// <param name="compiledScript"></param>
        /// <returns></returns>
        private byte[] ReserializeScript(CompiledScript compiledScript)
        {
            SerializedScript script = SerializedScript.FromCompiledScript(compiledScript);
            using (MemoryStream memStream = new MemoryStream())
            {
                ProtoBuf.Serializer.Serialize(memStream, script);
                return memStream.ToArray();
            }
        }
    }
}
#region MigraDoc - Creating Documents on the Fly // // Authors: // Stefan Lange (mailto:Stefan.Lange@pdfsharp.com) // Klaus Potzesny (mailto:Klaus.Potzesny@pdfsharp.com) // David Stephensen (mailto:David.Stephensen@pdfsharp.com) // // Copyright (c) 2001-2009 empira Software GmbH, Cologne (Germany) // // http://www.pdfsharp.com // http://www.migradoc.com // http://sourceforge.net/projects/pdfsharp // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL // THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. #endregion using System; using System.Diagnostics; using System.Reflection; using MigraDoc.DocumentObjectModel.IO; using MigraDoc.DocumentObjectModel.Internals; using MigraDoc.DocumentObjectModel.Visitors; namespace MigraDoc.DocumentObjectModel.Tables { /// <summary> /// Represents a row of a table. /// </summary> public class Row : DocumentObject, IVisitable { /// <summary> /// Initializes a new instance of the Row class. 
/// </summary>
    public Row()
    {
    }

    /// <summary>
    /// Initializes a new instance of the Row class with the specified parent.
    /// </summary>
    internal Row(DocumentObject parent) : base(parent)
    {
    }

    #region Methods
    /// <summary>
    /// Creates a deep copy of this object.
    /// </summary>
    public new Row Clone()
    {
      return (Row)DeepCopy();
    }

    /// <summary>
    /// Implements the deep copy of the object.
    /// Each owned sub-object (format, borders, shading, cells) is cloned and
    /// re-parented to the copy so the clone shares no mutable state.
    /// </summary>
    protected override object DeepCopy()
    {
      Row row = (Row)base.DeepCopy();
      if (row.format != null)
      {
        row.format = row.format.Clone();
        row.format.parent = row;
      }
      if (row.borders != null)
      {
        row.borders = row.borders.Clone();
        row.borders.parent = row;
      }
      if (row.shading != null)
      {
        row.shading = row.shading.Clone();
        row.shading.parent = row;
      }
      if (row.cells != null)
      {
        row.cells = row.cells.Clone();
        row.cells.parent = row;
      }
      return row;
    }
    #endregion

    #region Properties
    /// <summary>
    /// Gets the table the row belongs to.
    /// Resolved lazily from the parent Rows collection and cached.
    /// </summary>
    public Table Table
    {
      get
      {
        if (this.table == null)
        {
          Rows rws = this.Parent as Rows;
          if (rws != null)
            this.table = rws.Table;
        }
        return this.table;
      }
    }
    Table table;

    /// <summary>
    /// Gets the index of the row. First row has index 0.
    /// </summary>
    public int Index
    {
      get
      {
        // Optimization from AndrewT's patch
        // (http://www.pakeha_by.my-webs.org/downloads/MigraDoc-1.32-TablePatch.patch):
        // indexes for all sibling rows are populated in one pass instead of
        // searching per row.
        if (index.IsNull)
        {
          // NOTE(review): assumes parent is always a Rows collection here;
          // a row not yet added to a table would throw NullReferenceException.
          Rows rws = this.parent as Rows;
          rws.PopulateItemIndexes();
        }
        return index;
      }
    }
    [DV]
    internal NInt index = NInt.NullValue;

    /// <summary>
    /// Gets a cell by its column index. The first cell has index 0.
    /// </summary>
    public Cell this[int index]
    {
      get { return Cells[index]; }
    }

    /// <summary>
    /// Gets or sets the default style name for all cells of the row.
    /// </summary>
    public string Style
    {
      get { return this.style.Value; }
      set { this.style.Value = value; }
    }
    [DV]
    internal NString style = NString.NullValue;

    /// <summary>
    /// Gets the default ParagraphFormat for all cells of the row.
    /// Created on demand with this row as parent.
    /// </summary>
    public ParagraphFormat Format
    {
      get
      {
        if (this.format == null)
          this.format = new ParagraphFormat(this);
        return this.format;
      }
      set
      {
        SetParent(value);
        this.format = value;
      }
    }
    [DV]
    internal ParagraphFormat format;

    /// <summary>
    /// Gets or sets the default vertical alignment for all cells of the row.
    /// </summary>
    public VerticalAlignment VerticalAlignment
    {
      get { return (VerticalAlignment)this.verticalAlignment.Value; }
      set { this.verticalAlignment.Value = (int)value; }
    }
    [DV(Type = typeof(VerticalAlignment))]
    internal NEnum verticalAlignment = NEnum.NullValue(typeof(VerticalAlignment));

    /// <summary>
    /// Gets or sets the height of the row.
    /// </summary>
    public Unit Height
    {
      get { return this.height; }
      set { this.height = value; }
    }
    [DV]
    internal Unit height = Unit.NullValue;

    /// <summary>
    /// Gets or sets the rule which is used to determine the height of the row.
    /// </summary>
    public RowHeightRule HeightRule
    {
      get { return (RowHeightRule)this.heightRule.Value; }
      set { this.heightRule.Value = (int)value; }
    }
    [DV(Type = typeof(RowHeightRule))]
    internal NEnum heightRule = NEnum.NullValue(typeof(RowHeightRule));

    /// <summary>
    /// Gets or sets the default top padding for all cells of the row.
    /// </summary>
    public Unit TopPadding
    {
      get { return this.topPadding; }
      set { this.topPadding = value; }
    }
    [DV]
    internal Unit topPadding = Unit.NullValue;

    /// <summary>
    /// Gets or sets the default bottom padding for all cells of the row.
    /// </summary>
    public Unit BottomPadding
    {
      get { return this.bottomPadding; }
      set { this.bottomPadding = value; }
    }
    [DV]
    internal Unit bottomPadding = Unit.NullValue;

    /// <summary>
    /// Gets or sets a value which define whether the row is a header.
    /// </summary>
    public bool HeadingFormat
    {
      get { return this.headingFormat.Value; }
      set { this.headingFormat.Value = value; }
    }
    [DV]
    internal NBool headingFormat = NBool.NullValue;

    /// <summary>
    /// Gets the default Borders object for all cells of the row.
    /// Created on demand with this row as parent.
    /// </summary>
    public Borders Borders
    {
      get
      {
        if (this.borders == null)
          this.borders = new Borders(this);
        return this.borders;
      }
      set
      {
        SetParent(value);
        this.borders = value;
      }
    }
    [DV]
    internal Borders borders;

    /// <summary>
    /// Gets the default Shading object for all cells of the row.
    /// Created on demand with this row as parent.
    /// </summary>
    public Shading Shading
    {
      get
      {
        if (this.shading == null)
          this.shading = new Shading(this);
        return this.shading;
      }
      set
      {
        SetParent(value);
        this.shading = value;
      }
    }
    [DV]
    internal Shading shading;

    /// <summary>
    /// Gets or sets the number of rows that should be
    /// kept together with the current row in case of a page break.
    /// </summary>
    public int KeepWith
    {
      get { return this.keepWith.Value; }
      set { this.keepWith.Value = value; }
    }
    [DV]
    internal NInt keepWith = NInt.NullValue;

    /// <summary>
    /// Gets the Cells collection of the table.
    /// Created on demand with this row as parent.
    /// </summary>
    public Cells Cells
    {
      get
      {
        if (this.cells == null)
          this.cells = new Cells(this);
        return this.cells;
      }
      set
      {
        SetParent(value);
        this.cells = value;
      }
    }
    [DV]
    internal Cells cells;

    /// <summary>
    /// Gets or sets a comment associated with this object.
    /// </summary>
    public string Comment
    {
      get { return this.comment.Value; }
      set { this.comment.Value = value; }
    }
    [DV]
    internal NString comment = NString.NullValue;
    #endregion

    #region Internal
    /// <summary>
    /// Converts Row into DDL.
    /// Writes the "\row" keyword, then only the attributes that are non-null,
    /// then the cell content between BeginContent/EndContent.
    /// </summary>
    internal override void Serialize(Serializer serializer)
    {
      serializer.WriteComment(this.comment.Value);
      serializer.WriteLine("\\row");

      int pos = serializer.BeginAttributes();

      if (this.style.Value != String.Empty)
        serializer.WriteSimpleAttribute("Style", this.Style);

      if (!this.IsNull("Format"))
        this.format.Serialize(serializer, "Format", null);

      if (!this.height.IsNull)
        serializer.WriteSimpleAttribute("Height", this.Height);

      if (!this.heightRule.IsNull)
        serializer.WriteSimpleAttribute("HeightRule", this.HeightRule);

      if (!this.topPadding.IsNull)
        serializer.WriteSimpleAttribute("TopPadding", this.TopPadding);

      if (!this.bottomPadding.IsNull)
        serializer.WriteSimpleAttribute("BottomPadding", this.BottomPadding);

      if (!this.headingFormat.IsNull)
        serializer.WriteSimpleAttribute("HeadingFormat", this.HeadingFormat);

      if (!this.verticalAlignment.IsNull)
        serializer.WriteSimpleAttribute("VerticalAlignment", this.VerticalAlignment);

      if (!this.keepWith.IsNull)
        serializer.WriteSimpleAttribute("KeepWith", this.KeepWith);

      //Borders & Shading
      if (!this.IsNull("Borders"))
        this.borders.Serialize(serializer, null);

      if (!this.IsNull("Shading"))
        this.shading.Serialize(serializer);

      serializer.EndAttributes(pos);

      serializer.BeginContent();
      if (!IsNull("Cells"))
        this.cells.Serialize(serializer);
      serializer.EndContent();
    }

    /// <summary>
    /// Allows the visitor object to visit the document object and it's child objects.
    /// </summary>
    void IVisitable.AcceptVisitor(DocumentObjectVisitor visitor, bool visitChildren)
    {
      visitor.VisitRow(this);

      foreach (Cell cell in this.cells)
        ((IVisitable)cell).AcceptVisitor(visitor, visitChildren);
    }

    /// <summary>
    /// Returns the meta object of this instance.
    /// Lazily builds reflection metadata over the [DV]-attributed members;
    /// shared across all Row instances.
    /// </summary>
    internal override Meta Meta
    {
      get
      {
        if (meta == null)
          meta = new Meta(typeof(Row));
        return meta;
      }
    }
    static Meta meta;
    #endregion
  }
}
using System;
using System.Collections.Generic;
using System.Text;
using liquicode.AppTools;
using NUnit.Framework;


//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\
//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\


/// <summary>
/// Reusable NUnit suite for DataStructures.GenericVector implementations.
/// Concrete fixtures supply the vector type (TVector) and element type (TValue)
/// and implement TestValue() to map an index to a deterministic element.
/// </summary>
public abstract class Test_Vector<TVector, TValue>
	where TVector : DataStructures.GenericVector<TValue>, new()
{

	//--------------------------------------------------------------------
	/// <summary>Maps an index to a deterministic, index-specific test value.</summary>
	protected abstract TValue TestValue( int Index_in );


	//--------------------------------------------------------------------
	// Builds a vector of Length_in elements where element i equals TestValue(i).
	private TVector NewTestVector( int Length_in )
	{
		TVector vector = new TVector();
		for( int ndx = 0; ndx < Length_in; ndx++ )
		{
			vector.Add( this.TestValue( ndx ) );
		}
		return vector;
	}


	//--------------------------------------------------------------------
	// Verifies a vector has Length_in elements, each equal to TestValue(i).
	private void AssertTestVector( TVector Vector_in, int Length_in )
	{
		Assert.AreEqual( Length_in, Vector_in.Count, "Invalid Vector.Count." );
		for( int ndx = 0; ndx < Length_in; ndx++ )
		{
			Assert.AreEqual( this.TestValue( ndx ), Vector_in[ ndx ], "Assert.AreEqual failed at element " + ndx );
		}
		return;
	}


	//--------------------------------------------------------------------
	[Test]
	public void Test_00_DefaultConstructor()
	{
		TVector vector = new TVector();
		Assert.IsNotNull( vector );
		return;
	}


	//--------------------------------------------------------------------
	[Test]
	public void Test_01_DisallowUnintentionalDeepCopyConstructor()
	{
		// Make sure we don't define a copy constructor which performs an unintentional deep copy.
		// Assignment must alias the same instance, so clearing one clears "both".
		TVector vector = this.NewTestVector( 1000 );
		TVector vector2 = vector;
		vector.Clear();
		Assert.AreEqual( vector.Count, vector2.Count );
		return;
	}


	//--------------------------------------------------------------------
	[Test]
	public void Test_02_TestVector()
	{
		TVector vector = this.NewTestVector( 1000 );
		this.AssertTestVector( vector, 1000 );
		return;
	}


	//--------------------------------------------------------------------
	[Test]
	public void Test_10_FillLength()
	{
		TVector vector = new TVector();
		TValue value42 = this.TestValue( 42 );
		vector.Fill( value42, 1000 );
		for( int ndx = 0; ndx < 1000; ndx++ )
		{
			Assert.AreEqual( value42, vector[ ndx ] );
		}
		return;
	}


	//--------------------------------------------------------------------
	[Test]
	public void Test_11_FillAll()
	{
		// Fill() without a length must overwrite every existing element.
		TVector vector = new TVector();
		TValue value8 = this.TestValue( 8 );
		TValue value42 = this.TestValue( 42 );
		vector.Fill( value8, 1000 );
		vector.Fill( value42 );
		for( int ndx = 0; ndx < 1000; ndx++ )
		{
			Assert.AreEqual( value42, vector[ ndx ] );
		}
		return;
	}


	//--------------------------------------------------------------------
	[Test]
	public void Test_12_Substitute()
	{
		// Substitute() must replace every occurrence of the old value.
		TVector vector = new TVector();
		TValue value8 = this.TestValue( 8 );
		TValue value42 = this.TestValue( 42 );
		vector.Fill( value8, 1000 );
		vector.Substitute( value8, value42 );
		for( int ndx = 0; ndx < 1000; ndx++ )
		{
			Assert.AreEqual( value42, vector[ ndx ] );
		}
		return;
	}


	//--------------------------------------------------------------------
	[Test]
	public void Test_20_ExplicitArrayCast()
	{
		TVector vector = this.NewTestVector( 1000 );
		TValue[] array = (TValue[])vector;
		// Fixed typo in the assertion message ("lentgh" -> "length").
		Assert.AreEqual( array.Length, vector.Count, "Array/Vector length mismatch." );
		TVector vector2 = new TVector();
		vector2.CopyFromArray( array );
		this.AssertTestVector( vector2, 1000 );
		return;
	}


	//--------------------------------------------------------------------
	[Test]
	public void Test_21_ImplicitArrayCast()
	{
		TVector vector = this.NewTestVector( 1000 );
		TValue[] array = vector;
		// Fixed typo in the assertion message ("lentgh" -> "length").
		Assert.AreEqual( array.Length, vector.Count, "Array/Vector length mismatch." );
		TVector vector2 = new TVector();
		vector2.CopyFromArray( array );
		this.AssertTestVector( vector2, 1000 );
		return;
	}


	//--------------------------------------------------------------------
	[Test]
	public void Test_30_Equality()
	{
		// Equal content must satisfy Equals, == and fail !=.
		TVector vector = this.NewTestVector( 1000 );
		TVector vector2 = new TVector();
		vector2.CopyFromArray( vector.ToArray() );
		Assert.IsTrue( vector.Equals( vector2 ) );
		Assert.IsTrue( (vector == vector2) );
		Assert.IsFalse( (vector != vector2) );
		return;
	}


	//--------------------------------------------------------------------
	[Test]
	public void Test_31_Inequality()
	{
		// A single differing element must break all equality relations.
		TVector vector = this.NewTestVector( 1000 );
		TVector vector2 = new TVector();
		vector2.CopyFromArray( vector.ToArray() );
		vector2[ vector2.Count - 1 ] = default( TValue );
		Assert.IsFalse( vector.Equals( vector2 ) );
		Assert.IsFalse( (vector == vector2) );
		Assert.IsTrue( (vector != vector2) );
		return;
	}


	//--------------------------------------------------------------------
	[Test]
	public void Test_40_SerializeByteArray()
	{
		// Round-trip through the byte-array serialization format.
		TVector vector = this.NewTestVector( 1000 );
		TVector vector2 = new TVector();
		byte[] bytes = vector.ToByteArray();
		vector2.CopyFromByteArray( bytes );
		this.AssertTestVector( vector2, 1000 );
		return;
	}


	//--------------------------------------------------------------------
	[Test]
	public void Test_41_SerializeStream()
	{
		// Round-trip through stream serialization; rewind before deserializing.
		TVector vector = this.NewTestVector( 1000 );
		TVector vector2 = new TVector();
		using( System.IO.MemoryStream buffer = new System.IO.MemoryStream() )
		{
			vector.Serialize( buffer );
			buffer.Position = 0;
			vector2.Deserialize( buffer );
		}
		this.AssertTestVector( vector2, 1000 );
		return;
	}

}


//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\
//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\


[TestFixture]
public class Test_floatVector
	: Test_Vector<DataStructures.FloatVector, float>
{
	protected override float TestValue( int Index_in )
	{ return Index_in; }
}


//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\
//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\


[TestFixture]
public class Test_doubleVector
	: Test_Vector<DataStructures.DoubleVector, double>
{
	protected override double TestValue( int Index_in )
	{ return Index_in; }
}


//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\
//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\


[TestFixture]
public class Test_byteVector
	: Test_Vector<DataStructures.ByteVector, byte>
{
	protected override byte TestValue( int Index_in )
	{
		// Wrap indexes into the byte range so large indexes stay valid.
		return (byte)(Index_in % (byte.MaxValue + 1));
	}
}


//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\
//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\


[TestFixture]
public class Test_intVector
	: Test_Vector<DataStructures.IntVector, int>
{
	protected override int TestValue( int Index_in )
	{ return Index_in; }
}


//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\
//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\


[TestFixture]
public class Test_longVector
	: Test_Vector<DataStructures.LongVector, long>
{
	protected override long TestValue( int Index_in )
	{ return Index_in; }
}


//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\
//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\


[TestFixture]
public class Test_stringVector
	: Test_Vector<DataStructures.StringVector, string>
{
	protected override string TestValue( int Index_in )
	{ return Index_in.ToString(); }
}


//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\
//|||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||\\


[TestFixture]
public class Test_DateTimeVector
	: Test_Vector<DataStructures.DateTimeVector, DateTime>
{
	protected override DateTime TestValue( int Index_in )
	{ return (new DateTime( 2000, 1, 1 )).AddDays( Index_in ); }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using Xunit;

namespace System.Xml.Tests
{
    /// <summary>
    /// Exercises XmlReader.ReadElementContentAs(typeof(DateTime), ...) against valid and
    /// invalid xsd date/time lexical forms. Many inputs deliberately interleave the text
    /// content with CDATA sections, processing instructions and comments, which the reader
    /// must coalesce before conversion. Tests named *1..*24 use the two-argument overload;
    /// *25..*50 use the (type, resolver, localName, namespaceURI) overload; the *null*
    /// tests pass a null name argument and expect ArgumentNullException.
    /// </summary>
    public class DateTimeElementContentTests
    {
        [Fact]
        public static void ReadElementContentAsDateTime1()
        {
            // Year-only form with UTC designator, split across a CDATA section.
            var reader = Utils.CreateFragmentReader("<Root><![CDATA[9]]>999Z</Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(9999, 1, 1, 0, 0, 0, 0).Add(new TimeSpan(0, 0, 0)), (DateTime)reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime10()
        {
            var reader = Utils.CreateFragmentReader("<Root> 20<?a?>02-1<![CDATA[2]]>-3<!-- Comment inbetween-->0 </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(2002, 12, 30, 0, 0, 0), (DateTime)reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime11()
        {
            var reader = Utils.CreateFragmentReader("<Root> 2000-0<![CDATA[2]]>-29T23:59:59.999<?a?>9999 </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(2000, 2, 29, 23, 59, 59).AddTicks(9999999), (DateTime)reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime12()
        {
            // Offset "-13:60" normalizes to -14:00; the expected value converts the
            // UTC instant back to local time, hence Local.GetUtcOffset + 14h.
            var reader = Utils.CreateFragmentReader("<Root> 2<?a?>00<!-- Comment inbetween-->0-02-29T23:59:5<?a?>9-13:<![CDATA[60]]> </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(2000, 2, 29, 23, 59, 59).Add(TimeZoneInfo.Local.GetUtcOffset(new DateTime(2000, 2, 29)) + new TimeSpan(14, 0, 0)), (DateTime)reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime13()
        {
            // Day 33 is out of range.
            var reader = Utils.CreateFragmentReader("<Root> 2002-12-33 </Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime14()
        {
            // A comment nested inside a CDATA section is literal text and corrupts the value.
            var reader = Utils.CreateFragmentReader("<Root>0001-<![CDATA[0<!-- Comment inbetween-->1]]>-01T0<?a?>0:00:00</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime15()
        {
            var reader = Utils.CreateFragmentReader("<Root> 20<?9?>02-1<![CDATA[2]]>-3<!-- Comment inbetween-->0 </Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime16()
        {
            // Both a numeric offset and a 'z' designator is invalid.
            var reader = Utils.CreateFragmentReader("<Root> 000<!-- Comment inbetween-->1-01-01T00:00:00-14:00z </Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime17()
        {
            var reader = Utils.CreateFragmentReader("<Root>9999-1<![CDATA[0<!-- Comment inbetween-->1]]>-31T12:59:59+14:00z</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime18()
        {
            // Seconds value 60 (leap second) is not accepted.
            var reader = Utils.CreateFragmentReader("<Root>9999-12-31T12:59:60-11:00</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime19()
        {
            var reader = Utils.CreateFragmentReader("<Root> 0 </Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime2()
        {
            var reader = Utils.CreateFragmentReader("<Root> <![CDATA[2]]>00<?a?>2-1<!-- Comment inbetween-->2-30Z </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(2002, 12, 30, 0, 0, 0, 0).Add(new TimeSpan(0, 0, 0)), (DateTime)reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime20()
        {
            // Whitespace between the value and the 'Z' designator is invalid.
            var reader = Utils.CreateFragmentReader("<Root> 9999 Z </Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime21()
        {
            var reader = Utils.CreateFragmentReader("<Root>ABCD</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime22()
        {
            var reader = Utils.CreateFragmentReader("<Root> yyyy-MM-ddTHH:mm </Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime23()
        {
            // 2100 is not a leap year and the offset minutes exceed 59.
            var reader = Utils.CreateFragmentReader("<Root>2100-02-29T23:59:59.9999999+13:60</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime24()
        {
            var reader = Utils.CreateFragmentReader("<Root> 3000-02-29T23:59:59.999999999999 -13:60 </Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime25()
        {
            var reader = Utils.CreateFragmentReader("<Root> <![CDATA[2]]>00<?a?>2-1<!-- Comment inbetween-->2-30Z </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(2002, 12, 30, 0, 0, 0, 0).Add(new TimeSpan(0, 0, 0)), reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime26()
        {
            var reader = Utils.CreateFragmentReader("<Root><![CDATA[9]]>999Z</Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(9999, 1, 1, 0, 0, 0, 0).Add(new TimeSpan(0, 0, 0)), reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime27()
        {
            var reader = Utils.CreateFragmentReader("<Root> 2<?a?>00<!-- Comment inbetween-->0-02-29T23:59:5<?a?>9-13:<![CDATA[60]]> </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(2000, 2, 29, 23, 59, 59).Add(TimeZoneInfo.Local.GetUtcOffset(new DateTime(2000, 2, 29)) + new TimeSpan(14, 0, 0)), reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime28()
        {
            var reader = Utils.CreateFragmentReader("<Root> 2000-0<![CDATA[2]]>-29T23:59:59.999<?a?>9999 </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(2000, 2, 29, 23, 59, 59).AddTicks(9999999), reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime29()
        {
            var reader = Utils.CreateFragmentReader("<Root> 20<?a?>02-1<![CDATA[2]]>-3<!-- Comment inbetween-->0 </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(2002, 12, 30, 0, 0, 0), reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime3()
        {
            var reader = Utils.CreateFragmentReader("<Root> <!-- Comment inbetween-->0002-01-01T00:00:00+00:00 </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(2, 1, 1, 0, 0, 0).Add(TimeZoneInfo.Local.GetUtcOffset(new DateTime(2, 1, 1))), (DateTime)reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime30()
        {
            var reader = Utils.CreateFragmentReader("<Root> <!-- Comment inbetween-->0002-01-01T00:00:00+00:00 </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(2, 1, 1, 0, 0, 0).Add(TimeZoneInfo.Local.GetUtcOffset(new DateTime(2, 1, 1))), reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime31()
        {
            var reader = Utils.CreateFragmentReader("<Root>0001-<![CDATA[01]]>-01T0<?a?>0:00:00<!-- Comment inbetween--></Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(1, 1, 1, 0, 0, 0), reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime32()
        {
            var reader = Utils.CreateFragmentReader("<Root>99<!-- Comment inbetween-->99-1<?a?>2-31T1<![CDATA[2]]>:59:59</Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(9999, 12, 31, 12, 59, 59), reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime33()
        {
            // Time-only form: the date portion defaults to "today", so the expected
            // value is built from DateTime.Now's date converted via UTC.
            var reader = Utils.CreateFragmentReader("<Root> 0<?a?>0:0<!-- Comment inbetween-->0:00+00:00 </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(DateTime.Now.Year, DateTime.Now.Month, DateTime.Now.Day, 0, 0, 0, DateTimeKind.Utc).ToLocalTime(), reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime34()
        {
            var reader = Utils.CreateFragmentReader("<Root>00<!-- Comment inbetween-->01</Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(1, 1, 1, 0, 0, 0), reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime35()
        {
            var reader = Utils.CreateFragmentReader("<Root> 999<!-- Comment inbetween-->9 </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(9999, 1, 1, 0, 0, 0), reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime36()
        {
            var reader = Utils.CreateFragmentReader("<Root> <![CDATA[0]]>001Z </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(1, 1, 1, 0, 0, 0, 0).Add(new TimeSpan(0, 0, 0)), reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime37()
        {
            // The second call verifies the reader still throws after a failed conversion.
            var reader = Utils.CreateFragmentReader("<Root>2100-02-29T23:59:59.9999999+13:60</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime38()
        {
            // Three-digit year is not a valid xsd lexical form.
            var reader = Utils.CreateFragmentReader("<Root>001-01-01T00:00:00+00:00</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime39()
        {
            var reader = Utils.CreateFragmentReader("<Root>0001-01-01T00:00:00-14:01Z</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime4()
        {
            var reader = Utils.CreateFragmentReader("<Root>0001-<![CDATA[01]]>-01T0<?a?>0:00:00<!-- Comment inbetween--></Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(1, 1, 1, 0, 0, 0), (DateTime)reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime40()
        {
            var reader = Utils.CreateFragmentReader("<Root>999<?9?>9-12-31T12:59:59+15:00Z</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime41()
        {
            var reader = Utils.CreateFragmentReader("<Root>9999-12-31T12:59:60-11:00</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime42()
        {
            var reader = Utils.CreateFragmentReader("<Root>0</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime43()
        {
            // The comment inside CDATA is literal text, so the value is malformed.
            var reader = Utils.CreateFragmentReader("<Root><![CDATA[9<!-- Comment inbetween-->]]>999Z</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime44()
        {
            var reader = Utils.CreateFragmentReader("<Root>ABCD</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime45()
        {
            var reader = Utils.CreateFragmentReader("<Root>yyyy-MM-ddTHH:mm</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime46()
        {
            var reader = Utils.CreateFragmentReader("<Root>2002-12-33</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime47()
        {
            var reader = Utils.CreateFragmentReader("<Root>3000-02-29T23:59:59.999999999999-13:60</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime48()
        {
            var reader = Utils.CreateFragmentReader("<Root>2100-02-29T23:59:5<![CDATA[9]]>.999999999999Z</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime49()
        {
            var reader = Utils.CreateFragmentReader("<Root>3000-02-29T2<?9?>3:59:59.99<![CDATA[9]]><?a?>99<!-- Comment inbetween-->9999999999z</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime5()
        {
            var reader = Utils.CreateFragmentReader("<Root>99<!-- Comment inbetween-->99-1<?a?>2-31T1<![CDATA[2]]>:59:59</Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(9999, 12, 31, 12, 59, 59), (DateTime)reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime50()
        {
            // Month 13 is out of range.
            var reader = Utils.CreateFragmentReader("<Root>2002-13-30</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
            Assert.Throws<XmlException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTime6()
        {
            var reader = Utils.CreateFragmentReader("<Root> 0<?a?>0:0<!-- Comment inbetween-->0:00+00:00 </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(DateTime.Now.Year, DateTime.Now.Month, DateTime.Now.Day, 0, 0, 0, DateTimeKind.Utc).ToLocalTime(), (DateTime)reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime7()
        {
            var reader = Utils.CreateFragmentReader("<Root>00<!-- Comment inbetween-->01</Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(1, 1, 1, 0, 0, 0), (DateTime)reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime8()
        {
            var reader = Utils.CreateFragmentReader("<Root> 999<!-- Comment inbetween-->9 </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(9999, 1, 1, 0, 0, 0), (DateTime)reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTime9()
        {
            var reader = Utils.CreateFragmentReader("<Root> <![CDATA[0]]>001Z </Root>");
            reader.PositionOnElement("Root");
            Assert.Equal(new DateTime(1, 1, 1, 0, 0, 0, 0).Add(new TimeSpan(0, 0, 0)), (DateTime)reader.ReadElementContentAs(typeof(DateTime), null));
        }

        [Fact]
        public static void ReadElementContentAsDateTimenull1()
        {
            // Null localName must raise ArgumentNullException before any conversion.
            var reader = Utils.CreateFragmentReader("<Root>999</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<ArgumentNullException>(() => reader.ReadElementContentAs(typeof(DateTime), null, null, ""));
            Assert.Throws<ArgumentNullException>(() => reader.ReadElementContentAs(typeof(DateTime), null, null, ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTimenull2()
        {
            var reader = Utils.CreateFragmentReader("<Root>9999</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<ArgumentNullException>(() => reader.ReadElementContentAs(typeof(DateTime), null, null, ""));
            Assert.Throws<ArgumentNullException>(() => reader.ReadElementContentAs(typeof(DateTime), null, null, ""));
        }

        [Fact]
        public static void ReadElementContentAsDateTimenull3()
        {
            // Null namespaceURI must also raise ArgumentNullException.
            var reader = Utils.CreateFragmentReader("<Root>9999</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<ArgumentNullException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", null));
            Assert.Throws<ArgumentNullException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", null));
        }

        [Fact]
        public static void ReadElementContentAsDateTimenull4()
        {
            var reader = Utils.CreateFragmentReader("<Root>999</Root>");
            reader.PositionOnElement("Root");
            Assert.Throws<ArgumentNullException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", null));
            Assert.Throws<ArgumentNullException>(() => reader.ReadElementContentAs(typeof(DateTime), null, "Root", null));
        }
    }
}
/*
 * Copyright 2002-2015 Drew Noakes
 *
 * Modified by Yakov Danilov <yakodani@gmail.com> for Imazen LLC (Ported from Java to C#)
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * More information about this project is available at:
 *
 *    https://drewnoakes.com/code/exif/
 *    https://github.com/drewnoakes/metadata-extractor
 */
using System;
using System.Collections.Generic;
using System.Linq;
using JetBrains.Annotations;
using Sharpen;

namespace Com.Drew.Metadata
{
    /// <summary>A top-level object that holds the metadata values extracted from an image.</summary>
    /// <remarks>
    /// A top-level object that holds the metadata values extracted from an image.
    /// <p>
    /// Metadata objects may contain zero or more
    /// <see cref="Directory"/>
    /// objects. Each directory may contain zero or more tags
    /// with corresponding values.
    /// </remarks>
    /// <author>Drew Noakes https://drewnoakes.com</author>
    public sealed class Metadata
    {
        // Directories are stored per concrete Directory subtype; several instances
        // of the same type may be present (e.g. multiple thumbnail directories).
        [NotNull]
        private readonly IDictionary<Type, ICollection<Com.Drew.Metadata.Directory>> _directoryListByClass = new Dictionary<Type, ICollection<Com.Drew.Metadata.Directory>>();

        /// <summary>
        /// Returns an iterable set of the
        /// <see cref="Directory"/>
        /// instances contained in this metadata collection.
        /// </summary>
        /// <returns>an iterable set of directories</returns>
        [NotNull]
        public Iterable<Com.Drew.Metadata.Directory> GetDirectories()
        {
            // The iterable is a live view over the backing map, not a snapshot.
            return new Metadata.DirectoryIterable(_directoryListByClass);
        }

        /// <summary>Returns all directories of the given type held in this collection.</summary>
        [CanBeNull]
        public ICollection<T> GetDirectoriesOfType<T>()
            where T : Com.Drew.Metadata.Directory
        {
            System.Type type = typeof(T);
            // NOTE(review): if Sharpen's Get() returns null for an absent key, the
            // LINQ query below would throw rather than return null/empty — verify
            // Get()'s semantics for missing keys.
            return (from item in _directoryListByClass.Get(type) select (T) item).ToList();
        }

        /// <summary>Returns the count of directories in this metadata collection.</summary>
        /// <returns>the number of unique directory types set for this metadata collection</returns>
        public int GetDirectoryCount()
        {
            // Sums the sizes of all per-type lists (total instances, not types).
            int count = 0;
            foreach (KeyValuePair<Type, ICollection<Com.Drew.Metadata.Directory>> pair in _directoryListByClass.EntrySet())
            {
                count += pair.Value.Count;
            }
            return count;
        }

        /// <summary>Adds a directory to this metadata collection.</summary>
        /// <param name="directory">
        /// the
        /// <see cref="Directory"/>
        /// to add into this metadata collection.
        /// </param>
        public void AddDirectory<T>([NotNull] T directory)
            where T : Com.Drew.Metadata.Directory
        {
            // Keyed by the runtime type so subtypes get their own list.
            GetOrCreateDirectoryList(directory.GetType()).Add(directory);
        }

        /// <summary>
        /// Gets the first
        /// <see cref="Directory"/>
        /// of the specified type contained within this metadata collection.
        /// If no instances of this type are present, <code>null</code> is returned.
        /// </summary>
        /// <typeparam name="T">the Directory type</typeparam>
        /// <returns>the first Directory of type T in this metadata collection, or <code>null</code> if none exist</returns>
        [CanBeNull]
        public T GetFirstDirectoryOfType<T>()
            where T : Com.Drew.Metadata.Directory
        {
            System.Type type = typeof(T);
            // We suppress the warning here as the code asserts a map signature of Class<T>,T.
            // So after get(Class<T>) it is for sure the result is from type T.
            ICollection<Com.Drew.Metadata.Directory> list = GetDirectoryList(type);
            if (list == null || list.IsEmpty())
            {
                return null;
            }
            return (T)list.Iterator().Next();
        }

        /// <summary>Indicates whether an instance of the given directory type exists in this Metadata instance.</summary>
        /// <param name="type">
        /// the
        /// <see cref="Directory"/>
        /// type
        /// </param>
        /// <returns>
        /// <code>true</code> if a
        /// <see cref="Directory"/>
        /// of the specified type exists, otherwise <code>false</code>
        /// </returns>
        public bool ContainsDirectoryOfType(Type type)
        {
            ICollection<Com.Drew.Metadata.Directory> list = GetDirectoryList(type);
            return list != null && !list.IsEmpty();
        }

        /// <summary>Indicates whether any errors were reported during the reading of metadata values.</summary>
        /// <remarks>
        /// Indicates whether any errors were reported during the reading of metadata values.
        /// This value will be true if Directory.hasErrors() is true for one of the contained
        /// <see cref="Directory"/>
        /// objects.
        /// </remarks>
        /// <returns>whether one of the contained directories has an error</returns>
        public bool HasErrors()
        {
            foreach (Com.Drew.Metadata.Directory directory in GetDirectories())
            {
                if (directory.HasErrors())
                {
                    return true;
                }
            }
            return false;
        }

        public override string ToString()
        {
            // Java-style format string ("%d %s") because Sharpen ports java.lang.String.format.
            int count = GetDirectoryCount();
            return Sharpen.Extensions.StringFormat("Metadata (%d %s)", count, count == 1 ? "directory" : "directories");
        }

        // Returns the list for the given type, or null when the type is absent
        // (see NOTE in GetDirectoriesOfType regarding Get()'s null behavior).
        [CanBeNull]
        private ICollection<Com.Drew.Metadata.Directory> GetDirectoryList(Type type)
        {
            return _directoryListByClass.Get(type);
        }

        // Returns the existing list for the type, creating and registering an
        // empty one on first use.
        [NotNull]
        private ICollection<Com.Drew.Metadata.Directory> GetOrCreateDirectoryList(System.Type type)
        {
            ICollection<Com.Drew.Metadata.Directory> collection = GetDirectoryList(type);
            if (collection != null)
            {
                return collection;
            }
            collection = new AList<Com.Drew.Metadata.Directory>();
            _directoryListByClass.Put(type, collection);
            return collection;
        }

        // Flattens the per-type lists into a single sequence of directories.
        private class DirectoryIterable : Iterable<Com.Drew.Metadata.Directory>
        {
            private readonly IDictionary<Type, ICollection<Com.Drew.Metadata.Directory>> _map;

            public DirectoryIterable(IDictionary<Type, ICollection<Com.Drew.Metadata.Directory>> map)
            {
                _map = map;
            }

            public override Sharpen.Iterator<Com.Drew.Metadata.Directory> Iterator()
            {
                return new Metadata.DirectoryIterable.DirectoryIterator(_map);
            }

            // Walks the map entry-by-entry, yielding the contents of each list in turn.
            private class DirectoryIterator : Iterator<Com.Drew.Metadata.Directory>
            {
                [NotNull]
                private readonly Iterator<KeyValuePair<Type, ICollection<Com.Drew.Metadata.Directory>>> _mapIterator;

                // Iterator over the current per-type list; null when the map is empty.
                [CanBeNull]
                private Iterator<Com.Drew.Metadata.Directory> _listIterator;

                public DirectoryIterator(IDictionary<Type, ICollection<Com.Drew.Metadata.Directory>> map)
                {
                    _mapIterator = map.EntrySet().Iterator();
                    if (_mapIterator.HasNext())
                    {
                        _listIterator = _mapIterator.Next().Value.Iterator();
                    }
                }

                public override bool HasNext()
                {
                    return _listIterator != null && (_listIterator.HasNext() || _mapIterator.HasNext());
                }

                public override Com.Drew.Metadata.Directory Next()
                {
                    if (_listIterator == null || (!_listIterator.HasNext() && !_mapIterator.HasNext()))
                    {
                        throw new NoSuchElementException();
                    }
                    // Skip ahead past any exhausted (or empty) per-type lists.
                    // NOTE(review): assumes no registered list is empty once HasNext()
                    // reported map entries remain — GetOrCreateDirectoryList only creates
                    // lists on Add, but an all-empty tail list would make this loop throw.
                    while (!_listIterator.HasNext())
                    {
                        _listIterator = _mapIterator.Next().Value.Iterator();
                    }
                    return _listIterator.Next();
                }

                public override void Remove()
                {
                    // Read-only view over the metadata collection.
                    throw new NotSupportedException();
                }
            }
        }
    }
}
namespace RiakClient.Comms
{
    using System;
    using System.Collections.Generic;
    using Commands;
    using Config;
    using Exceptions;
    using Messages;

    /// <summary>
    /// A single protocol-buffers connection to a Riak node. Every public operation
    /// translates socket exceptions into <c>RiakResult</c> failures: a
    /// <c>RiakException</c> with <c>NodeOffline</c> set (or any non-Riak exception)
    /// additionally disconnects the socket.
    /// </summary>
    internal class RiakConnection : IRiakConnection
    {
        private readonly RiakPbcSocket socket;

        public RiakConnection(IRiakNodeConfiguration nodeConfiguration, IRiakAuthenticationConfiguration authConfiguration)
        {
            socket = new RiakPbcSocket(nodeConfiguration, authConfiguration);
        }

        /// <summary>Reads one message of type <typeparamref name="TResult"/> from the socket.</summary>
        public RiakResult<TResult> PbcRead<TResult>()
            where TResult : class, new()
        {
            try
            {
                var result = socket.Read<TResult>();
                return RiakResult<TResult>.Success(result);
            }
            catch (RiakException ex)
            {
                if (ex.NodeOffline)
                {
                    Disconnect();
                }

                // Zero-length bucket/key errors are caller mistakes, not transport
                // failures, so they are classified as InvalidRequest.
                // NOTE(review): matching on the server's message text is fragile —
                // a wording change upstream would silently reclassify these.
                if (ex.Message.Contains("Bucket cannot be zero-length") ||
                    ex.Message.Contains("Key cannot be zero-length"))
                {
                    return RiakResult<TResult>.FromException(ResultCode.InvalidRequest, ex, ex.NodeOffline);
                }

                return RiakResult<TResult>.FromException(ResultCode.CommunicationError, ex, ex.NodeOffline);
            }
            catch (Exception ex)
            {
                // Unknown failure: assume the connection is unusable.
                Disconnect();
                return RiakResult<TResult>.FromException(ResultCode.CommunicationError, ex, true);
            }
        }

        /// <summary>Reads and discards a message, verifying it carries the expected message code.</summary>
        public RiakResult PbcRead(MessageCode expectedMessageCode)
        {
            try
            {
                socket.Read(expectedMessageCode);
                return RiakResult.Success();
            }
            catch (RiakException ex)
            {
                if (ex.NodeOffline)
                {
                    Disconnect();
                }

                return RiakResult.FromException(ResultCode.CommunicationError, ex, ex.NodeOffline);
            }
            catch (Exception ex)
            {
                Disconnect();
                return RiakResult.FromException(ResultCode.CommunicationError, ex, true);
            }
        }

        /// <summary>
        /// Reads messages in a loop, collecting each result, until <paramref name="repeatRead"/>
        /// returns false for the most recent result (eagerly materialized — unlike the
        /// streaming variants below).
        /// </summary>
        public RiakResult<IEnumerable<RiakResult<TResult>>> PbcRepeatRead<TResult>(Func<RiakResult<TResult>, bool> repeatRead)
            where TResult : class, new()
        {
            var results = new List<RiakResult<TResult>>();
            try
            {
                RiakResult<TResult> result;
                do
                {
                    result = RiakResult<TResult>.Success(socket.Read<TResult>());
                    results.Add(result);
                }
                while (repeatRead(result));

                return RiakResult<IEnumerable<RiakResult<TResult>>>.Success(results);
            }
            catch (RiakException ex)
            {
                if (ex.NodeOffline)
                {
                    Disconnect();
                }

                return RiakResult<IEnumerable<RiakResult<TResult>>>.FromException(ResultCode.CommunicationError, ex, ex.NodeOffline);
            }
            catch (Exception ex)
            {
                Disconnect();
                return RiakResult<IEnumerable<RiakResult<TResult>>>.FromException(ResultCode.CommunicationError, ex, true);
            }
        }

        /// <summary>Serializes and writes a request message to the socket.</summary>
        public RiakResult PbcWrite<TRequest>(TRequest request)
            where TRequest : class
        {
            try
            {
                socket.Write(request);
                return RiakResult.Success();
            }
            catch (RiakException ex)
            {
                if (ex.NodeOffline)
                {
                    Disconnect();
                }

                return RiakResult.FromException(ResultCode.CommunicationError, ex, ex.NodeOffline);
            }
            catch (Exception ex)
            {
                Disconnect();
                return RiakResult.FromException(ResultCode.CommunicationError, ex, true);
            }
        }

        /// <summary>Writes a body-less message identified only by its message code.</summary>
        public RiakResult PbcWrite(MessageCode messageCode)
        {
            try
            {
                socket.Write(messageCode);
                return RiakResult.Success();
            }
            catch (RiakException ex)
            {
                if (ex.NodeOffline)
                {
                    Disconnect();
                }

                return RiakResult.FromException(ResultCode.CommunicationError, ex, ex.NodeOffline);
            }
            catch (Exception ex)
            {
                Disconnect();
                return RiakResult.FromException(ResultCode.CommunicationError, ex, true);
            }
        }

        /// <summary>Write-then-read round trip; the read is skipped if the write failed.</summary>
        public RiakResult<TResult> PbcWriteRead<TRequest, TResult>(TRequest request)
            where TRequest : class
            where TResult : class, new()
        {
            var writeResult = PbcWrite(request);
            if (writeResult.IsSuccess)
            {
                return PbcRead<TResult>();
            }

            // Propagate the write failure wrapped in the caller's expected result type.
            return new RiakResult<TResult>(writeResult);
        }

        /// <summary>Write-then-read round trip expecting a body-less response.</summary>
        public RiakResult PbcWriteRead<TRequest>(TRequest request, MessageCode expectedMessageCode)
            where TRequest : class
        {
            var writeResult = PbcWrite(request);
            if (writeResult.IsSuccess)
            {
                return PbcRead(expectedMessageCode);
            }

            return writeResult;
        }

        /// <summary>Sends a body-less request and reads a typed response.</summary>
        public RiakResult<TResult> PbcWriteRead<TResult>(MessageCode messageCode)
            where TResult : class, new()
        {
            var writeResult = PbcWrite(messageCode);
            if (writeResult.IsSuccess)
            {
                return PbcRead<TResult>();
            }

            return new RiakResult<TResult>(writeResult);
        }

        /// <summary>Sends a body-less request and reads a body-less response.</summary>
        public RiakResult PbcWriteRead(MessageCode messageCode, MessageCode expectedMessageCode)
        {
            var writeResult = PbcWrite(messageCode);
            if (writeResult.IsSuccess)
            {
                return PbcRead(expectedMessageCode);
            }

            return writeResult;
        }

        /// <summary>Sends a request and eagerly reads the multi-message response.</summary>
        public RiakResult<IEnumerable<RiakResult<TResult>>> PbcWriteRead<TRequest, TResult>(
            TRequest request,
            Func<RiakResult<TResult>, bool> repeatRead)
            where TRequest : class
            where TResult : class, new()
        {
            var writeResult = PbcWrite(request);
            if (writeResult.IsSuccess)
            {
                return PbcRepeatRead(repeatRead);
            }

            return new RiakResult<IEnumerable<RiakResult<TResult>>>(writeResult);
        }

        /// <summary>Sends a body-less request and eagerly reads the multi-message response.</summary>
        public RiakResult<IEnumerable<RiakResult<TResult>>> PbcWriteRead<TResult>(
            MessageCode messageCode,
            Func<RiakResult<TResult>, bool> repeatRead)
            where TResult : class, new()
        {
            var writeResult = PbcWrite(messageCode);
            if (writeResult.IsSuccess)
            {
                return PbcRepeatRead(repeatRead);
            }

            return new RiakResult<IEnumerable<RiakResult<TResult>>>(writeResult);
        }

        /// <summary>
        /// Lazily streams responses; reading happens only as the caller enumerates.
        /// <paramref name="onFinish"/> runs when the stream ends (typically releasing
        /// the connection back to a pool).
        /// </summary>
        public RiakResult<IEnumerable<RiakResult<TResult>>> PbcStreamRead<TResult>(Func<RiakResult<TResult>, bool> repeatRead, Action onFinish)
            where TResult : class, new()
        {
            var streamer = PbcStreamReadIterator(repeatRead, onFinish);
            return RiakResult<IEnumerable<RiakResult<TResult>>>.Success(streamer);
        }

        /// <summary>Sends a request, then lazily streams the responses.</summary>
        public RiakResult<IEnumerable<RiakResult<TResult>>> PbcWriteStreamRead<TRequest, TResult>(
            TRequest request,
            Func<RiakResult<TResult>, bool> repeatRead,
            Action onFinish)
            where TRequest : class
            where TResult : class, new()
        {
            var streamer = PbcWriteStreamReadIterator(request, repeatRead, onFinish);
            return RiakResult<IEnumerable<RiakResult<TResult>>>.Success(streamer);
        }

        /// <summary>Sends a body-less request, then lazily streams the responses.</summary>
        public RiakResult<IEnumerable<RiakResult<TResult>>> PbcWriteStreamRead<TResult>(
            MessageCode messageCode,
            Func<RiakResult<TResult>, bool> repeatRead,
            Action onFinish)
            where TResult : class, new()
        {
            var streamer = PbcWriteStreamReadIterator(messageCode, repeatRead, onFinish);
            return RiakResult<IEnumerable<RiakResult<TResult>>>.Success(streamer);
        }

        /// <summary>Runs a command's write phase, then its read phase if the write succeeded.</summary>
        public RiakResult Execute(IRiakCommand command)
        {
            RiakResult executeResult = DoExecute(() => socket.Write(command));
            if (executeResult.IsSuccess)
            {
                executeResult = DoExecute(() => socket.Read(command));
            }

            return executeResult;
        }

        public void Dispose()
        {
            // NOTE(review): the socket is disposed before Disconnect() is called, so
            // Disconnect operates on an already-disposed socket — confirm
            // RiakPbcSocket tolerates this ordering.
            socket.Dispose();
            Disconnect();
        }

        public void Disconnect()
        {
            socket.Disconnect();
        }

        /// <summary>Shared exception-to-RiakResult translation for command execution.</summary>
        private RiakResult DoExecute(Func<RiakResult> socketFunc)
        {
            try
            {
                return socketFunc();
            }
            catch (RiakException ex)
            {
                if (ex.NodeOffline)
                {
                    Disconnect();
                }

                return RiakResult.FromException(ResultCode.CommunicationError, ex, ex.NodeOffline);
            }
            catch (Exception ex)
            {
                Disconnect();
                return RiakResult.FromException(ResultCode.CommunicationError, ex, true);
            }
        }

        private IEnumerable<RiakResult<TResult>> PbcWriteStreamReadIterator<TResult>(
            MessageCode messageCode,
            Func<RiakResult<TResult>, bool> repeatRead,
            Action onFinish)
            where TResult : class, new()
        {
            var writeResult = PbcWrite(messageCode);
            if (writeResult.IsSuccess)
            {
                return PbcStreamReadIterator(repeatRead, onFinish);
            }

            // The stream never starts, so run the cleanup callback here.
            onFinish();
            return new[] { new RiakResult<TResult>(writeResult) };
        }

        private IEnumerable<RiakResult<TResult>> PbcStreamReadIterator<TResult>(Func<RiakResult<TResult>, bool> repeatRead, Action onFinish)
            where TResult : class, new()
        {
            RiakResult<TResult> result;
            do
            {
                result = PbcRead<TResult>();
                if (!result.IsSuccess)
                {
                    break;
                }

                yield return result;
            }
            while (repeatRead(result));

            // clean up first..
            onFinish();

            // then return the failure to the client to indicate failure
            // NOTE(review): when the loop ends normally (repeatRead returned false)
            // this yields the final *successful* result a second time — confirm
            // consumers expect/tolerate that duplicate terminal item.
            yield return result;
        }

        private IEnumerable<RiakResult<TResult>> PbcWriteStreamReadIterator<TRequest, TResult>(
            TRequest request,
            Func<RiakResult<TResult>, bool> repeatRead,
            Action onFinish)
            where TRequest : class
            where TResult : class, new()
        {
            var writeResult = PbcWrite(request);
            if (writeResult.IsSuccess)
            {
                return PbcStreamReadIterator(repeatRead, onFinish);
            }

            onFinish();
            return new[] { new RiakResult<TResult>(writeResult) };
        }
    }
}
// NOTE(review): Windows Forms designer-generated file — InitializeComponent is
// regenerated by the designer; do not hand-edit its contents.
namespace XenAdmin.Wizards.NewSRWizard_Pages.Frontends
{
    partial class LVMoISCSI
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Component Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(LVMoISCSI));
            this.textBoxIscsiPort = new System.Windows.Forms.TextBox();
            this.labelColon = new System.Windows.Forms.Label();
            this.toolTipContainerIQNscan = new XenAdmin.Controls.ToolTipContainer();
            this.scanTargetHostButton = new System.Windows.Forms.Button();
            this.IscsiUseChapCheckBox = new System.Windows.Forms.CheckBox();
            this.comboBoxIscsiIqns = new System.Windows.Forms.ComboBox();
            this.comboBoxIscsiLuns = new System.Windows.Forms.ComboBox();
            this.errorLabelAtHostname = new System.Windows.Forms.Label();
            this.lunInUseLabel = new System.Windows.Forms.Label();
            this.targetLunLabel = new System.Windows.Forms.Label();
            this.IScsiChapSecretLabel = new System.Windows.Forms.Label();
            this.IScsiChapSecretTextBox = new System.Windows.Forms.TextBox();
            this.labelCHAPuser = new System.Windows.Forms.Label();
            this.IScsiChapUserTextBox = new System.Windows.Forms.TextBox();
            this.labelIscsiTargetHost = new System.Windows.Forms.Label();
            this.labelIscsiIQN = new System.Windows.Forms.Label();
            this.textBoxIscsiHost = new System.Windows.Forms.TextBox();
            this.tableLayoutPanel1 = new System.Windows.Forms.TableLayoutPanel();
            this.placeHolderLabel2 = new System.Windows.Forms.Label();
            this.placeholderLabel = new System.Windows.Forms.Label();
            this.errorLabelAtCHAPPassword = new System.Windows.Forms.Label();
            this.errorIconAtCHAPPassword = new System.Windows.Forms.PictureBox();
            this.errorIconAtHostOrIP = new System.Windows.Forms.PictureBox();
            this.label11 = new System.Windows.Forms.Label();
            this.spinnerIconAtScanTargetHostButton = new XenAdmin.Controls.SpinnerIcon();
            this.iSCSITargetGroupBox = new XenAdmin.Controls.DecentGroupBox();
            this.tableLayoutPanel2 = new System.Windows.Forms.TableLayoutPanel();
            this.errorLabelAtTargetLUN = new System.Windows.Forms.Label();
            this.errorIconAtTargetLUN = new System.Windows.Forms.PictureBox();
            this.spinnerIconAtTargetIqn = new XenAdmin.Controls.SpinnerIcon();
            this.spinnerIconAtTargetLun = new XenAdmin.Controls.SpinnerIcon();
            this.tableLayoutPanel1.SuspendLayout();
            ((System.ComponentModel.ISupportInitialize)(this.errorIconAtCHAPPassword)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.errorIconAtHostOrIP)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.spinnerIconAtScanTargetHostButton)).BeginInit();
            this.iSCSITargetGroupBox.SuspendLayout();
            this.tableLayoutPanel2.SuspendLayout();
            ((System.ComponentModel.ISupportInitialize)(this.errorIconAtTargetLUN)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.spinnerIconAtTargetIqn)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.spinnerIconAtTargetLun)).BeginInit();
            this.SuspendLayout();
            // 
            // textBoxIscsiPort
            // 
            resources.ApplyResources(this.textBoxIscsiPort, "textBoxIscsiPort");
            this.textBoxIscsiPort.Name = "textBoxIscsiPort";
            this.textBoxIscsiPort.TextChanged += new System.EventHandler(this.textBoxIscsiHost_TextChanged);
            // 
            // labelColon
            // 
            resources.ApplyResources(this.labelColon, "labelColon");
            this.labelColon.Name = "labelColon";
            // 
            // toolTipContainerIQNscan
            // 
            resources.ApplyResources(this.toolTipContainerIQNscan, "toolTipContainerIQNscan");
            this.toolTipContainerIQNscan.Name = "toolTipContainerIQNscan";
            // 
            // scanTargetHostButton
            // 
            resources.ApplyResources(this.scanTargetHostButton, "scanTargetHostButton");
            this.tableLayoutPanel1.SetColumnSpan(this.scanTargetHostButton, 2);
            this.scanTargetHostButton.Name = "scanTargetHostButton";
            this.scanTargetHostButton.Click += new System.EventHandler(this.scanTargetHostButton_Click);
            // 
            // IscsiUseChapCheckBox
            // 
            resources.ApplyResources(this.IscsiUseChapCheckBox, "IscsiUseChapCheckBox");
            this.tableLayoutPanel1.SetColumnSpan(this.IscsiUseChapCheckBox, 2);
            this.IscsiUseChapCheckBox.Name = "IscsiUseChapCheckBox";
            this.IscsiUseChapCheckBox.UseVisualStyleBackColor = true;
            this.IscsiUseChapCheckBox.CheckedChanged += new System.EventHandler(this.IscsiUseChapCheckBox_CheckedChanged);
            // 
            // comboBoxIscsiIqns
            // 
            resources.ApplyResources(this.comboBoxIscsiIqns, "comboBoxIscsiIqns");
            this.tableLayoutPanel2.SetColumnSpan(this.comboBoxIscsiIqns, 2);
            this.comboBoxIscsiIqns.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
            this.comboBoxIscsiIqns.FormattingEnabled = true;
            this.comboBoxIscsiIqns.Name = "comboBoxIscsiIqns";
            this.comboBoxIscsiIqns.SelectedIndexChanged += new System.EventHandler(this.IScsiTargetIqnComboBox_SelectedIndexChanged);
            // 
            // comboBoxIscsiLuns
            // 
            resources.ApplyResources(this.comboBoxIscsiLuns, "comboBoxIscsiLuns");
            this.tableLayoutPanel2.SetColumnSpan(this.comboBoxIscsiLuns, 2);
            this.comboBoxIscsiLuns.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
            this.comboBoxIscsiLuns.FormattingEnabled = true;
            this.comboBoxIscsiLuns.Name = "comboBoxIscsiLuns";
            this.comboBoxIscsiLuns.SelectedIndexChanged += new System.EventHandler(this.comboBoxIscsiLuns_SelectedIndexChanged);
            // 
            // errorLabelAtHostname
            // 
            resources.ApplyResources(this.errorLabelAtHostname, "errorLabelAtHostname");
            this.tableLayoutPanel1.SetColumnSpan(this.errorLabelAtHostname, 3);
            this.errorLabelAtHostname.ForeColor = System.Drawing.Color.Red;
            this.errorLabelAtHostname.Name = "errorLabelAtHostname";
            // 
            // lunInUseLabel
            // 
            resources.ApplyResources(this.lunInUseLabel, "lunInUseLabel");
            this.lunInUseLabel.Name = "lunInUseLabel";
            // 
            // targetLunLabel
            // 
            resources.ApplyResources(this.targetLunLabel, "targetLunLabel");
            this.targetLunLabel.Name = "targetLunLabel";
            // 
            // IScsiChapSecretLabel
            // 
            resources.ApplyResources(this.IScsiChapSecretLabel, "IScsiChapSecretLabel");
            this.IScsiChapSecretLabel.BackColor = System.Drawing.Color.Transparent;
            this.IScsiChapSecretLabel.Name = "IScsiChapSecretLabel";
            // 
            // IScsiChapSecretTextBox
            // 
            this.tableLayoutPanel1.SetColumnSpan(this.IScsiChapSecretTextBox, 2);
            resources.ApplyResources(this.IScsiChapSecretTextBox, "IScsiChapSecretTextBox");
            this.IScsiChapSecretTextBox.Name = "IScsiChapSecretTextBox";
            this.IScsiChapSecretTextBox.UseSystemPasswordChar = true;
            this.IScsiChapSecretTextBox.TextChanged += new System.EventHandler(this.ChapSettings_Changed);
            // 
            // labelCHAPuser
            // 
            resources.ApplyResources(this.labelCHAPuser, "labelCHAPuser");
            this.labelCHAPuser.BackColor = System.Drawing.Color.Transparent;
            this.labelCHAPuser.Name = "labelCHAPuser";
            // 
            // IScsiChapUserTextBox
            // 
            this.IScsiChapUserTextBox.AllowDrop = true;
            this.tableLayoutPanel1.SetColumnSpan(this.IScsiChapUserTextBox, 2);
            resources.ApplyResources(this.IScsiChapUserTextBox, "IScsiChapUserTextBox");
            this.IScsiChapUserTextBox.Name = "IScsiChapUserTextBox";
            this.IScsiChapUserTextBox.TextChanged += new System.EventHandler(this.ChapSettings_Changed);
            // 
            // labelIscsiTargetHost
            // 
            resources.ApplyResources(this.labelIscsiTargetHost, "labelIscsiTargetHost");
            this.labelIscsiTargetHost.BackColor = System.Drawing.Color.Transparent;
            this.tableLayoutPanel1.SetColumnSpan(this.labelIscsiTargetHost, 2);
            this.labelIscsiTargetHost.Name = "labelIscsiTargetHost";
            // 
            // labelIscsiIQN
            // 
            resources.ApplyResources(this.labelIscsiIQN, "labelIscsiIQN");
            this.labelIscsiIQN.BackColor = System.Drawing.Color.Transparent;
            this.labelIscsiIQN.Name = "labelIscsiIQN";
            // 
            // textBoxIscsiHost
            // 
            this.tableLayoutPanel1.SetColumnSpan(this.textBoxIscsiHost, 2);
            resources.ApplyResources(this.textBoxIscsiHost, "textBoxIscsiHost");
            this.textBoxIscsiHost.Name = "textBoxIscsiHost";
            this.textBoxIscsiHost.TextChanged += new System.EventHandler(this.textBoxIscsiHost_TextChanged);
            // 
            // tableLayoutPanel1
            // 
            resources.ApplyResources(this.tableLayoutPanel1, "tableLayoutPanel1");
            this.tableLayoutPanel1.Controls.Add(this.placeHolderLabel2, 0, 7);
            this.tableLayoutPanel1.Controls.Add(this.placeholderLabel, 0, 2);
            this.tableLayoutPanel1.Controls.Add(this.errorLabelAtCHAPPassword, 3, 7);
            this.tableLayoutPanel1.Controls.Add(this.errorIconAtCHAPPassword, 2, 7);
            this.tableLayoutPanel1.Controls.Add(this.errorIconAtHostOrIP, 2, 2);
            this.tableLayoutPanel1.Controls.Add(this.scanTargetHostButton, 0, 8);
            this.tableLayoutPanel1.Controls.Add(this.labelIscsiTargetHost, 0, 1);
            this.tableLayoutPanel1.Controls.Add(this.textBoxIscsiHost, 2, 1);
            this.tableLayoutPanel1.Controls.Add(this.labelColon, 3, 1);
            this.tableLayoutPanel1.Controls.Add(this.textBoxIscsiPort, 4, 1);
            this.tableLayoutPanel1.Controls.Add(this.IScsiChapUserTextBox, 2, 5);
            this.tableLayoutPanel1.Controls.Add(this.IScsiChapSecretTextBox, 2, 6);
            this.tableLayoutPanel1.Controls.Add(this.labelCHAPuser, 1, 5);
            this.tableLayoutPanel1.Controls.Add(this.IScsiChapSecretLabel, 1, 6);
            this.tableLayoutPanel1.Controls.Add(this.IscsiUseChapCheckBox, 0, 3);
            this.tableLayoutPanel1.Controls.Add(this.label11, 0, 0);
            this.tableLayoutPanel1.Controls.Add(this.errorLabelAtHostname, 3, 2);
            this.tableLayoutPanel1.Controls.Add(this.spinnerIconAtScanTargetHostButton, 2, 8);
            this.tableLayoutPanel1.Name = "tableLayoutPanel1";
            // 
            // placeHolderLabel2
            // 
            resources.ApplyResources(this.placeHolderLabel2, "placeHolderLabel2");
            this.placeHolderLabel2.ForeColor = System.Drawing.Color.Red;
            this.placeHolderLabel2.Name = "placeHolderLabel2";
            // 
            // placeholderLabel
            // 
            resources.ApplyResources(this.placeholderLabel, "placeholderLabel");
            this.placeholderLabel.ForeColor = System.Drawing.Color.Red;
            this.placeholderLabel.Name = "placeholderLabel";
            // 
            // errorLabelAtCHAPPassword
            // 
            resources.ApplyResources(this.errorLabelAtCHAPPassword, "errorLabelAtCHAPPassword");
            this.tableLayoutPanel1.SetColumnSpan(this.errorLabelAtCHAPPassword, 3);
            this.errorLabelAtCHAPPassword.ForeColor = System.Drawing.Color.Red;
            this.errorLabelAtCHAPPassword.Name = "errorLabelAtCHAPPassword";
            // 
            // errorIconAtCHAPPassword
            // 
            resources.ApplyResources(this.errorIconAtCHAPPassword, "errorIconAtCHAPPassword");
            this.errorIconAtCHAPPassword.ErrorImage = null;
            this.errorIconAtCHAPPassword.InitialImage = null;
            this.errorIconAtCHAPPassword.Name = "errorIconAtCHAPPassword";
            this.errorIconAtCHAPPassword.TabStop = false;
            // 
            // errorIconAtHostOrIP
            // 
            resources.ApplyResources(this.errorIconAtHostOrIP, "errorIconAtHostOrIP");
            this.errorIconAtHostOrIP.ErrorImage = null;
            this.errorIconAtHostOrIP.InitialImage = null;
            this.errorIconAtHostOrIP.Name = "errorIconAtHostOrIP";
            this.errorIconAtHostOrIP.TabStop = false;
            // 
            // label11
            // 
            resources.ApplyResources(this.label11, "label11");
            this.tableLayoutPanel1.SetColumnSpan(this.label11, 6);
            this.label11.Name = "label11";
            // 
            // spinnerIconAtScanTargetHostButton
            // 
            resources.ApplyResources(this.spinnerIconAtScanTargetHostButton, "spinnerIconAtScanTargetHostButton");
            this.spinnerIconAtScanTargetHostButton.Name = "spinnerIconAtScanTargetHostButton";
            this.spinnerIconAtScanTargetHostButton.SucceededImage = global::XenAdmin.Properties.Resources._000_Tick_h32bit_16;
            this.spinnerIconAtScanTargetHostButton.TabStop = false;
            // 
            // iSCSITargetGroupBox
            // 
            resources.ApplyResources(this.iSCSITargetGroupBox, "iSCSITargetGroupBox");
            this.iSCSITargetGroupBox.Controls.Add(this.tableLayoutPanel2);
            this.iSCSITargetGroupBox.Name = "iSCSITargetGroupBox";
            this.iSCSITargetGroupBox.TabStop = false;
            // 
            // tableLayoutPanel2
            // 
            resources.ApplyResources(this.tableLayoutPanel2, "tableLayoutPanel2");
            this.tableLayoutPanel2.Controls.Add(this.labelIscsiIQN, 0, 0);
            this.tableLayoutPanel2.Controls.Add(this.comboBoxIscsiIqns, 1, 0);
            this.tableLayoutPanel2.Controls.Add(this.comboBoxIscsiLuns, 1, 1);
            this.tableLayoutPanel2.Controls.Add(this.targetLunLabel, 0, 1);
            this.tableLayoutPanel2.Controls.Add(this.errorLabelAtTargetLUN, 2, 2);
            this.tableLayoutPanel2.Controls.Add(this.errorIconAtTargetLUN, 1, 2);
            this.tableLayoutPanel2.Controls.Add(this.spinnerIconAtTargetIqn, 3, 0);
            this.tableLayoutPanel2.Controls.Add(this.spinnerIconAtTargetLun, 3, 1);
            this.tableLayoutPanel2.Name = "tableLayoutPanel2";
            // 
            // errorLabelAtTargetLUN
            // 
            resources.ApplyResources(this.errorLabelAtTargetLUN, "errorLabelAtTargetLUN");
            this.tableLayoutPanel2.SetColumnSpan(this.errorLabelAtTargetLUN, 2);
            this.errorLabelAtTargetLUN.ForeColor = System.Drawing.Color.Red;
            this.errorLabelAtTargetLUN.Name = "errorLabelAtTargetLUN";
            // 
            // errorIconAtTargetLUN
            // 
            resources.ApplyResources(this.errorIconAtTargetLUN, "errorIconAtTargetLUN");
            this.errorIconAtTargetLUN.ErrorImage = null;
            this.errorIconAtTargetLUN.InitialImage = null;
            this.errorIconAtTargetLUN.Name = "errorIconAtTargetLUN";
            this.errorIconAtTargetLUN.TabStop = false;
            // 
            // spinnerIconAtTargetIqn
            // 
            resources.ApplyResources(this.spinnerIconAtTargetIqn, "spinnerIconAtTargetIqn");
            this.spinnerIconAtTargetIqn.Name = "spinnerIconAtTargetIqn";
            this.spinnerIconAtTargetIqn.SucceededImage = global::XenAdmin.Properties.Resources._000_Tick_h32bit_16;
            this.spinnerIconAtTargetIqn.TabStop = false;
            // 
            // spinnerIconAtTargetLun
            // 
            resources.ApplyResources(this.spinnerIconAtTargetLun, "spinnerIconAtTargetLun");
            this.spinnerIconAtTargetLun.Name = "spinnerIconAtTargetLun";
            this.spinnerIconAtTargetLun.SucceededImage = global::XenAdmin.Properties.Resources._000_Tick_h32bit_16;
            this.spinnerIconAtTargetLun.TabStop = false;
            // 
            // LVMoISCSI
            // 
            resources.ApplyResources(this, "$this");
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Dpi;
            this.Controls.Add(this.iSCSITargetGroupBox);
            this.Controls.Add(this.tableLayoutPanel1);
            this.Controls.Add(this.toolTipContainerIQNscan);
            this.Name = "LVMoISCSI";
            this.tableLayoutPanel1.ResumeLayout(false);
            this.tableLayoutPanel1.PerformLayout();
            ((System.ComponentModel.ISupportInitialize)(this.errorIconAtCHAPPassword)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.errorIconAtHostOrIP)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.spinnerIconAtScanTargetHostButton)).EndInit();
            this.iSCSITargetGroupBox.ResumeLayout(false);
            this.tableLayoutPanel2.ResumeLayout(false);
            this.tableLayoutPanel2.PerformLayout();
            ((System.ComponentModel.ISupportInitialize)(this.errorIconAtTargetLUN)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.spinnerIconAtTargetIqn)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.spinnerIconAtTargetLun)).EndInit();
            this.ResumeLayout(false);
            this.PerformLayout();
        }

        #endregion

        private System.Windows.Forms.TextBox textBoxIscsiPort;
        private System.Windows.Forms.Label labelColon;
        private XenAdmin.Controls.ToolTipContainer toolTipContainerIQNscan;
        private System.Windows.Forms.Button scanTargetHostButton;
        private System.Windows.Forms.CheckBox IscsiUseChapCheckBox;
        private System.Windows.Forms.ComboBox comboBoxIscsiIqns;
        private System.Windows.Forms.ComboBox comboBoxIscsiLuns;
        private System.Windows.Forms.Label errorLabelAtHostname;
        private System.Windows.Forms.Label lunInUseLabel;
        private System.Windows.Forms.Label targetLunLabel;
        private System.Windows.Forms.Label IScsiChapSecretLabel;
        private System.Windows.Forms.TextBox IScsiChapSecretTextBox;
        private System.Windows.Forms.Label labelCHAPuser;
        private System.Windows.Forms.TextBox IScsiChapUserTextBox;
        private System.Windows.Forms.Label labelIscsiTargetHost;
        private System.Windows.Forms.Label labelIscsiIQN;
        private System.Windows.Forms.TextBox textBoxIscsiHost;
        private System.Windows.Forms.TableLayoutPanel tableLayoutPanel1;
        private XenAdmin.Controls.DecentGroupBox iSCSITargetGroupBox;
        private System.Windows.Forms.Label label11;
        private System.Windows.Forms.TableLayoutPanel tableLayoutPanel2;
        private System.Windows.Forms.PictureBox errorIconAtTargetLUN;
        private System.Windows.Forms.PictureBox errorIconAtHostOrIP;
        private System.Windows.Forms.PictureBox errorIconAtCHAPPassword;
        private System.Windows.Forms.Label errorLabelAtCHAPPassword;
        private System.Windows.Forms.Label errorLabelAtTargetLUN;
        private XenAdmin.Controls.SpinnerIcon spinnerIconAtTargetIqn;
        private XenAdmin.Controls.SpinnerIcon spinnerIconAtTargetLun;
        private XenAdmin.Controls.SpinnerIcon spinnerIconAtScanTargetHostButton;
        private System.Windows.Forms.Label placeholderLabel;
        private System.Windows.Forms.Label placeHolderLabel2;
    }
}
using JeffFerguson.Gepsio.Xml.Interfaces; namespace JeffFerguson.Gepsio { /// <summary> /// An encapsulation of the XBRL element "context" as defined in the http://www.xbrl.org/2003/instance namespace. /// </summary> public class Context { private INode thisContextNode; private INode thisInstantPeriodNode; private bool thisDurationPeriod; private INode thisStartDateDurationNode; private INode thisEndDateDurationNode; /// <summary> /// The ID of this context. /// </summary> public string Id { get; private set; } /// <summary> /// Describes whether or not this context uses an instant period. Returns true if this context uses an instant /// period. Returns false is this context does not use an instant period. /// </summary> public bool InstantPeriod { get; private set; } /// <summary> /// Describes whether or not this context uses a duration period. Returns true if this context uses a duration /// period. Returns false is this context does not use a duration period. /// </summary> public bool DurationPeriod { get { if ((this.ForeverPeriod == true) || (thisDurationPeriod == true)) return true; return false; } } /// <summary> /// Describes whether or not this context uses a forever period. Returns true if this context uses a forever /// period. Returns false is this context does not use a forever period. /// </summary> public bool ForeverPeriod { get; private set; } /// <summary> /// The identifier for this context. /// </summary> public string Identifier { get; private set; } /// <summary> /// The identifier scheme for this context. /// </summary> public string IdentifierScheme { get; private set; } /// <summary> /// The segment node defined for this context. If this context was not marked up with a segment node, then /// this property will return null. /// </summary> public INode Segment { get; private set; } /// <summary> /// The scenario node defined for this context. If this context was not marked up with a scenario node, then /// this property will return null. 
/// </summary> internal INode Scenario { get; private set; } /// <summary> /// A reference to the <see cref="XbrlFragment"/> in which this context is found. /// </summary> public XbrlFragment Fragment { get; private set; } /// <summary> /// The start date of the period of this context. /// </summary> /// <remarks> /// This value of this property should be considered valid only if this context uses a duration period. /// This can be checked using the context's <see cref="DurationPeriod"/> property: /// <code> /// var myDoc = new XbrlDocument(); /// myDoc.Load("MyXbrlDoc.xml"); /// foreach(var currentFragment in myDoc.Fragments) /// { /// foreach(var currentContext in currentFragment.Contexts) /// { /// if(currentContext.DurationPeriod == true) /// { /// // value of currentContext.PeriodStartDate is valid /// } /// else /// { /// // value of currentContext.PeriodStartDate is undefined /// } /// } /// } /// </code> /// </remarks> public System.DateTime PeriodStartDate { get; private set; } /// <summary> /// The end date of the period of this context. /// </summary> /// <remarks> /// This value of this property should be considered valid only if this context uses a duration period. /// This can be checked using the context's <see cref="DurationPeriod"/> property: /// <code> /// var myDoc = new XbrlDocument(); /// myDoc.Load("MyXbrlDoc.xml"); /// foreach(var currentFragment in myDoc.Fragments) /// { /// foreach(var currentContext in currentFragment.Contexts) /// { /// if(currentContext.DurationPeriod == true) /// { /// // value of currentContext.PeriodEndDate is valid /// } /// else /// { /// // value of currentContext.PeriodEndDate is undefined /// } /// } /// } /// </code> /// </remarks> public System.DateTime PeriodEndDate { get; private set; } /// <summary> /// The date of the instant of this context. /// </summary> /// <remarks> /// This value of this property should be considered valid only if this context uses an instant period. 
/// This can be checked using the context's <see cref="InstantPeriod"/> property: /// <code> /// var myDoc = new XbrlDocument(); /// myDoc.Load("MyXbrlDoc.xml"); /// foreach(var currentFragment in myDoc.Fragments) /// { /// foreach(var currentContext in currentFragment.Contexts) /// { /// if(currentContext.InstantPeriod == true) /// { /// // value of currentContext.InstantDate is valid /// } /// else /// { /// // value of currentContext.InstantDate is undefined /// } /// } /// } /// </code> /// </remarks> public System.DateTime InstantDate { get; private set; } internal Context(XbrlFragment Fragment, INode ContextNode) { this.Fragment = Fragment; thisContextNode = ContextNode; this.Id = thisContextNode.Attributes.FindAttribute("id").Value; this.PeriodStartDate = System.DateTime.MinValue; this.PeriodEndDate = System.DateTime.MinValue; foreach (INode CurrentChild in thisContextNode.ChildNodes) { if (CurrentChild.LocalName.Equals("period") == true) ProcessPeriod(CurrentChild); else if (CurrentChild.LocalName.Equals("entity") == true) ProcessEntity(CurrentChild); else if (CurrentChild.LocalName.Equals("scenario") == true) ProcessScenario(CurrentChild); } } private void ProcessEntity(INode EntityNode) { this.Identifier = string.Empty; this.IdentifierScheme = string.Empty; this.Segment = null; this.Scenario = null; foreach (INode CurrentChild in EntityNode.ChildNodes) { if (CurrentChild.LocalName.Equals("identifier") == true) ProcessIdentifier(CurrentChild); else if (CurrentChild.LocalName.Equals("segment") == true) ProcessSegment(CurrentChild); } } private void ProcessScenario(INode ScenarioNode) { this.Scenario = ScenarioNode; } private void ProcessSegment(INode SegmentNode) { this.Segment = SegmentNode; } private void ProcessIdentifier(INode IdentifierNode) { this.Identifier = IdentifierNode.InnerText; if (IdentifierNode.Attributes["scheme"] != null) this.IdentifierScheme = IdentifierNode.Attributes["scheme"].Value; } private void ProcessPeriod(INode PeriodNode) { 
this.InstantPeriod = false; thisInstantPeriodNode = null; this.ForeverPeriod = false; thisDurationPeriod = false; thisStartDateDurationNode = null; thisEndDateDurationNode = null; foreach (INode CurrentChild in PeriodNode.ChildNodes) { if (CurrentChild.LocalName.Equals("instant") == true) { this.InstantPeriod = true; thisInstantPeriodNode = CurrentChild; var parsedInstantDate = System.DateTime.MinValue; System.DateTime.TryParse(thisInstantPeriodNode.InnerText, out parsedInstantDate); this.InstantDate = parsedInstantDate; } else if (CurrentChild.LocalName.Equals("forever") == true) this.ForeverPeriod = true; else if (CurrentChild.LocalName.Equals("startDate") == true) { thisDurationPeriod = true; thisStartDateDurationNode = CurrentChild; var parsedStartDate = System.DateTime.MinValue; System.DateTime.TryParse(thisStartDateDurationNode.InnerText, out parsedStartDate); this.PeriodStartDate = parsedStartDate; } else if (CurrentChild.LocalName.Equals("endDate") == true) { thisEndDateDurationNode = CurrentChild; var parsedEndDate = System.DateTime.MinValue; System.DateTime.TryParse(thisEndDateDurationNode.InnerText, out parsedEndDate); this.PeriodEndDate = parsedEndDate; } } } //------------------------------------------------------------------------------------ // Returns true if this context is Structure Equal (s-equal) to a supplied context, // and false otherwise. See section 4.10 of the XBRL 2.1 spec for more information. 
//------------------------------------------------------------------------------------
/// <summary>
/// Determines whether this context is Structure Equal (s-equal) to a supplied
/// context, per section 4.10 of the XBRL 2.1 specification. Two contexts are
/// s-equal when their period, entity, and scenario structures all match.
/// </summary>
internal bool StructureEquals(Context OtherContext, XbrlFragment containingFragment)
{
    return PeriodStructureEquals(OtherContext)
        && EntityStructureEquals(OtherContext, containingFragment)
        && ScenarioStructureEquals(OtherContext, containingFragment);
}

//------------------------------------------------------------------------------------
// Scenarios are equal when both are absent, or both are present and structurally
// equal; a scenario on only one side means the contexts differ.
//------------------------------------------------------------------------------------
private bool ScenarioStructureEquals(Context OtherContext, XbrlFragment containingFragment)
{
    if ((this.Scenario == null) || (OtherContext.Scenario == null))
    {
        // Equal only if neither context carries a scenario.
        return (this.Scenario == null) && (OtherContext.Scenario == null);
    }
    return this.Scenario.StructureEquals(OtherContext.Scenario, containingFragment);
}

//------------------------------------------------------------------------------------
// Entities are equal when their identifiers match and their segments are
// structurally equal.
//------------------------------------------------------------------------------------
private bool EntityStructureEquals(Context OtherContext, XbrlFragment containingFragment)
{
    return this.Identifier.Equals(OtherContext.Identifier)
        && SegmentStructureEquals(OtherContext, containingFragment);
}

//------------------------------------------------------------------------------------
// Segments are equal when both are absent, when the only present segment is
// empty, or when both are present and structurally equal.
//------------------------------------------------------------------------------------
private bool SegmentStructureEquals(Context OtherContext, XbrlFragment containingFragment)
{
    // Neither context has a <segment> node: considered equal.
    if ((this.Segment == null) && (OtherContext.Segment == null))
        return true;

    // Only the other context has a <segment>: equal only if that segment is empty.
    if (this.Segment == null)
        return OtherContext.Segment.ChildNodes.Count == 0;

    // Only this context has a <segment>: equal only if this segment is empty.
    if (OtherContext.Segment == null)
        return this.Segment.ChildNodes.Count == 0;

    // Both segments exist; compare their structure.
    return this.Segment.StructureEquals(OtherContext.Segment, containingFragment);
}

/// <summary>
/// Compares the period type of this context with the period type of another context.
/// </summary>
/// <param name="OtherContext">
/// The other context with which types should be compared.
/// </param>
/// <returns>
/// True if the two contexts have the same period type; false otherwise.
/// </returns>
internal bool PeriodTypeEquals(Context OtherContext)
{
    return (this.ForeverPeriod == OtherContext.ForeverPeriod)
        && (this.InstantPeriod == OtherContext.InstantPeriod)
        && (this.DurationPeriod == OtherContext.DurationPeriod);
}

//------------------------------------------------------------------------------------
// Periods are equal when they share a period type and the dates relevant to that
// type (instant date, or duration start/end dates) match.
//------------------------------------------------------------------------------------
private bool PeriodStructureEquals(Context OtherContext)
{
    if (PeriodTypeEquals(OtherContext) == false)
        return false;

    if (InstantPeriod && (this.InstantDate != OtherContext.InstantDate))
        return false;

    if (DurationPeriod)
    {
        if ((this.PeriodStartDate != OtherContext.PeriodStartDate) ||
            (this.PeriodEndDate != OtherContext.PeriodEndDate))
            return false;
    }

    return true;
}
}
}
/* Copyright (C) 2013-2015 MetaMorph Software, Inc Permission is hereby granted, free of charge, to any person obtaining a copy of this data, including any software or models in source or binary form, as well as any drawings, specifications, and documentation (collectively "the Data"), to deal in the Data without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Data, and to permit persons to whom the Data is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Data. THE DATA IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS, SPONSORS, DEVELOPERS, CONTRIBUTORS, OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE DATA OR THE USE OR OTHER DEALINGS IN THE DATA. ======================= This version of the META tools is a fork of an original version produced by Vanderbilt University's Institute for Software Integrated Systems (ISIS). Their license statement: Copyright (C) 2011-2014 Vanderbilt University Developed with the sponsorship of the Defense Advanced Research Projects Agency (DARPA) and delivered to the U.S. Government with Unlimited Rights as defined in DFARS 252.227-7013. 
Permission is hereby granted, free of charge, to any person obtaining a copy of this data, including any software or models in source or binary form, as well as any drawings, specifications, and documentation (collectively "the Data"), to deal in the Data without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Data, and to permit persons to whom the Data is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Data. THE DATA IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS, SPONSORS, DEVELOPERS, CONTRIBUTORS, OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE DATA OR THE USE OR OTHER DEALINGS IN THE DATA. */ namespace CyPhy2Modelica_v2 { using System; using System.Collections.Generic; using System.Diagnostics; using System.IO; using System.Linq; using System.Runtime.InteropServices; using System.Text; using System.Windows.Forms; using CyPhyGUIs; using GME.CSharp; using GME.MGA; using GME.MGA.Core; using CyPhy = ISIS.GME.Dsml.CyPhyML.Interfaces; using CyPhyClasses = ISIS.GME.Dsml.CyPhyML.Classes; /// <summary> /// This class implements the necessary COM interfaces for a GME interpreter component. /// </summary> [Guid(ComponentConfig.guid), ProgId(ComponentConfig.progID), ClassInterface(ClassInterfaceType.AutoDual)] [ComVisible(true)] public class CyPhy2Modelica_v2Interpreter : IMgaComponentEx, IGMEVersionInfo, ICyPhyInterpreter { /// <summary> /// Contains information about the GUI event that initiated the invocation. 
/// </summary>
[ComVisible(false)]
public enum ComponentStartMode
{
    GME_MAIN_START = 0,       // Not used by GME
    GME_BROWSER_START = 1,    // Right click in the GME Tree Browser window
    GME_CONTEXT_START = 2,    // Using the context menu by right clicking a model element in the GME modeling window
    GME_EMBEDDED_START = 3,   // Not used by GME
    GME_MENU_START = 16,      // Clicking on the toolbar icon, or using the main menu
    GME_BGCONTEXT_START = 18, // Using the context menu by right clicking the background of the GME modeling window
    GME_ICON_START = 32,      // Not used by GME
    GME_SILENT_MODE = 128     // Not used by GME, available to testers not using GME
}

/// <summary>
/// This function is called for each interpreter invocation before Main.
/// Don't perform MGA operations here unless you open a transaction.
/// </summary>
/// <param name="project">The handle of the project opened in GME, for which the interpreter was called.</param>
public void Initialize(MgaProject project)
{
    //GMEConsole = GMEConsole.CreateFromProject(project);
    MgaGateway = new MgaGateway(project);
    project.CreateTerritoryWithoutSink(out MgaGateway.territory);
}

#region IMgaComponentEx Members

// Gateway used to run delegates inside MGA transactions; created in Initialize,
// released in InvokeEx's finally block.
private MgaGateway MgaGateway { get; set; }

/// <summary>
/// Interactive entry point called by GME. Builds the main parameter set,
/// collects the interpreter configuration (GUI dialog), then delegates to Main.
/// COM references, the logger, and the territory are released in the finally block.
/// </summary>
/// <param name="project">Project the interpreter was invoked on.</param>
/// <param name="currentobj">The context object (must be a Test Bench).</param>
/// <param name="selectedobjs">Objects selected at invocation time.</param>
/// <param name="param">Start-mode flags (see ComponentStartMode).</param>
public void InvokeEx(MgaProject project, MgaFCO currentobj, MgaFCOs selectedobjs, int param)
{
    if (this.enabled == false)
    {
        return;
    }

    try
    {
        this.Logger = new CyPhyGUIs.GMELogger(project, this.ComponentName);

        // Need to call this interpreter in the same way as the MasterInterpreter will call it.
        // initialize main parameters
        if (currentobj == null)
        {
            this.Logger.WriteError("CyPhy2Modelica_v2 must be invoked on a Test Bench.");
            return;
        }

        var parameters = new InterpreterMainParameters()
        {
            Project = project,
            CurrentFCO = currentobj,
            SelectedFCOs = selectedobjs,
            StartModeParam = param,
            VerboseConsole = true
        };

        this.mainParameters = parameters;

        // Project directory is derived from the connection string ("MGA=<path to .mga>").
        parameters.ProjectDirectory = Path.GetDirectoryName(currentobj.Project.ProjectConnStr.Substring("MGA=".Length));

        // set up the output directory (reading names requires a transaction)
        MgaGateway.PerformInTransaction(delegate
        {
            string outputDirName = project.Name;
            if (currentobj != null)
            {
                outputDirName = currentobj.Name;
            }

            parameters.OutputDirectory = Path.GetFullPath(Path.Combine(
                parameters.ProjectDirectory,
                "results",
                outputDirName));
            //this.Parameters.PackageName = Modelica.Factory.GetModifiedName(currentobj.Name);
        });

        PreConfigArgs preConfigArgs = new PreConfigArgs();
        preConfigArgs.ProjectDirectory = parameters.ProjectDirectory;

        // call the preconfiguration with no parameters and get preconfig
        var preConfig = this.PreConfig(preConfigArgs);

        // get previous GUI config
        var previousConfig = META.ComComponent.DeserializeConfiguration(
            parameters.ProjectDirectory,
            typeof(CyPhy2Modelica_v2Settings),
            this.ComponentProgID);

        // get interpreter config through GUI
        var config = this.DoGUIConfiguration(preConfig, previousConfig);
        if (config == null)
        {
            this.Logger.WriteWarning("Operation cancelled by the user.");
            return;
        }

        // if config is valid save it and update it on the file system
        META.ComComponent.SerializeConfiguration(parameters.ProjectDirectory, config, this.ComponentProgID);

        // assign the new configuration to mainParameters
        parameters.config = config;

        // call the main (ICyPhyComponent) function
        this.Main(parameters);
    }
    finally
    {
        // Dispose logger and territory, drop COM references, and force collection so
        // GME does not keep the interpreter (and the project) pinned.
        if (this.Logger != null)
        {
            this.Logger.Dispose();
            this.Logger = null;
        }

        if (MgaGateway != null && MgaGateway.territory != null)
        {
            MgaGateway.territory.Destroy();
        }

        MgaGateway = null;
        project = null;
        currentobj = null;
        selectedobjs = null;
        GC.Collect();
        GC.WaitForPendingFinalizers();
        GC.Collect();
        GC.WaitForPendingFinalizers();
    }
}

#region Component Information

// Interpreter name as shown by GME; taken from the class name.
public string ComponentName
{
    get { return GetType().Name; }
}

public string ComponentProgID
{
    get { return ComponentConfig.progID; }
}

public componenttype_enum ComponentType
{
    get { return ComponentConfig.componentType; }
}

public string Paradigm
{
    get { return ComponentConfig.paradigmName; }
}

#endregion

#region Enabling

bool enabled = true;

// Called by GME to enable/disable this component; InvokeEx returns early when disabled.
public void Enable(bool newval)
{
    enabled = newval;
}

#endregion

#region Interactive Mode

protected bool interactiveMode = true;

public bool InteractiveMode
{
    get { return interactiveMode; }
    set { interactiveMode = value; }
}

#endregion

#region Custom Parameters

// Lazily-created store for parameters set via set_ComponentParameter.
SortedDictionary<string, object> componentParameters = null;

// Returns well-known metadata for "type"/"path"/"fullname", otherwise looks up
// a previously stored parameter; null when unknown.
public object get_ComponentParameter(string Name)
{
    if (Name == "type")
        return "csharp";

    if (Name == "path")
        return GetType().Assembly.Location;

    if (Name == "fullname")
        return GetType().FullName;

    object value;
    if (componentParameters != null && componentParameters.TryGetValue(Name, out value))
    {
        return value;
    }

    return null;
}

public void set_ComponentParameter(string Name, object pVal)
{
    if (componentParameters == null)
    {
        componentParameters = new SortedDictionary<string, object>();
    }

    componentParameters[Name] = pVal;
}

#endregion

#region Unused Methods

// Old interface, it is never called for MgaComponentEx interfaces
public void Invoke(MgaProject Project, MgaFCOs selectedobjs, int param)
{
    throw new NotImplementedException();
}

// Not used by GME
public void ObjectsInvokeEx(MgaProject Project, MgaObject currentobj, MgaObjects selectedobjs, int param)
{
    throw new NotImplementedException();
}

#endregion

#endregion

#region IMgaVersionInfo Members

public GMEInterfaceVersion_enum version
{
    get { return GMEInterfaceVersion_enum.GMEInterfaceVersion_Current; }
}

#endregion

#region Registration Helpers

// COM registration hooks: register/unregister this component in the GME registry.
[ComRegisterFunctionAttribute]
public static void GMERegister(Type t)
{
    Registrar.RegisterComponentsInGMERegistry();
}

[ComUnregisterFunctionAttribute]
public static void GMEUnRegister(Type t)
{
    Registrar.UnregisterComponentsInGMERegistry();
}

#endregion

#region Dependent Interpreters

/// <summary>
/// Runs the CyPhyElaborateCS interpreter on the current object and copies its
/// traceability map into this run's result. Returns false on any exception.
/// </summary>
/// <param name="expand">NOTE(review): declared but not used in the visible body — TODO confirm.</param>
private bool CallElaborator(
    MgaProject project,
    MgaFCO currentobj,
    MgaFCOs selectedobjs,
    int param,
    bool expand = true)
{
    bool result = false;
    try
    {
        this.Logger.WriteDebug("Elaborating model...");
        var elaborator = new CyPhyElaborateCS.CyPhyElaborateCSInterpreter();
        elaborator.Initialize(project);

        // 128 = quiet; 3 = verbose console output.
        int verbosity = 128;
        if (this.mainParameters.VerboseConsole)
        {
            verbosity = 3;
        }

        result = elaborator.RunInTransaction(project, currentobj, selectedobjs, verbosity);

        if (this.result.Traceability == null)
        {
            this.result.Traceability = new META.MgaTraceability();
        }

        if (elaborator.Traceability != null)
        {
            elaborator.Traceability.CopyTo(this.result.Traceability);
        }

        this.Logger.WriteDebug("Elaboration is done.");
    }
    catch (Exception ex)
    {
        this.Logger.WriteError("Exception occurred in Elaborator : {0}", ex.ToString());
        result = false;
    }

    return result;
}

/// <summary>
/// Resolves the referenced cyber model file (.mga, importing from .xme if needed),
/// opens it as a separate MGA project, locates the referenced object, and invokes
/// the model-type-specific Cyber code generator on it. Returns false on any
/// validation failure or exception.
/// </summary>
private bool CallCyberInterpreter(CyPhy.CyberModel cyberModel)
{
    bool success = true;
    string cyberModelPath = string.Empty;

    // checks
    if (string.IsNullOrWhiteSpace(cyberModel.Attributes.FileRef))
    {
        this.Logger.WriteError("[Cyber] Model filename attribute is empty: {0}", cyberModel.ToHyperLink());
        return false;
    }

    // FileRef may be absolute or relative to the project directory.
    if (Path.IsPathRooted(cyberModel.Attributes.FileRef))
    {
        cyberModelPath = cyberModel.Attributes.FileRef;
    }
    else
    {
        cyberModelPath = Path.Combine(this.mainParameters.ProjectDirectory, cyberModel.Attributes.FileRef);
    }

    string cyberModelMgaPath = string.Empty;
    string cyberModelXmePath = string.Empty;
    bool requiresImport = false;

    // NOTE(review): GetFileNameWithoutExtension drops the directory part, so these
    // paths are relative to the working directory — TODO confirm this is intended.
    cyberModelMgaPath = Path.GetFileNameWithoutExtension(cyberModelPath) + ".mga";
    cyberModelXmePath = Path.GetFileNameWithoutExtension(cyberModelPath) + ".xme";

    if (Path.GetExtension(cyberModelPath) == ".mga")
    {
        if (File.Exists(cyberModelMgaPath) == false)
        {
            // No .mga yet; fall back to importing the .xme if it exists.
            requiresImport = true;
            if (File.Exists(cyberModelXmePath) == false)
            {
                this.Logger.WriteError("[Cyber] Model filename does not exist: {0} {1}", cyberModel.ToHyperLink(), cyberModelPath);
                return false;
            }
        }
    }
    else if (Path.GetExtension(cyberModelPath) == ".xme")
    {
        requiresImport = true;
        if (File.Exists(cyberModelXmePath) == false)
        {
            this.Logger.WriteError("[Cyber] Model filename does not exist: {0} {1}", cyberModel.ToHyperLink(), cyberModelPath);
            return false;
        }
    }
    else
    {
        this.Logger.WriteError("[Cyber] Model filename attribute has unknown extension (valid: [mga|xme]): {0} {1}", cyberModel.ToHyperLink(), Path.GetExtension(cyberModelPath));
        return false;
    }

    MgaProject cyberProject = new MgaProject();
    if (requiresImport)
    {
        // FIXME: this will throw an exception if xme is referenced mga exists and it is being used.
        MgaUtils.ImportXME(cyberModelXmePath, cyberModelMgaPath);
    }

    try
    {
        bool ro_mode;

        // FIXME: any race conditions here???
        // FIXME: for SoT we need to copy the referenced xme/mgas
        cyberProject.Open("MGA=" + cyberModelMgaPath, out ro_mode);

        // Convert the dotted resource path into an MGA object path ("/@a/@b/...").
        string cyberComponentPath = "";
        if (cyberModel.Attributes.FilePathWithinResource.Contains('.'))
        {
            cyberComponentPath = cyberModel.Attributes.FilePathWithinResource.Substring(cyberModel.Attributes.FilePathWithinResource.IndexOf('.')).Replace(".", "/@");
        }

        this.Logger.WriteInfo("[Cyber] {0} --> {1}", cyberModel.Attributes.FilePathWithinResource, cyberComponentPath);

        // Object lookup requires a (read) transaction on the cyber project.
        var terr = cyberProject.BeginTransactionInNewTerr();
        MgaFCO currentObj = cyberProject.ObjectByPath[cyberComponentPath] as MgaFCO;
        cyberProject.AbortTransaction();
        terr.Destroy();

        if (currentObj == null)
        {
            this.Logger.WriteError("[Cyber] Referenced cyber object was not found in model: {0} {1} {2}", cyberModel.ToHyperLink(), cyberModelPath, cyberModel.Attributes.FilePathWithinResource);
            return false;
        }

        // Cyber model type and interpreter progid map. Each cyber model type has a different interpreter.
        Dictionary<CyPhyClasses.CyberModel.AttributesClass.ModelType_enum, string> interpreterMap =
            new Dictionary<CyPhyClasses.CyberModel.AttributesClass.ModelType_enum, string>()
        {
            //{ CyPhyClasses.CyberModel.AttributesClass.ModelType_enum.ESMoL, ""},
            //{ CyPhyClasses.CyberModel.AttributesClass.ModelType_enum.SignalFlow, ""},
            { CyPhyClasses.CyberModel.AttributesClass.ModelType_enum.Simulink, "MGA.Interpreter.Cyber2SLC_CodeGen" }
        };

        // call appropriate Cyber interpreter
        Type tCyber = Type.GetTypeFromProgID(interpreterMap[cyberModel.Attributes.ModelType]);
        if (tCyber == null)
        {
            // NOTE(review): logs the failure but does not return — Activator.CreateInstance(null)
            // below would throw; TODO confirm whether an early return is missing here.
            this.Logger.WriteError("[Cyber] Cannot instantiate Cyber interpreter: {0} {1}", cyberModel.ToHyperLink(), interpreterMap[cyberModel.Attributes.ModelType]);
        }

        IMgaComponentEx cyberCodeGenerator = Activator.CreateInstance(tCyber) as IMgaComponentEx;
        cyberCodeGenerator.Initialize(cyberProject);

        var cyberOutputDir = Path.Combine(this.mainParameters.OutputDirectory, Modelica.CodeGenerator.MainPackage);
        Directory.CreateDirectory(cyberOutputDir);

        // Run the generator non-interactively, writing into the Modelica package dir.
        cyberCodeGenerator.ComponentParameter["output_dir"] = cyberOutputDir;
        cyberCodeGenerator.ComponentParameter["automation"] = "true";
        cyberCodeGenerator.ComponentParameter["console_messages"] = "off";

        this.Logger.WriteInfo("Generating code for Cyber [{0}] elements...", cyberModel.Attributes.ModelType);
        System.Windows.Forms.Application.DoEvents();

        MgaFCOs selectedobjs = (MgaFCOs)Activator.CreateInstance(Type.GetTypeFromProgID("Mga.MgaFCOs"));
        cyberCodeGenerator.InvokeEx(cyberProject, currentObj, selectedobjs, 128);

        this.Logger.WriteInfo("Cyber [{0}] code generation is done.", cyberModel.Attributes.ModelType);
        System.Windows.Forms.Application.DoEvents();
    }
    catch (Exception ex)
    {
        this.Logger.WriteError("Cyber exception occured: {0}", ex);
        success = false;
    }

    return success;
}

/// <summary>
/// Finds every CyberModel below the given FCO and runs the Cyber interpreter on
/// each; true only when all succeed.
/// </summary>
private bool CallCyber(MgaProject mgaProject, MgaFCO mgaFCO, MgaFCOs mgaFCOs, int param)
{
    var filter = mgaProject.CreateFilter();
    filter.Kind = typeof(CyPhy.CyberModel).Name;
    bool success = true;
    var cyberModelFcos = (mgaFCO as MgaModel).GetDescendantFCOs(filter);
    foreach (var cyberModelFco in cyberModelFcos)
    {
        // NOTE(review): short-circuit && skips remaining models after the first failure.
        success = success && this.CallCyberInterpreter(CyPhyClasses.CyberModel.Cast(cyberModelFco as MgaObject));
    }

    return success;
}

#endregion

#region CyPhyGUIs

/// <summary>
/// Result of the latest run of this interpreter.
/// </summary>
private InterpreterResult result = new InterpreterResult();

/// <summary>
/// Parameter of this run.
/// </summary>
private InterpreterMainParameters mainParameters { get; set; }

/// <summary>
/// Output directory where all files must be generated
/// </summary>
private string OutputDirectory
{
    get
    {
        return this.mainParameters.OutputDirectory;
    }
}

// Records a named step's success into the overall result (AND-accumulated),
// enqueues its elapsed time for PrintRuntimeStatistics, and logs the outcome.
private void UpdateSuccess(string message, bool success)
{
    this.result.Success = this.result.Success && success;
    this.runtime.Enqueue(new Tuple<string, TimeSpan>(message, DateTime.Now - this.startTime));
    if (success)
    {
        this.Logger.WriteDebug("{0} : OK", message);
    }
    else
    {
        this.Logger.WriteError("{0} : FAILED", message);
    }
}

/// <summary>
/// Name of the log file. (It is not a full path)
/// </summary>
private string LogFileFilename { get; set; }

/// <summary>
/// Full path to the log file.
/// </summary>
private string LogFilePath
{
    get
    {
        return Path.Combine(this.result.LogFileDirectory, this.LogFileFilename);
    }
}

public CyPhyGUIs.GMELogger Logger { get; set; }

/// <summary>
/// ProgId of the configuration class of this interpreter.
/// </summary>
public string InterpreterConfigurationProgId
{
    get
    {
        return (typeof(CyPhy2Modelica_v2Settings).GetCustomAttributes(typeof(ProgIdAttribute), false)[0] as ProgIdAttribute).Value;
    }
}

/// <summary>
/// Preconfig gets called first. No transaction is open, but one may be opened.
/// In this function model may be processed and some object ids get serialized
/// and returned as preconfiguration (project-wise configuration).
/// </summary>
/// <param name="preConfigParameters">Parameters supplied by the caller; only ProjectDirectory is used here.</param>
/// <returns>Null if no configuration is required by the DoGUIConfig.</returns>
public IInterpreterPreConfiguration PreConfig(IPreConfigParameters preConfigParameters)
{
    // No model processing needed; just carry the project directory forward.
    var preConfig = new CyPhy2Modelica_v2PreConfiguration()
    {
        ProjectDirectory = preConfigParameters.ProjectDirectory
    };

    return preConfig;
}

/// <summary>
/// Shows a form for the user to select/change settings through a GUI. All interactive
/// GUI operations MUST happen within this function scope.
/// </summary>
/// <param name="preConfig">Result of PreConfig</param>
/// <param name="previousConfig">Previous configuration to initialize the GUI.</param>
/// <returns>Null if operation is cancelled by the user. Otherwise returns with a new
/// configuration object.</returns>
public IInterpreterConfiguration DoGUIConfiguration(
    IInterpreterPreConfiguration preConfig,
    IInterpreterConfiguration previousConfig)
{
    DialogResult ok = DialogResult.Cancel;

    // Fall back to default settings when no (usable) previous configuration exists.
    var settings = previousConfig as CyPhy2Modelica_v2Settings;
    if (settings == null)
    {
        settings = new CyPhy2Modelica_v2Settings();
    }

    using (MainForm mf = new MainForm(settings, (preConfig as CyPhy2Modelica_v2PreConfiguration).ProjectDirectory))
    {
        // show main form; the dialog mutates `settings` in place
        ok = mf.ShowDialog();
    }

    if (ok == DialogResult.OK)
    {
        return settings;
    }

    return null;
}

// Per-step elapsed times recorded by UpdateSuccess; dumped by PrintRuntimeStatistics.
private Queue<Tuple<string, TimeSpan>> runtime = new Queue<Tuple<string, TimeSpan>>();
private DateTime startTime = DateTime.Now;

/// <summary>
/// No GUI and interactive elements are allowed within this function.
/// Creates a logger if one was not provided (and disposes it again), logs version
/// info, then delegates all work to MainThrows; any exception is caught, logged,
/// and recorded as failure.
/// </summary>
/// <param name="parameters">Main parameters for this run and GUI configuration.</param>
/// <returns>Result of the run, which contains a success flag.</returns>
public IInterpreterResult Main(IInterpreterMainParameters parameters)
{
    // Only dispose the logger here if this method created it (InvokeEx owns its own).
    bool disposeLogger = false;
    try
    {
        if (this.Logger == null)
        {
            this.Logger = new CyPhyGUIs.GMELogger(parameters.Project, this.ComponentName);
            disposeLogger = true;
        }

        this.Logger.WriteInfo("Running CyPhy2Modelica 2.0");
        System.Windows.Forms.Application.DoEvents();

        // Gather version info asynchronously, then block for the header text.
        var asyncResult = this.Logger.LoggingVersionInfo.BeginInvoke(parameters.Project, null, null);
        var header = this.Logger.LoggingVersionInfo.EndInvoke(asyncResult);
        this.Logger.WriteDebug(header);

        MainThrows(parameters);
    }
    catch (Exception ex)
    {
        this.Logger.WriteError("Exception was thrown : {0}", ex.ToString());
        this.result.Success = false;
    }
    finally
    {
        if (disposeLogger && this.Logger != null)
        {
            this.Logger.Dispose();
            this.Logger = null;
        }
    }

    return this.result;
}

/// <summary>
/// Actual implementation of Main, does not catch general exceptions.
/// </summary>
/// <param name="parameters">Main parameters for this run and GUI configuration.</param>
/// <returns>Result of the run, which contains a success flag.</returns>
public IInterpreterResult MainThrows(IInterpreterMainParameters parameters)
{
    this.runtime.Clear();
    this.mainParameters = (InterpreterMainParameters)parameters;
    this.Factory = new CyPhyTypeFactory(parameters.Project.RootMeta);
    var configSuccess = this.mainParameters != null;

    // When called without a config (e.g. from the master interpreter), try the
    // serialized configuration on disk, else fall back to defaults.
    if (configSuccess && this.mainParameters.config == null)
    {
        var config = META.ComComponent.DeserializeConfiguration(this.mainParameters.ProjectDirectory, typeof(CyPhy2Modelica_v2Settings), this.ComponentProgID) as CyPhy2Modelica_v2Settings;
        if (config != null)
        {
            this.mainParameters.config = config;
        }
        else
        {
            this.mainParameters.config = new CyPhy2Modelica_v2Settings();
        }

        configSuccess = this.mainParameters.config != null;
    }

    if (this.result.Traceability == null)
    {
        this.result.Traceability = new META.MgaTraceability();
    }

    // getting traceability from caller, like master interpreter
    if (this.mainParameters.Traceability != null)
    {
        this.mainParameters.Traceability.CopyTo(this.result.Traceability);
    }

    // If called from InvokeEx logger is already created
    bool disposeLogger = false;
    if (this.Logger == null)
    {
        this.Logger = new CyPhyGUIs.GMELogger(this.mainParameters.Project, this.ComponentName);
        disposeLogger = true;
    }

    // this.Logger.MakeVersionInfoHeaderAsync();
    // Verbose console also switches on generation of unused components.
    if (this.mainParameters.VerboseConsole)
    {
        this.Logger.GMEConsoleLoggingLevel = SmartLogger.MessageType_enum.Debug;
        this.Logger.WriteDebug("Components not used in this design will still be generated in the Modelica Package.");
        this.GenerateUnusedComponents = true;
    }
    else
    {
        this.Logger.GMEConsoleLoggingLevel = SmartLogger.MessageType_enum.Info;
        this.Logger.WriteDebug("Components not used in this design will not be generated in the Modelica Package. Use verbose console to enable this option.");
        this.GenerateUnusedComponents = false;
    }

    this.result.Traceability.CopyTo(this.Logger.Traceability);
    this.UpdateSuccess("Configuration", configSuccess);
    this.result.Labels = "OpenModelica && py_modelica12.08";
    this.result.RunCommand = "om_simulate.py";

    //var elaboratorSuccess = this.CallElaboratorOLD(this.mainParameters.Project,
    //    this.mainParameters.CurrentFCO, this.mainParameters.SelectedFCOs, this.mainParameters.StartModeParam);
    try
    {
        // All model work happens inside one non-nested transaction; abort: true means
        // the transaction makes no changes to the model.
        MgaGateway.PerformInTransaction(delegate
        {
            this.WorkInMainTransaction();
        },
        transactiontype_enum.TRANSACTION_NON_NESTED,
        abort: true);

        this.PrintRuntimeStatistics();

        if (this.result.Success)
        {
            this.Logger.WriteInfo("CyPhy2Modelica 2.0 finished successfully.");
            this.Logger.WriteInfo("Generated files are here: <a href=\"file:///{0}\" target=\"_blank\">{0}</a>", this.mainParameters.OutputDirectory);
            this.Logger.WriteDebug("[SUCCESS: {0}, Labels: {1}]", this.result.Success, this.result.Labels);
        }
        else
        {
            this.Logger.WriteError("CyPhy2Modelica 2.0 failed! See error messages above.");
        }
    }
    finally
    {
        // Same cleanup discipline as InvokeEx: release logger/territory/COM state.
        if (disposeLogger && this.Logger != null)
        {
            this.Logger.Dispose();
            this.Logger = null;
        }

        if (MgaGateway != null && MgaGateway.territory != null)
        {
            MgaGateway.territory.Destroy();
        }

        MgaGateway = null;
        GC.Collect();
        GC.WaitForPendingFinalizers();
        GC.Collect();
        GC.WaitForPendingFinalizers();
    }

    return this.result;
}

/// <summary>
/// Verifies the generated package by running Dymola's checkModel through a
/// generated .mos script. Returns true when the check succeeds, and also when
/// Dymola is not installed (the check is skipped); false on check failure,
/// timeout, or exception.
/// </summary>
private bool CheckGeneratedModelWithDymola(Modelica.ComponentMap modelicaURIMaps)
{
    var modelicaSettings = this.mainParameters.config as CyPhy2Modelica_v2Settings;
    int secondsToWait = 120;
    var dymolaExe = MainForm.DymolaExe;
    var result = false;
    if (File.Exists(dymolaExe) == false)
    {
        //GMEConsole.Warning.WriteLine("Dymola install was not found.");
        this.Logger.WriteWarning("Dymola install was not found.");
        this.Logger.WriteWarning("The generate code cannot be checked with Dymola.");
        return true;
    }
    else
    {
        this.Logger.WriteDebug("Dymola install was found: " + dymolaExe);
    }

    try
    {
        var tb = CyPhyClasses.TestBench.Cast(this.mainParameters.CurrentFCO);

        // generate script file for model check
        StringBuilder sb = new StringBuilder();
        sb.AppendLine("Advanced.TranslationInCommandLog = true");
        sb.AppendLine("openModel(\"CyPhy\\package.mo\")");
        sb.AppendLine("checkModel(\"" + Modelica.CodeGenerator.MainPackage + ".TestBenches." + tb.Name + "\")");
        sb.AppendLine("savelog(\"checkModelDymola.txt\")");
        sb.AppendLine("exit()");

        string checkModelMos = Path.Combine(this.mainParameters.OutputDirectory, Modelica.CodeGenerator.MainPackage, "checkModelDymola.mos");
        File.WriteAllText(checkModelMos, sb.ToString());

        Process p = new Process();
        p.StartInfo = new ProcessStartInfo()
        {
            Arguments = " /nowindow " + checkModelMos,
            FileName = dymolaExe,
            CreateNoWindow = true,
            UseShellExecute = false,
            WorkingDirectory = this.mainParameters.OutputDirectory
        };

        // Replace any inherited MODELICAPATH with the generated Libraries directory.
        if (p.StartInfo.EnvironmentVariables["MODELICAPATH"] != null)
        {
            p.StartInfo.EnvironmentVariables.Remove("MODELICAPATH");
        }

        var modelicaPath = Path.GetFullPath(Path.Combine(this.mainParameters.OutputDirectory, "Libraries"));
        p.StartInfo.EnvironmentVariables.Add("MODELICAPATH", modelicaPath);

        this.Logger.WriteInfo("Checking model with Dymola...");
        this.Logger.WriteDebug("DymolaExe : {0}", dymolaExe);
        this.Logger.WriteDebug("MODELICAPATH : {0}", modelicaPath);
        System.Windows.Forms.Application.DoEvents();
        p.Start();

        // Bounded wait: kill the process if it does not finish (e.g. license prompt).
        if (p.WaitForExit(1000 * secondsToWait) == false)
        {
            p.Kill();
            this.Logger.WriteError("Dymola checking process did not exit within {0} seconds - the process was killed.", secondsToWait);
            this.Logger.WriteError("Do you have a Dymola license? If so please leave a Dymola instance open (it will keep the license checked out).");
            return result;
        }

        // read generated results back
        string s = File.ReadAllText(Path.Combine(this.mainParameters.OutputDirectory, Modelica.CodeGenerator.MainPackage, "checkModelDymola.txt"));

        // display it on the GME console
        if (s.Contains("Check of " + Modelica.CodeGenerator.MainPackage + ".TestBenches." + tb.Name + " successful."))
        {
            // TODO: maybe check for next line = true?
            this.Logger.WriteInfo("Successful Dymola check.");
            result = true;
        }
        else
        {
            result = false;
            this.Logger.WriteError("Failed Dymola check.");

            // Replay only the log section between the TranslationInCommandLog marker
            // and the savelog command, converting model URIs to GME hyperlinks.
            var startRecording = false;
            foreach (var line in s.Split(Environment.NewLine.ToArray()).Where(l => string.IsNullOrWhiteSpace(l) == false))
            {
                if (startRecording == false)
                {
                    if (line.Contains("Advanced.TranslationInCommandLog = true"))
                    {
                        startRecording = true;
                    }
                }
                else
                {
                    if (line.Contains("savelog(\"checkModelDymola.txt\")"))
                    {
                        break;
                    }

                    this.Logger.WriteError(TryConvertErrorStringToHyperLink(line, modelicaURIMaps));
                }
            }
        }
    }
    catch (Exception ex)
    {
        //Trace.TraceError(ex.ToString());
        this.Logger.WriteError(ex.ToString());
        result = false;
    }

    return result;
}

/// <summary>
/// Verifies the generated package by running the OpenModelica compiler (omc) on a
/// generated .mos script. Returns true when the check succeeds, and also when
/// OpenModelica is not installed (the check is skipped); false on check failure
/// or exception.
/// </summary>
private bool CheckGeneratedModelWithOpenModelica(Modelica.ComponentMap modelicaURIMaps, ModelConfig modelConfig)
{
    var modelicaSettings = this.mainParameters.config as CyPhy2Modelica_v2Settings;
    var omcExe = MainForm.OMCExe;
    var result = false;
    if (File.Exists(omcExe) == false)
    {
        this.Logger.WriteWarning("OpenModelica installation was not found.");
        this.Logger.WriteWarning("The generate code cannot be checked with OpenModelica.");
        return true;
    }
    else
    {
        this.Logger.WriteDebug("OpenModelica compiler was found: " + omcExe);
    }

    try
    {
        var tb = CyPhyClasses.TestBench.Cast(this.mainParameters.CurrentFCO);
        var externalLibraries = new List<String>();
        externalLibraries = modelConfig.lib_package_names;
        var mslVersion = modelConfig.MSL_version;

        // generate script file for model check
        StringBuilder sb = new StringBuilder();
        sb.AppendLine("// Checking model in OpenModelica");
        sb.AppendLine(string.Format("loadModel(Modelica, {{\"{0}\"}});", mslVersion));
        foreach (var item in externalLibraries)
        {
            sb.AppendLine(string.Format("loadModel({0});", item));
        }

        sb.AppendLine("loadFile(\"CyPhy/package.mo\");");
        sb.AppendLine(string.Format("checkStr := checkModel({0}.TestBenches.{1});", Modelica.CodeGenerator.MainPackage, tb.Name));
        sb.AppendLine("writeFile(\"CyPhy/checkModelOpenModelica.txt\", checkStr);");

        string checkModelMos = Path.Combine(this.mainParameters.OutputDirectory, Modelica.CodeGenerator.MainPackage, "checkModelOpenModelica.mos");
        File.WriteAllText(checkModelMos, sb.ToString());

        // call mos file to check model
        Process p = new Process();
        p.StartInfo = new ProcessStartInfo()
        {
            Arguments = string.Format("+q +s \"{0}\"", checkModelMos),
            FileName = omcExe,
            CreateNoWindow = true,
            UseShellExecute = false,
            WorkingDirectory = this.mainParameters.OutputDirectory
        };

        // Point OPENMODELICALIBRARY at the installed omlibrary plus generated Libraries.
        if (p.StartInfo.EnvironmentVariables["OPENMODELICALIBRARY"] != null)
        {
            p.StartInfo.EnvironmentVariables.Remove("OPENMODELICALIBRARY");
        }

        var modelicaPath = Path.Combine(Environment.GetEnvironmentVariable("OPENMODELICAHOME"), "lib", "omlibrary") +
            ";" +
            Path.GetFullPath(Path.Combine(this.mainParameters.OutputDirectory, "Libraries"));

        p.StartInfo.EnvironmentVariables.Add("OPENMODELICALIBRARY", modelicaPath);

        this.Logger.WriteInfo("Checking model with OpenModelica...");
        this.Logger.WriteDebug("OmcExe : {0}", omcExe);
        this.Logger.WriteDebug("OPENMODELICALIBRARY : {0}", modelicaPath);
        System.Windows.Forms.Application.DoEvents();
        p.Start();

        // NOTE(review): unbounded wait here, unlike the Dymola check's timeout.
        p.WaitForExit();

        // read generated results back
        string s = File.ReadAllText(Path.Combine(this.mainParameters.OutputDirectory, Modelica.CodeGenerator.MainPackage, "checkModelOpenModelica.txt"));

        // display it on the GME console
        if (s.Contains("Check of " + Modelica.CodeGenerator.MainPackage + ".TestBenches." + tb.Name + " completed successfully."))
        {
            // TODO: maybe check for next line = true?
            this.Logger.WriteInfo("Successful OpenModelica check.");
            result = true;
        }
        else
        {
            result = false;
            this.Logger.WriteError("Failed OpenModelica check.");
            s.Split(Environment.NewLine.ToArray())
                .Where(l => string.IsNullOrWhiteSpace(l) == false).ToList()
                .ForEach(x => this.Logger.WriteError(TryConvertErrorStringToHyperLink(x, modelicaURIMaps)));
        }
    }
    catch (Exception ex)
    {
        //Trace.TraceError(ex.ToString());
        this.Logger.WriteError(ex.ToString());
        result = false;
    }

    return result;
}

/// <summary>
/// Rewrites a tool error line so that the longest matching Modelica URI (from the
/// model mapping, falling back to the instance mapping) becomes a GME hyperlink.
/// Returns the original message with "__CyPhy__" stripped when no mapping applies.
/// </summary>
private string TryConvertErrorStringToHyperLink(string errorMsg, Modelica.ComponentMap modelicaURIMaps)
{
    // Assumption, only checks each line for one mapping!
    var result = errorMsg.Replace("__CyPhy__", "");
    var match = "";
    foreach (var kvp in modelicaURIMaps.ModelMapping)
    {
        if (errorMsg.Contains(kvp.Key) && kvp.Key.Length > match.Length)
        {
            // Make sure the longest ModelicaURI is used.
            match = kvp.Key;
        }
    }

    if (string.IsNullOrWhiteSpace(match) == false)
    {
        var cInfo = modelicaURIMaps.ModelMapping[match];
        var mgaComponent = this.mainParameters.Project.GetFCOByID(cInfo.ID);
        if (mgaComponent != null)
        {
            return errorMsg.Replace(match, GmeConsoleHelper.ToMgaHyperLink(mgaComponent, this.result.Traceability));
        }
        else
        {
            this.Logger.WriteWarning("Could not obtain {0} from IDMap using {1}", match, cInfo.Path);
        }
    }
    else
    {
        // No model mapping matched; try the instance mapping instead.
        foreach (var kvp in modelicaURIMaps.InstanceMapping)
        {
            if (errorMsg.Contains(kvp.Key) && kvp.Key.Length > match.Length)
            {
                // Make sure the longest ModelicaURI is used.
                match = kvp.Key;
            }
        }

        if (string.IsNullOrWhiteSpace(match) == false)
        {
            var cInfo = modelicaURIMaps.InstanceMapping[match];
            var mgaComponent = this.mainParameters.Project.GetFCOByID(cInfo.ID);
            if (mgaComponent != null)
            {
                return errorMsg.Replace(match, GmeConsoleHelper.ToMgaHyperLink(mgaComponent, this.result.Traceability));
            }
            else
            {
                this.Logger.WriteWarning("Could not obtain {0} from IDMap using {1}", match, cInfo.Path);
            }
        }
    }

    return result;
}

// Dumps the per-step timings collected by UpdateSuccess to the debug log.
private void PrintRuntimeStatistics()
{
    this.Logger.WriteDebug("======================================================");
    this.Logger.WriteDebug("Start time: {0}", this.startTime);
    foreach (var time in this.runtime)
    {
        this.Logger.WriteDebug("{0} = {1}", time.Item1, time.Item2);
    }

    this.Logger.WriteDebug("======================================================");
}

#endregion

#region CyPhy2Modelica Specific code

// Factory for CyPhy model types, created per run in MainThrows.
public CyPhyTypeFactory Factory { get; set; }

// When true (verbose console), unused components are still emitted into the package.
private bool GenerateUnusedComponents { get; set; }

/// <summary>
/// This function does the job. CyPhy2Modelica translation.
/// </summary>
private void WorkInMainTransaction()
{
    this.Logger.WriteInfo("{0} [{1}]", this.mainParameters.CurrentFCO.Name, this.mainParameters.CurrentFCO.MetaBase.Name);
    this.result.Success = true;

    // Try to get the original design name for elaboration (used when generating testbench_manifest)
    string originalDesignName = GetOriginalDesignName();

    // 1) try to call dependencies - elaborate
    var elaboratorSuccess = this.CallElaborator(this.mainParameters.Project, this.mainParameters.CurrentFCO, this.mainParameters.SelectedFCOs, this.mainParameters.StartModeParam);
    //var elaboratorSuccess = true;
    this.UpdateSuccess("Elaborator", elaboratorSuccess);
    if (elaboratorSuccess == false)
    {
        // Nothing meaningful can be generated from an un-elaborated model.
        return;
    }

    // 2) Check rules on Elborated model
    this.Logger.WriteDebug("Checking rules...");
    var checker = new Rules.Checker(this.mainParameters, this.Logger, this.result.Traceability, this.Factory);
    checker.Check();
    checker.PrintDetails();
    this.UpdateSuccess("Model check", checker.Success);

    // if user held the control ignore the checker results ... for debugging purposes ONLY!
    bool controlWasHeld = false;
    int VK_CONTROL = 0x11;
    // GetKeyState: high bit (0x8000) set => the key is currently pressed.
    if ((bool)((GetKeyState(VK_CONTROL) & 0x8000) == 0x8000))
    {
        controlWasHeld = true;
    }

    if (this.result.Success == false)
    {
        if (controlWasHeld)
        {
            this.Logger.WriteWarning("Control was held, will continue generating code.");
        }
        else
        {
            return;
        }
    }

    // 3) call cyber
    var cyberSuccess = this.CallCyber(this.mainParameters.Project, this.mainParameters.CurrentFCO, this.mainParameters.SelectedFCOs, this.mainParameters.StartModeParam);
    if (cyberSuccess == false)
    {
        this.Logger.WriteWarning("Cyber code generator failed! Modelica interpreter ignores this failure.");
    }

    // FIXME: add cyber back when it is stable
    //this.UpdateSuccess("Cyber code generator", cyberSuccess);

    // 4) Generate the Modelica package
    var modelicaCodeGenerator = new Modelica.CodeGenerator(this.mainParameters, this.Logger, this.result.Traceability);
    modelicaCodeGenerator.GenerateUnusedComponents = this.GenerateUnusedComponents;
    modelicaCodeGenerator.GenerateModelicaCode();
    this.result.Labels = modelicaCodeGenerator.SolverSettings.ToolSelection + " && py_modelica" + JobManager.Job.LabelVersion;
    // NOTE(review): "limitDefintion"/"LimitDefintion" spelling matches the generator's
    // property name; renaming requires touching Modelica.CodeGenerator too.
    var limitDefintion = modelicaCodeGenerator.LimitDefintion;
    var modelicaURIMaps = modelicaCodeGenerator.ModelicaURIMap;
    this.UpdateSuccess("Modelica translation", true);

    // 5) post actions - pre/post processing scripts and test bench manifest
    //Scripts.Generator.GMEConsole = GMEConsole;
    var scripts = new Scripts.Generator(this.mainParameters, this.Logger);
    scripts.LimitDefinition = limitDefintion;
    var successScripts = scripts.Generate(originalDesignName);
    this.result.RunCommand = scripts.RunCommand;
    var modelConfig = scripts.CurrentModelConfig;
    this.UpdateSuccess("Script generation", successScripts);
    var modelicaSettings = this.mainParameters.config as CyPhy2Modelica_v2Settings;

    // 6) Call checkModel from selected tool(s).
    if (this.result.Success == false)
    {
        return;
    }

    if (modelicaSettings.CheckWithDymola)
    {
        this.UpdateSuccess("Dymola Check", this.CheckGeneratedModelWithDymola(modelicaURIMaps));
    }

    if (modelicaSettings.CheckWithOpenModelica)
    {
        this.UpdateSuccess("OpenModelica Check", this.CheckGeneratedModelWithOpenModelica(modelicaURIMaps, modelConfig));
    }

    // TODO: this part needs to be refactored!
    // NOTE(review): the chain below throws NullReferenceException when the test bench has
    // no WorkflowRef, the reference is null, or the workflow has no task atom — confirm
    // whether that can occur here or guard before dereferencing.
    string Parameters = this.mainParameters
        .CurrentFCO
        .ChildObjects
        .OfType<MgaReference>()
        .FirstOrDefault(x => x.Meta.Name == "WorkflowRef")
        .Referred
        .ChildObjects
        .OfType<MgaAtom>()
        .FirstOrDefault()
        .StrAttrByName["Parameters"];

    Dictionary<string, string> workflowParameters = new Dictionary<string, string>();
    try
    {
        workflowParameters = (Dictionary<string, string>)Newtonsoft.Json.JsonConvert.DeserializeObject(Parameters, typeof(Dictionary<string, string>));
        if (workflowParameters == null)
        {
            workflowParameters = new Dictionary<string, string>();
        }
    }
    catch (Newtonsoft.Json.JsonReaderException)
    {
        // Malformed JSON in the Parameters attribute: deliberately fall back to the empty map.
    }

    META.AnalysisTool.ApplyToolSelection(this.ComponentProgID, workflowParameters, this.result, this.mainParameters);
}

/// <summary>
/// If the context is correct and TopLevelSystemUnderTest points to a component-assembly it returns the name
/// of that (first if many) component assembly.
/// Else it returns null.
/// </summary>
/// <returns>String of original design name.</returns>
private string GetOriginalDesignName()
{
    string originalDesignName = null;
    if (this.mainParameters.CurrentFCO.MetaBase.Name == typeof(CyPhy.TestBench).Name)
    {
        var testBench = CyPhyClasses.TestBench.Cast(this.mainParameters.CurrentFCO);
        var tlsut = testBench.Children.TopLevelSystemUnderTestCollection.FirstOrDefault();
        if (tlsut != null && tlsut.AllReferred != null && tlsut.AllReferred.Kind == typeof(CyPhy.ComponentAssembly).Name)
        {
            originalDesignName = tlsut.AllReferred.Name;
            this.Logger.WriteDebug("Found originalDesignName {0}", originalDesignName);
        }
    }

    return originalDesignName;
}

// P/Invoke: reads the state of a virtual key; high bit set => key currently down.
[DllImport("user32.dll")]
public static extern short GetKeyState(int Key);

#endregion
}
}
using System;
using System.Collections.Generic;
using System.Text;
using JetBrains.Annotations;
using UnityEditor.Graphing;

namespace UnityEditor.ShaderGraph
{
    // Associates a span of generated shader text [startIndex, startIndex + count)
    // with the node that produced it; consumed when building a ShaderSourceMap.
    struct ShaderStringMapping
    {
        public AbstractMaterialNode node { get; set; }
        public int startIndex { get; set; }
        public int count { get; set; }
    }

    // StringBuilder wrapper for emitting shader source: tracks indentation level,
    // brace scopes (closed via IDisposable/using), and which node produced which
    // span of the output text.
    class ShaderStringBuilder : IDisposable
    {
        enum ScopeType
        {
            Indent,
            Block,
            BlockSemicolon
        }

        StringBuilder m_StringBuilder;
        Stack<ScopeType> m_ScopeStack;
        int m_IndentationLevel;
        ShaderStringMapping m_CurrentMapping;   // span currently being written (not yet in m_Mappings)
        List<ShaderStringMapping> m_Mappings;   // completed node -> text-span records
        const string k_IndentationString = "    ";

        // Node attributed to text appended from now on. Setting it closes the
        // current span (recorded only if non-empty) and opens a new one at the
        // current end of the buffer.
        internal AbstractMaterialNode currentNode
        {
            get { return m_CurrentMapping.node; }
            set
            {
                m_CurrentMapping.count = m_StringBuilder.Length - m_CurrentMapping.startIndex;
                if (m_CurrentMapping.count > 0)
                    m_Mappings.Add(m_CurrentMapping);
                m_CurrentMapping.node = value;
                m_CurrentMapping.startIndex = m_StringBuilder.Length;
                m_CurrentMapping.count = 0;
            }
        }

        public ShaderStringBuilder()
        {
            m_StringBuilder = new StringBuilder();
            m_ScopeStack = new Stack<ScopeType>();
            m_Mappings = new List<ShaderStringMapping>();
            m_CurrentMapping = new ShaderStringMapping();
        }

        public ShaderStringBuilder(int indentationLevel)
            : this()
        {
            IncreaseIndent(indentationLevel);
        }

        public void AppendNewLine()
        {
            m_StringBuilder.AppendLine();
        }

        // Appends one indented line; an empty/null value yields a bare newline
        // (no trailing indentation whitespace).
        public void AppendLine(string value)
        {
            if (!string.IsNullOrEmpty(value))
            {
                AppendIndentation();
                m_StringBuilder.Append(value);
            }
            AppendNewLine();
        }

        [StringFormatMethod("formatString")]
        public void AppendLine(string formatString, params object[] args)
        {
            AppendIndentation();
            m_StringBuilder.AppendFormat(formatString, args);
            AppendNewLine();
        }

        // Appends a multi-line string, re-indenting each line; tolerates both
        // "\n" and "\r\n" input and drops a single trailing empty line.
        public void AppendLines(string lines)
        {
            if (string.IsNullOrEmpty(lines))
                return;
            var splitLines = lines.Split('\n');
            var lineCount = splitLines.Length;
            var lastLine = splitLines[lineCount - 1];
            if (string.IsNullOrEmpty(lastLine) || lastLine == "\r")
                lineCount--;
            for (var i = 0; i < lineCount; i++)
                AppendLine(splitLines[i].Trim('\r'));
        }

        // Raw append: no indentation, no newline.
        public void Append(string value)
        {
            m_StringBuilder.Append(value);
        }

        public void Append(string value, int start, int count)
        {
            m_StringBuilder.Append(value, start, count);
        }

        [StringFormatMethod("formatString")]
        public void Append(string formatString, params object[] args)
        {
            m_StringBuilder.AppendFormat(formatString, args);
        }

        public void AppendSpaces(int count)
        {
            m_StringBuilder.Append(' ', count);
        }

        public void AppendIndentation()
        {
            for (var i = 0; i < m_IndentationLevel; i++)
                m_StringBuilder.Append(k_IndentationString);
        }

        // Scope helpers: pair with `using (...)`; Dispose pops the scope,
        // un-indents, and emits the matching closer for block scopes.
        public IDisposable IndentScope()
        {
            m_ScopeStack.Push(ScopeType.Indent);
            IncreaseIndent();
            return this;
        }

        public IDisposable BlockScope()
        {
            AppendLine("{");
            IncreaseIndent();
            m_ScopeStack.Push(ScopeType.Block);
            return this;
        }

        public IDisposable BlockSemicolonScope()
        {
            AppendLine("{");
            IncreaseIndent();
            m_ScopeStack.Push(ScopeType.BlockSemicolon);
            return this;
        }

        public void IncreaseIndent()
        {
            m_IndentationLevel++;
        }

        public void IncreaseIndent(int level)
        {
            for (var i = 0; i < level; i++)
                IncreaseIndent();
        }

        public void DecreaseIndent()
        {
            m_IndentationLevel--;
        }

        public void DecreaseIndent(int level)
        {
            for (var i = 0; i < level; i++)
                DecreaseIndent();
        }

        // NOTE(review): pops unconditionally — calling Dispose with no open scope
        // throws InvalidOperationException; intended for scope use only.
        public void Dispose()
        {
            switch (m_ScopeStack.Pop())
            {
                case ScopeType.Indent:
                    DecreaseIndent();
                    break;
                case ScopeType.Block:
                    DecreaseIndent();
                    AppendLine("}");
                    break;
                case ScopeType.BlockSemicolon:
                    DecreaseIndent();
                    AppendLine("};");
                    break;
            }
        }

        // Appends the contents of `other`, re-recording its node mappings at the
        // new offsets (assigning currentNode closes/opens spans as a side effect).
        public void Concat(ShaderStringBuilder other)
        {
            // First re-add all the mappings from `other`, such that their mappings are transformed.
            foreach (var mapping in other.m_Mappings)
            {
                currentNode = mapping.node;
                // Use `AppendLines` to indent according to the current indentation.
                AppendLines(other.ToString(mapping.startIndex, mapping.count));
            }
            // Copy the span `other` was still writing (not yet in its m_Mappings).
            currentNode = other.currentNode;
            AppendLines(other.ToString(other.m_CurrentMapping.startIndex, other.length - other.m_CurrentMapping.startIndex));
        }

        public override string ToString()
        {
            return m_StringBuilder.ToString();
        }

        // Produces the source plus the node -> span mapping. Temporarily closes the
        // in-progress span, builds the map, then restores state so appending can continue.
        public string ToString(out ShaderSourceMap sourceMap)
        {
            m_CurrentMapping.count = m_StringBuilder.Length - m_CurrentMapping.startIndex;
            if (m_CurrentMapping.count > 0)
                m_Mappings.Add(m_CurrentMapping);
            var source = m_StringBuilder.ToString();
            sourceMap = new ShaderSourceMap(source, m_Mappings);
            // NOTE(review): RemoveAt runs even when the guarded Add above did not fire
            // (empty in-progress span), which would drop the last real mapping — verify.
            m_Mappings.RemoveAt(m_Mappings.Count - 1);
            m_CurrentMapping.count = 0;
            return source;
        }

        public string ToString(int startIndex, int length)
        {
            return m_StringBuilder.ToString(startIndex, length);
        }

        // NOTE(review): clears only the text — indentation level, scope stack and
        // recorded mappings are left untouched; confirm callers rely on that.
        internal void Clear()
        {
            m_StringBuilder.Length = 0;
        }

        internal int length
        {
            get { return m_StringBuilder.Length; }
            set { m_StringBuilder.Length = value; }
        }
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// NOTE(review): protoc-generated (protobuf-csharp-port) — change the .proto source and
// regenerate instead of hand-editing this file.
using pb = global::Google.ProtocolBuffers;
using pbc = global::Google.ProtocolBuffers.Collections;
using pbd = global::Google.ProtocolBuffers.Descriptors;
using scg = global::System.Collections.Generic;
namespace Sirikata.Physics.Protocol._PBJ_Internal {

  public static partial class Physics {

    #region Extension registration
    public static void RegisterAllExtensions(pb::ExtensionRegistry registry) {
    }
    #endregion
    #region Static variables
    internal static pbd::MessageDescriptor internal__static_Sirikata_Physics_Protocol__PBJ_Internal_CollisionBegin__Descriptor;
    internal static pb::FieldAccess.FieldAccessorTable<global::Sirikata.Physics.Protocol._PBJ_Internal.CollisionBegin, global::Sirikata.Physics.Protocol._PBJ_Internal.CollisionBegin.Builder> internal__static_Sirikata_Physics_Protocol__PBJ_Internal_CollisionBegin__FieldAccessorTable;
    internal static pbd::MessageDescriptor internal__static_Sirikata_Physics_Protocol__PBJ_Internal_CollisionEnd__Descriptor;
    internal static pb::FieldAccess.FieldAccessorTable<global::Sirikata.Physics.Protocol._PBJ_Internal.CollisionEnd, global::Sirikata.Physics.Protocol._PBJ_Internal.CollisionEnd.Builder> internal__static_Sirikata_Physics_Protocol__PBJ_Internal_CollisionEnd__FieldAccessorTable;
    #endregion
    #region Descriptor
    public static pbd::FileDescriptor Descriptor {
      get { return descriptor; }
    }
    private static pbd::FileDescriptor descriptor;
    static Physics() {
      // Serialized FileDescriptorProto for Physics.proto (base64).
      byte[] descriptorData = global::System.Convert.FromBase64String(
          "Cg1QaHlzaWNzLnByb3RvEidTaXJpa2F0YS5QaHlzaWNzLlByb3RvY29sLl9Q" +
          "QkpfSW50ZXJuYWwiqAEKDkNvbGxpc2lvbkJlZ2luEhEKCXRpbWVzdGFtcBgC" +
          "IAEoBhIZCg10aGlzX3Bvc2l0aW9uGAMgAygBQgIQARIaCg5vdGhlcl9wb3Np" +
          "dGlvbhgEIAMoAUICEAESFwoLdGhpc19ub3JtYWwYBSADKAJCAhABEhMKB2lt" +
          "cHVsc2UYBiADKAJCAhABEh4KFm90aGVyX29iamVjdF9yZWZlcmVuY2UYByAB" +
          "KAwiQQoMQ29sbGlzaW9uRW5kEhEKCXRpbWVzdGFtcBgCIAEoBhIeChZvdGhl" +
          "cl9vYmplY3RfcmVmZXJlbmNlGAYgASgM");
      pbd::FileDescriptor.InternalDescriptorAssigner assigner = delegate(pbd::FileDescriptor root) {
        descriptor = root;
        internal__static_Sirikata_Physics_Protocol__PBJ_Internal_CollisionBegin__Descriptor = Descriptor.MessageTypes[0];
        internal__static_Sirikata_Physics_Protocol__PBJ_Internal_CollisionBegin__FieldAccessorTable =
            new pb::FieldAccess.FieldAccessorTable<global::Sirikata.Physics.Protocol._PBJ_Internal.CollisionBegin, global::Sirikata.Physics.Protocol._PBJ_Internal.CollisionBegin.Builder>(internal__static_Sirikata_Physics_Protocol__PBJ_Internal_CollisionBegin__Descriptor,
                new string[] { "Timestamp", "ThisPosition", "OtherPosition", "ThisNormal", "Impulse", "OtherObjectReference", });
        internal__static_Sirikata_Physics_Protocol__PBJ_Internal_CollisionEnd__Descriptor = Descriptor.MessageTypes[1];
        internal__static_Sirikata_Physics_Protocol__PBJ_Internal_CollisionEnd__FieldAccessorTable =
            new pb::FieldAccess.FieldAccessorTable<global::Sirikata.Physics.Protocol._PBJ_Internal.CollisionEnd, global::Sirikata.Physics.Protocol._PBJ_Internal.CollisionEnd.Builder>(internal__static_Sirikata_Physics_Protocol__PBJ_Internal_CollisionEnd__Descriptor,
                new string[] { "Timestamp", "OtherObjectReference", });
        return null;
      };
      pbd::FileDescriptor.InternalBuildGeneratedFileFrom(descriptorData,
          new pbd::FileDescriptor[] { }, assigner);
    }
    #endregion
  }
  #region Messages
  public sealed partial class CollisionBegin : pb::GeneratedMessage<CollisionBegin, CollisionBegin.Builder> {
    private static readonly CollisionBegin defaultInstance = new Builder().BuildPartial();
    public static CollisionBegin DefaultInstance {
      get { return defaultInstance; }
    }
    public override CollisionBegin DefaultInstanceForType {
      get { return defaultInstance; }
    }
    protected override CollisionBegin ThisMessage {
      get { return this; }
    }
    public static pbd::MessageDescriptor Descriptor {
      get { return global::Sirikata.Physics.Protocol._PBJ_Internal.Physics.internal__static_Sirikata_Physics_Protocol__PBJ_Internal_CollisionBegin__Descriptor; }
    }
    protected override pb::FieldAccess.FieldAccessorTable<CollisionBegin, CollisionBegin.Builder> InternalFieldAccessors {
      get { return global::Sirikata.Physics.Protocol._PBJ_Internal.Physics.internal__static_Sirikata_Physics_Protocol__PBJ_Internal_CollisionBegin__FieldAccessorTable; }
    }
    public const int TimestampFieldNumber = 2;
    private bool hasTimestamp;
    private ulong timestamp_ = 0;
    public bool HasTimestamp {
      get { return hasTimestamp; }
    }
    [global::System.CLSCompliant(false)]
    public ulong Timestamp {
      get { return timestamp_; }
    }
    public const int ThisPositionFieldNumber = 3;
    private int thisPositionMemoizedSerializedSize;
    private pbc::PopsicleList<double> thisPosition_ = new pbc::PopsicleList<double>();
    public scg::IList<double> ThisPositionList {
      get { return pbc::Lists.AsReadOnly(thisPosition_); }
    }
    public int ThisPositionCount {
      get { return thisPosition_.Count; }
    }
    public double GetThisPosition(int index) {
      return thisPosition_[index];
    }
    public const int OtherPositionFieldNumber = 4;
    private int otherPositionMemoizedSerializedSize;
    private pbc::PopsicleList<double> otherPosition_ = new pbc::PopsicleList<double>();
    public scg::IList<double> OtherPositionList {
      get { return pbc::Lists.AsReadOnly(otherPosition_); }
    }
    public int OtherPositionCount {
      get { return otherPosition_.Count; }
    }
    public double GetOtherPosition(int index) {
      return otherPosition_[index];
    }
    public const int ThisNormalFieldNumber = 5;
    private int thisNormalMemoizedSerializedSize;
    private pbc::PopsicleList<float> thisNormal_ = new pbc::PopsicleList<float>();
    public scg::IList<float> ThisNormalList {
      get { return pbc::Lists.AsReadOnly(thisNormal_); }
    }
    public int ThisNormalCount {
      get { return thisNormal_.Count; }
    }
    public float GetThisNormal(int index) {
      return thisNormal_[index];
    }
    public const int ImpulseFieldNumber = 6;
    private int impulseMemoizedSerializedSize;
    private pbc::PopsicleList<float> impulse_ = new pbc::PopsicleList<float>();
    public scg::IList<float> ImpulseList {
      get { return pbc::Lists.AsReadOnly(impulse_); }
    }
    public int ImpulseCount {
      get { return impulse_.Count; }
    }
    public float GetImpulse(int index) {
      return impulse_[index];
    }
    public const int OtherObjectReferenceFieldNumber = 7;
    private bool hasOtherObjectReference;
    private pb::ByteString otherObjectReference_ = pb::ByteString.Empty;
    public bool HasOtherObjectReference {
      get { return hasOtherObjectReference; }
    }
    public pb::ByteString OtherObjectReference {
      get { return otherObjectReference_; }
    }
    public override bool IsInitialized {
      get { return true; }
    }
    public override void WriteTo(pb::CodedOutputStream output) {
      if (HasTimestamp) {
        output.WriteFixed64(2, Timestamp);
      }
      if (thisPosition_.Count > 0) {
        output.WriteRawVarint32(26);
        output.WriteRawVarint32((uint) thisPositionMemoizedSerializedSize);
        foreach (double element in thisPosition_) {
          output.WriteDoubleNoTag(element);
        }
      }
      if (otherPosition_.Count > 0) {
        output.WriteRawVarint32(34);
        output.WriteRawVarint32((uint) otherPositionMemoizedSerializedSize);
        foreach (double element in otherPosition_) {
          output.WriteDoubleNoTag(element);
        }
      }
      if (thisNormal_.Count > 0) {
        output.WriteRawVarint32(42);
        output.WriteRawVarint32((uint) thisNormalMemoizedSerializedSize);
        foreach (float element in thisNormal_) {
          output.WriteFloatNoTag(element);
        }
      }
      if (impulse_.Count > 0) {
        output.WriteRawVarint32(50);
        output.WriteRawVarint32((uint) impulseMemoizedSerializedSize);
        foreach (float element in impulse_) {
          output.WriteFloatNoTag(element);
        }
      }
      if (HasOtherObjectReference) {
        output.WriteBytes(7, OtherObjectReference);
      }
      UnknownFields.WriteTo(output);
    }
    private int memoizedSerializedSize = -1;
    public override int SerializedSize {
      get {
        int size = memoizedSerializedSize;
        if (size != -1) return size;
        size = 0;
        if (HasTimestamp) {
          size += pb::CodedOutputStream.ComputeFixed64Size(2, Timestamp);
        }
        {
          int dataSize = 0;
          dataSize = 8 * thisPosition_.Count;
          size += dataSize;
          if (thisPosition_.Count!=0) size += 1 + pb::CodedOutputStream.ComputeInt32SizeNoTag(dataSize);
          thisPositionMemoizedSerializedSize = dataSize;
        }
        {
          int dataSize = 0;
          dataSize = 8 * otherPosition_.Count;
          size += dataSize;
          if (otherPosition_.Count!=0) size += 1 + pb::CodedOutputStream.ComputeInt32SizeNoTag(dataSize);
          otherPositionMemoizedSerializedSize = dataSize;
        }
        {
          int dataSize = 0;
          dataSize = 4 * thisNormal_.Count;
          size += dataSize;
          if (thisNormal_.Count!=0) size += 1 + pb::CodedOutputStream.ComputeInt32SizeNoTag(dataSize);
          thisNormalMemoizedSerializedSize = dataSize;
        }
        {
          int dataSize = 0;
          dataSize = 4 * impulse_.Count;
          size += dataSize;
          if (impulse_.Count!=0) size += 1 + pb::CodedOutputStream.ComputeInt32SizeNoTag(dataSize);
          impulseMemoizedSerializedSize = dataSize;
        }
        if (HasOtherObjectReference) {
          size += pb::CodedOutputStream.ComputeBytesSize(7, OtherObjectReference);
        }
        size += UnknownFields.SerializedSize;
        memoizedSerializedSize = size;
        return size;
      }
    }
    public static CollisionBegin ParseFrom(pb::ByteString data) {
      return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
    }
    public static CollisionBegin ParseFrom(pb::ByteString data, pb::ExtensionRegistry extensionRegistry) {
      return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
    }
    public static CollisionBegin ParseFrom(byte[] data) {
      return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
    }
    public static CollisionBegin ParseFrom(byte[] data, pb::ExtensionRegistry extensionRegistry) {
      return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
    }
    public static CollisionBegin ParseFrom(global::System.IO.Stream input) {
      return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
    }
    public static CollisionBegin ParseFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
      return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
    }
    public static CollisionBegin ParseDelimitedFrom(global::System.IO.Stream input) {
      return CreateBuilder().MergeDelimitedFrom(input).BuildParsed();
    }
    public static CollisionBegin ParseDelimitedFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
      return CreateBuilder().MergeDelimitedFrom(input, extensionRegistry).BuildParsed();
    }
    public static CollisionBegin ParseFrom(pb::CodedInputStream input) {
      return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
    }
    public static CollisionBegin ParseFrom(pb::CodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
      return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
    }
    public static Builder CreateBuilder() { return new Builder(); }
    public override Builder ToBuilder() { return CreateBuilder(this); }
    public override Builder CreateBuilderForType() { return new Builder(); }
    public static Builder CreateBuilder(CollisionBegin prototype) {
      return (Builder) new Builder().MergeFrom(prototype);
    }
    public sealed partial class Builder : pb::GeneratedBuilder<CollisionBegin, Builder> {
      protected override Builder ThisBuilder {
        get { return this; }
      }
      public Builder() {}
      CollisionBegin result = new CollisionBegin();
      protected override CollisionBegin MessageBeingBuilt {
        get { return result; }
      }
      public override Builder Clear() {
        result = new CollisionBegin();
        return this;
      }
      public override Builder Clone() {
        return new Builder().MergeFrom(result);
      }
      public override pbd::MessageDescriptor DescriptorForType {
        get { return global::Sirikata.Physics.Protocol._PBJ_Internal.CollisionBegin.Descriptor; }
      }
      public override CollisionBegin DefaultInstanceForType {
        get { return global::Sirikata.Physics.Protocol._PBJ_Internal.CollisionBegin.DefaultInstance; }
      }
      public override CollisionBegin BuildPartial() {
        if (result == null) {
          throw new global::System.InvalidOperationException("build() has already been called on this Builder");
        }
        result.thisPosition_.MakeReadOnly();
        result.otherPosition_.MakeReadOnly();
        result.thisNormal_.MakeReadOnly();
        result.impulse_.MakeReadOnly();
        CollisionBegin returnMe = result;
        result = null;
        return returnMe;
      }
      public override Builder MergeFrom(pb::IMessage other) {
        if (other is CollisionBegin) {
          return MergeFrom((CollisionBegin) other);
        } else {
          base.MergeFrom(other);
          return this;
        }
      }
      public override Builder MergeFrom(CollisionBegin other) {
        if (other == global::Sirikata.Physics.Protocol._PBJ_Internal.CollisionBegin.DefaultInstance) return this;
        if (other.HasTimestamp) {
          Timestamp = other.Timestamp;
        }
        if (other.thisPosition_.Count != 0) {
          base.AddRange(other.thisPosition_, result.thisPosition_);
        }
        if (other.otherPosition_.Count != 0) {
          base.AddRange(other.otherPosition_, result.otherPosition_);
        }
        if (other.thisNormal_.Count != 0) {
          base.AddRange(other.thisNormal_, result.thisNormal_);
        }
        if (other.impulse_.Count != 0) {
          base.AddRange(other.impulse_, result.impulse_);
        }
        if (other.HasOtherObjectReference) {
          OtherObjectReference = other.OtherObjectReference;
        }
        this.MergeUnknownFields(other.UnknownFields);
        return this;
      }
      public override Builder MergeFrom(pb::CodedInputStream input) {
        return MergeFrom(input, pb::ExtensionRegistry.Empty);
      }
      public override Builder MergeFrom(pb::CodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
        pb::UnknownFieldSet.Builder unknownFields = null;
        while (true) {
          uint tag = input.ReadTag();
          switch (tag) {
            case 0: {
              if (unknownFields != null) {
                this.UnknownFields = unknownFields.Build();
              }
              return this;
            }
            default: {
              if (pb::WireFormat.IsEndGroupTag(tag)) {
                if (unknownFields != null) {
                  this.UnknownFields = unknownFields.Build();
                }
                return this;
              }
              if (unknownFields == null) {
                unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
              }
              ParseUnknownField(input, unknownFields, extensionRegistry, tag);
              break;
            }
            case 17: {
              Timestamp = input.ReadFixed64();
              break;
            }
            case 26: {
              int length = input.ReadInt32();
              int limit = input.PushLimit(length);
              while (!input.ReachedLimit) {
                AddThisPosition(input.ReadDouble());
              }
              input.PopLimit(limit);
              break;
            }
            case 34: {
              int length = input.ReadInt32();
              int limit = input.PushLimit(length);
              while (!input.ReachedLimit) {
                AddOtherPosition(input.ReadDouble());
              }
              input.PopLimit(limit);
              break;
            }
            case 42: {
              int length = input.ReadInt32();
              int limit = input.PushLimit(length);
              while (!input.ReachedLimit) {
                AddThisNormal(input.ReadFloat());
              }
              input.PopLimit(limit);
              break;
            }
            case 50: {
              int length = input.ReadInt32();
              int limit = input.PushLimit(length);
              while (!input.ReachedLimit) {
                AddImpulse(input.ReadFloat());
              }
              input.PopLimit(limit);
              break;
            }
            case 58: {
              OtherObjectReference = input.ReadBytes();
              break;
            }
          }
        }
      }
      public bool HasTimestamp {
        get { return result.HasTimestamp; }
      }
      [global::System.CLSCompliant(false)]
      public ulong Timestamp {
        get { return result.Timestamp; }
        set { SetTimestamp(value); }
      }
      [global::System.CLSCompliant(false)]
      public Builder SetTimestamp(ulong value) {
        result.hasTimestamp = true;
        result.timestamp_ = value;
        return this;
      }
      public Builder ClearTimestamp() {
        result.hasTimestamp = false;
        result.timestamp_ = 0;
        return this;
      }
      public pbc::IPopsicleList<double> ThisPositionList {
        get { return result.thisPosition_; }
      }
      public int ThisPositionCount {
        get { return result.ThisPositionCount; }
      }
      public double GetThisPosition(int index) {
        return result.GetThisPosition(index);
      }
      public Builder SetThisPosition(int index, double value) {
        result.thisPosition_[index] = value;
        return this;
      }
      public Builder AddThisPosition(double value) {
        result.thisPosition_.Add(value);
        return this;
      }
      public Builder AddRangeThisPosition(scg::IEnumerable<double> values) {
        base.AddRange(values, result.thisPosition_);
        return this;
      }
      public Builder ClearThisPosition() {
        result.thisPosition_.Clear();
        return this;
      }
      public pbc::IPopsicleList<double> OtherPositionList {
        get { return result.otherPosition_; }
      }
      public int OtherPositionCount {
        get { return result.OtherPositionCount; }
      }
      public double GetOtherPosition(int index) {
        return result.GetOtherPosition(index);
      }
      public Builder SetOtherPosition(int index, double value) {
        result.otherPosition_[index] = value;
        return this;
      }
      public Builder AddOtherPosition(double value) {
        result.otherPosition_.Add(value);
        return this;
      }
      public Builder AddRangeOtherPosition(scg::IEnumerable<double> values) {
        base.AddRange(values, result.otherPosition_);
        return this;
      }
      public Builder ClearOtherPosition() {
        result.otherPosition_.Clear();
        return this;
      }
      public pbc::IPopsicleList<float> ThisNormalList {
        get { return result.thisNormal_; }
      }
      public int ThisNormalCount {
        get { return result.ThisNormalCount; }
      }
      public float GetThisNormal(int index) {
        return result.GetThisNormal(index);
      }
      public Builder SetThisNormal(int index, float value) {
        result.thisNormal_[index] = value;
        return this;
      }
      public Builder AddThisNormal(float value) {
        result.thisNormal_.Add(value);
        return this;
      }
      public Builder AddRangeThisNormal(scg::IEnumerable<float> values) {
        base.AddRange(values, result.thisNormal_);
        return this;
      }
      public Builder ClearThisNormal() {
        result.thisNormal_.Clear();
        return this;
      }
      public pbc::IPopsicleList<float> ImpulseList {
        get { return result.impulse_; }
      }
      public int ImpulseCount {
        get { return result.ImpulseCount; }
      }
      public float GetImpulse(int index) {
        return result.GetImpulse(index);
      }
      public Builder SetImpulse(int index, float value) {
        result.impulse_[index] = value;
        return this;
      }
      public Builder AddImpulse(float value) {
        result.impulse_.Add(value);
        return this;
      }
      public Builder AddRangeImpulse(scg::IEnumerable<float> values) {
        base.AddRange(values, result.impulse_);
        return this;
      }
      public Builder ClearImpulse() {
        result.impulse_.Clear();
        return this;
      }
      public bool HasOtherObjectReference {
        get { return result.HasOtherObjectReference; }
      }
      public pb::ByteString OtherObjectReference {
        get { return result.OtherObjectReference; }
        set { SetOtherObjectReference(value); }
      }
      public Builder SetOtherObjectReference(pb::ByteString value) {
        pb::ThrowHelper.ThrowIfNull(value, "value");
        result.hasOtherObjectReference = true;
        result.otherObjectReference_ = value;
        return this;
      }
      public Builder ClearOtherObjectReference() {
        result.hasOtherObjectReference = false;
        result.otherObjectReference_ = pb::ByteString.Empty;
        return this;
      }
    }
    static CollisionBegin() {
      object.ReferenceEquals(global::Sirikata.Physics.Protocol._PBJ_Internal.Physics.Descriptor, null);
    }
  }
  public sealed partial class CollisionEnd : pb::GeneratedMessage<CollisionEnd, CollisionEnd.Builder> {
    private static readonly CollisionEnd defaultInstance = new Builder().BuildPartial();
    public static CollisionEnd DefaultInstance {
      get { return defaultInstance; }
    }
    public override CollisionEnd DefaultInstanceForType {
      get { return defaultInstance; }
    }
    protected override CollisionEnd ThisMessage {
      get { return this; }
    }
    public static pbd::MessageDescriptor Descriptor {
      get { return global::Sirikata.Physics.Protocol._PBJ_Internal.Physics.internal__static_Sirikata_Physics_Protocol__PBJ_Internal_CollisionEnd__Descriptor; }
    }
    protected override pb::FieldAccess.FieldAccessorTable<CollisionEnd, CollisionEnd.Builder> InternalFieldAccessors {
      get { return global::Sirikata.Physics.Protocol._PBJ_Internal.Physics.internal__static_Sirikata_Physics_Protocol__PBJ_Internal_CollisionEnd__FieldAccessorTable; }
    }
    public const int TimestampFieldNumber = 2;
    private bool hasTimestamp;
    private ulong timestamp_ = 0;
    public bool HasTimestamp {
      get { return hasTimestamp; }
    }
    [global::System.CLSCompliant(false)]
    public ulong Timestamp {
      get { return timestamp_; }
    }
    public const int OtherObjectReferenceFieldNumber = 6;
    private bool hasOtherObjectReference;
    private pb::ByteString otherObjectReference_ = pb::ByteString.Empty;
    public bool HasOtherObjectReference {
      get { return hasOtherObjectReference; }
    }
    public pb::ByteString OtherObjectReference {
      get { return otherObjectReference_; }
    }
    public override bool IsInitialized { get
{ return true; } } public override void WriteTo(pb::CodedOutputStream output) { if (HasTimestamp) { output.WriteFixed64(2, Timestamp); } if (HasOtherObjectReference) { output.WriteBytes(6, OtherObjectReference); } UnknownFields.WriteTo(output); } private int memoizedSerializedSize = -1; public override int SerializedSize { get { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (HasTimestamp) { size += pb::CodedOutputStream.ComputeFixed64Size(2, Timestamp); } if (HasOtherObjectReference) { size += pb::CodedOutputStream.ComputeBytesSize(6, OtherObjectReference); } size += UnknownFields.SerializedSize; memoizedSerializedSize = size; return size; } } public static CollisionEnd ParseFrom(pb::ByteString data) { return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed(); } public static CollisionEnd ParseFrom(pb::ByteString data, pb::ExtensionRegistry extensionRegistry) { return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed(); } public static CollisionEnd ParseFrom(byte[] data) { return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed(); } public static CollisionEnd ParseFrom(byte[] data, pb::ExtensionRegistry extensionRegistry) { return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed(); } public static CollisionEnd ParseFrom(global::System.IO.Stream input) { return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed(); } public static CollisionEnd ParseFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) { return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed(); } public static CollisionEnd ParseDelimitedFrom(global::System.IO.Stream input) { return CreateBuilder().MergeDelimitedFrom(input).BuildParsed(); } public static CollisionEnd ParseDelimitedFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) { return CreateBuilder().MergeDelimitedFrom(input, extensionRegistry).BuildParsed(); } public 
static CollisionEnd ParseFrom(pb::CodedInputStream input) { return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed(); } public static CollisionEnd ParseFrom(pb::CodedInputStream input, pb::ExtensionRegistry extensionRegistry) { return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed(); } public static Builder CreateBuilder() { return new Builder(); } public override Builder ToBuilder() { return CreateBuilder(this); } public override Builder CreateBuilderForType() { return new Builder(); } public static Builder CreateBuilder(CollisionEnd prototype) { return (Builder) new Builder().MergeFrom(prototype); } public sealed partial class Builder : pb::GeneratedBuilder<CollisionEnd, Builder> { protected override Builder ThisBuilder { get { return this; } } public Builder() {} CollisionEnd result = new CollisionEnd(); protected override CollisionEnd MessageBeingBuilt { get { return result; } } public override Builder Clear() { result = new CollisionEnd(); return this; } public override Builder Clone() { return new Builder().MergeFrom(result); } public override pbd::MessageDescriptor DescriptorForType { get { return global::Sirikata.Physics.Protocol._PBJ_Internal.CollisionEnd.Descriptor; } } public override CollisionEnd DefaultInstanceForType { get { return global::Sirikata.Physics.Protocol._PBJ_Internal.CollisionEnd.DefaultInstance; } } public override CollisionEnd BuildPartial() { if (result == null) { throw new global::System.InvalidOperationException("build() has already been called on this Builder"); } CollisionEnd returnMe = result; result = null; return returnMe; } public override Builder MergeFrom(pb::IMessage other) { if (other is CollisionEnd) { return MergeFrom((CollisionEnd) other); } else { base.MergeFrom(other); return this; } } public override Builder MergeFrom(CollisionEnd other) { if (other == global::Sirikata.Physics.Protocol._PBJ_Internal.CollisionEnd.DefaultInstance) return this; if (other.HasTimestamp) { Timestamp = 
other.Timestamp; } if (other.HasOtherObjectReference) { OtherObjectReference = other.OtherObjectReference; } this.MergeUnknownFields(other.UnknownFields); return this; } public override Builder MergeFrom(pb::CodedInputStream input) { return MergeFrom(input, pb::ExtensionRegistry.Empty); } public override Builder MergeFrom(pb::CodedInputStream input, pb::ExtensionRegistry extensionRegistry) { pb::UnknownFieldSet.Builder unknownFields = null; while (true) { uint tag = input.ReadTag(); switch (tag) { case 0: { if (unknownFields != null) { this.UnknownFields = unknownFields.Build(); } return this; } default: { if (pb::WireFormat.IsEndGroupTag(tag)) { if (unknownFields != null) { this.UnknownFields = unknownFields.Build(); } return this; } if (unknownFields == null) { unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields); } ParseUnknownField(input, unknownFields, extensionRegistry, tag); break; } case 17: { Timestamp = input.ReadFixed64(); break; } case 50: { OtherObjectReference = input.ReadBytes(); break; } } } } public bool HasTimestamp { get { return result.HasTimestamp; } } [global::System.CLSCompliant(false)] public ulong Timestamp { get { return result.Timestamp; } set { SetTimestamp(value); } } [global::System.CLSCompliant(false)] public Builder SetTimestamp(ulong value) { result.hasTimestamp = true; result.timestamp_ = value; return this; } public Builder ClearTimestamp() { result.hasTimestamp = false; result.timestamp_ = 0; return this; } public bool HasOtherObjectReference { get { return result.HasOtherObjectReference; } } public pb::ByteString OtherObjectReference { get { return result.OtherObjectReference; } set { SetOtherObjectReference(value); } } public Builder SetOtherObjectReference(pb::ByteString value) { pb::ThrowHelper.ThrowIfNull(value, "value"); result.hasOtherObjectReference = true; result.otherObjectReference_ = value; return this; } public Builder ClearOtherObjectReference() { result.hasOtherObjectReference = false; 
result.otherObjectReference_ = pb::ByteString.Empty; return this; } } static CollisionEnd() { object.ReferenceEquals(global::Sirikata.Physics.Protocol._PBJ_Internal.Physics.Descriptor, null); } } #endregion }
using org.dmxc.lumos.Kernel.Input;
using org.dmxc.lumos.Kernel.Resource;
using System;
using System.Xml.Linq;
using System.Globalization;

namespace MidiPlugin
{
    /// <summary>
    /// Device rule mapping a relative MIDI encoder onto a 0..1 value:
    /// one MIDI message per clockwise tick (increments) and one per
    /// counter-clockwise tick (decrements). Optionally sends "backtrack"
    /// feedback messages reflecting the current value to the hardware.
    /// </summary>
    [FriendlyName("Encoder")]
    public class EncoderRule : DeviceRule
    {
        // Force "." as decimal separator so XML round-trips across locales.
        private static NumberFormatInfo nfi = new NumberFormatInfo { NumberDecimalSeparator = "." };
        // User-visible learn-mode status texts.
        private const string nolearn = "LearnMode disabled.";
        private const string learn1 = "Turn the encoder clockwise.";
        private const string learn2 = "Turn the encoder counter clockwise.";
        private double value;              // backing field for Value, clamped to [0, 1]
        private bool first = true;         // learn mode: true while waiting for the CW message
        private MidiInputChannel c = null; // lazily created input channel (see GetInputChannel)

        public override event EventHandler LearningFinished;

        // Trigger messages (set via learn mode or deserialization) and the
        // two backtrack endpoint messages used for hardware feedback.
        public MidiMessage CWMessage { get; set; }
        public MidiMessage MinimumBacktrack { get; set; }
        public MidiMessage CCWMessage { get; set; }
        public MidiMessage MaximumBacktrack { get; set; }
        /// <summary>Value delta applied per encoder tick.</summary>
        public double Increment { get; set; }

        public override string ControlType { get { return "Encoder"; } }

        /// <summary>
        /// Current value. The setter clamps to [0, 1] and immediately pushes
        /// backtrack feedback to the device.
        /// </summary>
        public override double Value
        {
            get { return this.value; }
            set
            {
                this.value = value;
                if (this.value > 1.0)
                {
                    this.value = 1.0;
                }
                if (this.value < 0.0)
                {
                    this.value = 0.0;
                }
                this.UpdateBacktrack();
            }
        }

        public override string LearnStatus { get; protected set; }

        public bool LearnMode { get; private set; }

        public EncoderRule()
        {
            this.LearnStatus = nolearn;
        }

        /// <summary>Restores state from a persisted kernel tree item.</summary>
        public override void Init(ManagedTreeItem i)
        {
            base.Init(i);
            if (i.hasValue<double>("Increment"))
            {
                this.Increment = i.getValue<double>("Increment");
            }
            if (i.hasValue<int>("CWMessage"))
            {
                this.CWMessage = new MidiMessage { Data = i.getValue<int>("CWMessage") };
            }
            if (i.hasValue<int>("MinimumBacktrack"))
            {
                this.MinimumBacktrack = new MidiMessage { Data = i.getValue<int>("MinimumBacktrack") };
            }
            if (i.hasValue<int>("CCWMessage"))
            {
                this.CCWMessage = new MidiMessage { Data = i.getValue<int>("CCWMessage") };
            }
            if (i.hasValue<int>("MaximumBacktrack"))
            {
                this.MaximumBacktrack = new MidiMessage { Data = i.getValue<int>("MaximumBacktrack") };
            }
            // Assigning Value (not value) also clamps and sends backtrack.
            if (i.hasValue<double>("Value"))
            {
                this.Value = i.getValue<double>("Value");
            }
        }

        /// <summary>Persists state to a kernel tree item.</summary>
        public override void Save(ManagedTreeItem i)
        {
            // NOTE(review): reads CWMessage.Data/CCWMessage.Data unconditionally.
            // If MidiMessage is a reference type and learn mode never ran, this
            // would throw NullReferenceException — confirm MidiMessage is a struct.
            ContextManager.log.Debug("Saving EncoderRule {0}, {1}, {2}", CWMessage.Data, CCWMessage.Data, Increment);
            base.Save(i);
            i.setValue<double>("Value", this.Value);
            i.setValue<double>("Increment", this.Increment);
            i.setValue<int>("CWMessage", this.CWMessage.Data);
            i.setValue<int>("MinimumBacktrack", this.MinimumBacktrack.Data);
            i.setValue<int>("CCWMessage", this.CCWMessage.Data);
            i.setValue<int>("MaximumBacktrack", this.MaximumBacktrack.Data);
        }

        /// <summary>Writes this rule's configuration as XML attributes.</summary>
        protected override void Serialize(XElement item)
        {
            item.Add(new XAttribute("Increment", this.Increment.ToString(nfi)));
            item.Add(new XAttribute("CWMessage", this.CWMessage.Data));
            item.Add(new XAttribute("MinimumBacktrack", this.MinimumBacktrack.Data));
            item.Add(new XAttribute("CCWMessage", this.CCWMessage.Data));
            item.Add(new XAttribute("MaximumBacktrack", this.MaximumBacktrack.Data));
        }

        /// <summary>Restores this rule's configuration from XML attributes.</summary>
        protected override void Deserialize(XElement item)
        {
            // NOTE(review): missing attributes or non-numeric text will throw
            // (NullReference/FormatException) — presumably the XML is always
            // produced by Serialize above; verify against callers.
            this.Increment = double.Parse(item.Attribute("Increment").Value, nfi);
            this.CWMessage = new MidiMessage { Data = int.Parse(item.Attribute("CWMessage").Value) };
            this.MinimumBacktrack = new MidiMessage { Data = int.Parse(item.Attribute("MinimumBacktrack").Value) };
            this.CCWMessage = new MidiMessage { Data = int.Parse(item.Attribute("CCWMessage").Value) };
            this.MaximumBacktrack = new MidiMessage { Data = int.Parse(item.Attribute("MaximumBacktrack").Value) };
        }

        /// <summary>
        /// Handles an incoming MIDI message: a CW match increments the value,
        /// a CCW match decrements it. Setting Value also sends backtrack.
        /// </summary>
        public override void Process(MidiMessage m)
        {
            if (m.Equals(this.CWMessage))
            {
                this.Value += this.Increment;
                base.OnValueChanged();
            }
            if (m.Equals(this.CCWMessage))
            {
                this.Value -= this.Increment;
                base.OnValueChanged();
            }
        }

        /// <summary>
        /// Sends a feedback message whose data2 interpolates linearly between
        /// the minimum and maximum backtrack endpoints according to Value.
        /// </summary>
        public override void UpdateBacktrack()
        {
            int delta = (int)(this.MaximumBacktrack.data2 - this.MinimumBacktrack.data2);
            // NOTE(review): assumes MidiMessage has value (struct) semantics —
            // if it were a class, mutating msg.data2 would corrupt
            // MaximumBacktrack. TODO confirm.
            MidiMessage msg = this.MaximumBacktrack;
            msg.data2 = (byte)((double)this.MinimumBacktrack.data2 + (double)delta * this.Value);
            base.OnSendMessage(msg);
        }

        /// <summary>Enters learn mode; first expected input is the CW tick.</summary>
        public override void BeginLearn()
        {
            this.LearnMode = true;
            this.first = true;
            this.LearnStatus = learn1;
        }

        public override void CancelLearn()
        {
            this.EndLearn();
        }

        // Leaves learn mode and notifies listeners.
        private void EndLearn()
        {
            this.LearnMode = false;
            this.LearnStatus = nolearn;
            if (this.LearningFinished != null)
            {
                this.LearningFinished(this, EventArgs.Empty);
            }
        }

        /// <summary>
        /// Learn-mode input: first message becomes the CW trigger (and, with
        /// backtrack enabled, a data2=0 copy becomes the minimum endpoint);
        /// the second, distinct message becomes the CCW trigger (data2=127
        /// copy becomes the maximum endpoint). Returns false when not in
        /// learn mode or when the second message equals the first.
        /// </summary>
        public override bool TryLearnMessage(MidiMessage m)
        {
            bool result;
            if (!this.LearnMode)
            {
                result = false;
            }
            else
            {
                if (this.first)
                {
                    this.CWMessage = m;
                    this.first = false;
                    this.LearnStatus = learn2;
                    if (base.UseBacktrack)
                    {
                        MidiMessage m2 = m;
                        m2.data2 = 0;
                        this.MinimumBacktrack = m2;
                    }
                }
                else
                {
                    // NOTE(review): uses == here while Process uses .Equals —
                    // verify both compare the same way for MidiMessage.
                    if (m == this.CWMessage)
                    {
                        result = false;
                        return result;
                    }
                    // NOTE(review): m2 (data2 forced to 127) is stored as the
                    // backtrack maximum, while the unmodified m becomes the
                    // CCW trigger — looks intentional, but confirm.
                    MidiMessage m2 = m;
                    m2.data2 = 127;
                    this.CCWMessage = m;
                    if (base.UseBacktrack)
                    {
                        this.MaximumBacktrack = m2;
                    }
                    this.EndLearn();
                }
                result = true;
            }
            return result;
        }

        /// <summary>Returns the (lazily created) input channel for this rule.</summary>
        public override MidiInputChannel GetInputChannel(IInputLayer parent)
        {
            if (this.c == null)
            {
                this.c = new MidiRangeInputChannel(parent, this);
            }
            return this.c;
        }
    }
}
// 
// Copyright (c) 2004-2021 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
// 
// All rights reserved.
// 
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
// 
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// 
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// 
// * Neither the name of Jaroslaw Kowalski nor the names of its
//   contributors may be used to endorse or promote products derived from this
//   software without specific prior written permission.
// 
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
// 

namespace NLog.UnitTests.Fluent
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using NLog.Config;
    using NLog.Fluent;
    using NLog.Targets;
    using Xunit;

    /// <summary>
    /// Unit tests for the (obsolete) fluent <c>LogBuilder</c> API.
    /// t1 captures the raw LogEventInfo; t2 renders "${message}" for
    /// rendered-output assertions.
    /// </summary>
    [Obsolete("Obsoleted since it allocates unnecessary. Instead use ILogger.ForLogEvent and LogEventBuilder. Obsoleted in NLog 5.0")]
    public class LogBuilderTests : NLogTestBase
    {
        private static readonly Logger _logger = LogManager.GetLogger("logger1");

        // Last event captured by target t1; inspected by AssertLastLogEventTarget.
        private LogEventInfo _lastLogEventInfo;

        public LogBuilderTests()
        {
            var configuration = new LoggingConfiguration();

            // t1: method-call target that stores the event for inspection.
            var t1 = new MethodCallTarget("t1", (l, parms) => _lastLogEventInfo = l);
            t1.Parameters.Add(new MethodCallParameter("CallSite", "${callsite}"));
            // t2: debug target holding the rendered message text.
            var t2 = new DebugTarget { Name = "t2", Layout = "${message}" };
            configuration.AddTarget(t1);
            configuration.AddTarget(t2);
            configuration.LoggingRules.Add(new LoggingRule("*", LogLevel.Trace, t1));
            configuration.LoggingRules.Add(new LoggingRule("*", LogLevel.Trace, t2));

            LogManager.Configuration = configuration;
        }

        [Fact]
        public void TraceWrite()
        {
            TraceWrite_internal(() => _logger.Trace());
        }

#if !NET35 && !NET40
        [Fact]
        public void TraceWrite_static_builder()
        {
            TraceWrite_internal(() => Log.Trace(), true);
        }
#endif

        ///<remarks>
        /// func because 1 logbuilder creates 1 message
        ///
        /// Caution: don't use overloading, that will break xUnit:
        /// CATASTROPHIC ERROR OCCURRED:
        /// System.ArgumentException: Ambiguous method named TraceWrite in type NLog.UnitTests.Fluent.LogBuilderTests
        /// </remarks>
        private void TraceWrite_internal(Func<LogBuilder> logBuilder, bool isStatic = false)
        {
            logBuilder()
                .Message("This is a test fluent message.")
                .Property("Test", "TraceWrite")
                .Write();

            // Static builders infer the logger name from the calling class.
            var loggerName = isStatic ? "LogBuilderTests" : "logger1";
            {
                var expectedEvent = new LogEventInfo(LogLevel.Trace, loggerName, "This is a test fluent message.");
                expectedEvent.Properties["Test"] = "TraceWrite";
                AssertLastLogEventTarget(expectedEvent);
            }

            var ticks = DateTime.Now.Ticks;
            logBuilder()
                .Message("This is a test fluent message '{0}'.", ticks)
                .Property("Test", "TraceWrite")
                .Write();
            {
                var rendered = $"This is a test fluent message '{ticks}'.";
                var expectedEvent = new LogEventInfo(LogLevel.Trace, loggerName, "This is a test fluent message '{0}'.");
                expectedEvent.Properties["Test"] = "TraceWrite";
                AssertLastLogEventTarget(expectedEvent);
                AssertDebugLastMessage("t2", rendered);
            }
        }

        [Fact]
        public void TraceWriteProperties()
        {
            var props = new Dictionary<string, object>
            {
                {"prop1", "1"},
                {"prop2", "2"},
            };

            _logger.Trace()
                .Message("This is a test fluent message.")
                .Properties(props).Write();
            {
                var expectedEvent = new LogEventInfo(LogLevel.Trace, "logger1", "This is a test fluent message.");
                expectedEvent.Properties["prop1"] = "1";
                expectedEvent.Properties["prop2"] = "2";
                AssertLastLogEventTarget(expectedEvent);
            }
        }

        [Fact]
        public void WarnWriteProperties()
        {
            var props = new Dictionary<string, object>
            {
                {"prop1", "1"},
                {"prop2", "2"},
            };

            _logger.Warn()
                .Message("This is a test fluent message.")
                .Properties(props).Write();
            {
                var expectedEvent = new LogEventInfo(LogLevel.Warn, "logger1", "This is a test fluent message.");
                expectedEvent.Properties["prop1"] = "1";
                expectedEvent.Properties["prop2"] = "2";
                AssertLastLogEventTarget(expectedEvent);
            }
        }

        [Fact]
        public void LogWriteProperties()
        {
            var props = new Dictionary<string, object>
            {
                {"prop1", "1"},
                {"prop2", "2"},
            };

            // Loop to verify caller-attribute-caching-lookup
            for (int i = 0; i < 2; ++i)
            {
                _logger.Log(LogLevel.Fatal)
                    .Message("This is a test fluent message.")
                    .Properties(props).Write();

                var expectedEvent = new LogEventInfo(LogLevel.Fatal, "logger1", "This is a test fluent message.");
                expectedEvent.Properties["prop1"] = "1";
                expectedEvent.Properties["prop2"] = "2";
                AssertLastLogEventTarget(expectedEvent);
#if !NET35 && !NET40
                Assert.Equal(GetType().ToString(), _lastLogEventInfo.CallerClassName);
#endif
            }
        }

        [Fact]
        public void LogOffWriteProperties()
        {
            var props = new Dictionary<string, object>
            {
                {"prop1", "1"},
                {"prop2", "2"},
            };

            var props2 = new Dictionary<string, object>
            {
                {"prop1", "4"},
                {"prop2", "5"},
            };

            _logger.Log(LogLevel.Fatal)
                .Message("This is a test fluent message.")
                .Properties(props).Write();

            // LogLevel.Off must be suppressed; the Fatal event stays "last".
            _logger.Log(LogLevel.Off)
                .Message("dont log this.")
                .Properties(props2).Write();
            {
                var expectedEvent = new LogEventInfo(LogLevel.Fatal, "logger1", "This is a test fluent message.");
                expectedEvent.Properties["prop1"] = "1";
                expectedEvent.Properties["prop2"] = "2";
                AssertLastLogEventTarget(expectedEvent);
            }
        }

#if !NET35 && !NET40
        [Fact]
        public void LevelWriteProperties()
        {
            var props = new Dictionary<string, object>
            {
                {"prop1", "1"},
                {"prop2", "2"},
            };

            Log.Level(LogLevel.Fatal)
                .Message("This is a test fluent message.")
                .Properties(props).Write();
            {
                var expectedEvent = new LogEventInfo(LogLevel.Fatal, "LogBuilderTests", "This is a test fluent message.");
                expectedEvent.Properties["prop1"] = "1";
                expectedEvent.Properties["prop2"] = "2";
                AssertLastLogEventTarget(expectedEvent);
            }
        }
#endif

        [Fact]
        public void TraceIfWrite()
        {
            _logger.Trace()
                .Message("This is a test fluent message.1")
                .Property("Test", "TraceWrite")
                .Write();
            {
                var expectedEvent = new LogEventInfo(LogLevel.Trace, "logger1", "This is a test fluent message.1");
                expectedEvent.Properties["Test"] = "TraceWrite";
                AssertLastLogEventTarget(expectedEvent);
            }

            int v = 1;

            // WriteIf with a true lambda condition -> logged.
            _logger.Trace()
                .Message("This is a test fluent WriteIf message '{0}'.", DateTime.Now.Ticks)
                .Property("Test", "TraceWrite")
                .WriteIf(() => v == 1);
            {
                var expectedEvent = new LogEventInfo(LogLevel.Trace, "logger1", "This is a test fluent WriteIf message '{0}'.");
                expectedEvent.Properties["Test"] = "TraceWrite";
                AssertLastLogEventTarget(expectedEvent);
                AssertDebugLastMessageContains("t2", "This is a test fluent WriteIf message ");
            }

            // WriteIf with a false lambda -> suppressed; previous event remains.
            _logger.Trace()
                .Message("dont write this! '{0}'.", DateTime.Now.Ticks)
                .Property("Test", "TraceWrite")
                .WriteIf(() => { return false; });
            {
                var expectedEvent = new LogEventInfo(LogLevel.Trace, "logger1", "This is a test fluent WriteIf message '{0}'.");
                expectedEvent.Properties["Test"] = "TraceWrite";
                AssertLastLogEventTarget(expectedEvent);
                AssertDebugLastMessageContains("t2", "This is a test fluent WriteIf message ");
            }

            // WriteIf with a true boolean condition -> logged.
            _logger.Trace()
                .Message("This is a test fluent WriteIf message '{0}'.", DateTime.Now.Ticks)
                .Property("Test", "TraceWrite")
                .WriteIf(v == 1);
            {
                var expectedEvent = new LogEventInfo(LogLevel.Trace, "logger1", "This is a test fluent WriteIf message '{0}'.");
                expectedEvent.Properties["Test"] = "TraceWrite";
                AssertLastLogEventTarget(expectedEvent);
                AssertDebugLastMessageContains("t2", "This is a test fluent WriteIf message ");
            }

            // WriteIf with a false boolean condition -> suppressed.
            _logger.Trace()
                .Message("Should Not WriteIf message '{0}'.", DateTime.Now.Ticks)
                .Property("Test", "TraceWrite")
                .WriteIf(v > 1);
            {
                //previous
                var expectedEvent = new LogEventInfo(LogLevel.Trace, "logger1", "This is a test fluent WriteIf message '{0}'.");
                expectedEvent.Properties["Test"] = "TraceWrite";
                AssertLastLogEventTarget(expectedEvent);
                AssertDebugLastMessageContains("t2", "This is a test fluent WriteIf message ");
            }
        }

        [Fact]
        public void InfoWrite()
        {
            InfoWrite_internal(() => _logger.Info());
        }

#if !NET35 && !NET40
        [Fact]
        public void InfoWrite_static_builder()
        {
            InfoWrite_internal(() => Log.Info(), true);
        }
#endif

        ///<remarks>
        /// func because 1 logbuilder creates 1 message
        ///
        /// Caution: don't use overloading, that will break xUnit:
        /// CATASTROPHIC ERROR OCCURRED:
        /// System.ArgumentException: Ambiguous method named TraceWrite in type NLog.UnitTests.Fluent.LogBuilderTests
        /// </remarks>
        private void InfoWrite_internal(Func<LogBuilder> logBuilder, bool isStatic = false)
        {
            logBuilder()
                .Message("This is a test fluent message.")
                .Property("Test", "InfoWrite")
                .Write();

            var loggerName = isStatic ? "LogBuilderTests" : "logger1";
            {
                //previous
                var expectedEvent = new LogEventInfo(LogLevel.Info, loggerName, "This is a test fluent message.");
                expectedEvent.Properties["Test"] = "InfoWrite";
                AssertLastLogEventTarget(expectedEvent);
            }

            logBuilder()
                .Message("This is a test fluent message '{0}'.", DateTime.Now.Ticks)
                .Property("Test", "InfoWrite")
                .Write();
            {
                //previous
                var expectedEvent = new LogEventInfo(LogLevel.Info, loggerName, "This is a test fluent message '{0}'.");
                expectedEvent.Properties["Test"] = "InfoWrite";
                AssertLastLogEventTarget(expectedEvent);
                AssertDebugLastMessageContains("t2", "This is a test fluent message '");
            }
        }

        [Fact]
        public void DebugWrite()
        {
            ErrorWrite_internal(() => _logger.Debug(), LogLevel.Debug);
        }

#if !NET35 && !NET40
        [Fact]
        public void DebugWrite_static_builder()
        {
            ErrorWrite_internal(() => Log.Debug(), LogLevel.Debug, true);
        }
#endif

        [Fact]
        public void FatalWrite()
        {
            ErrorWrite_internal(() => _logger.Fatal(), LogLevel.Fatal);
        }

#if !NET35 && !NET40
        [Fact]
        public void FatalWrite_static_builder()
        {
            ErrorWrite_internal(() => Log.Fatal(), LogLevel.Fatal, true);
        }
#endif

        [Fact]
        public void ErrorWrite()
        {
            ErrorWrite_internal(() => _logger.Error(), LogLevel.Error);
        }

#if !NET35 && !NET40
        [Fact]
        public void ErrorWrite_static_builder()
        {
            ErrorWrite_internal(() => Log.Error(), LogLevel.Error, true);
        }
#endif

        [Fact]
        public void LogBuilder_null_lead_to_ArgumentNullException()
        {
            var logger = LogManager.GetLogger("a");
            Assert.Throws<ArgumentNullException>(() => new LogBuilder(null, LogLevel.Debug));
            Assert.Throws<ArgumentNullException>(() => new LogBuilder(null));
            Assert.Throws<ArgumentNullException>(() => new LogBuilder(logger, null));

            var logBuilder = new LogBuilder(logger);
            Assert.Throws<ArgumentNullException>(() => logBuilder.Properties(null));
            Assert.Throws<ArgumentNullException>(() => logBuilder.Property(null, "b"));
        }

        [Fact]
        public void LogBuilder_nLogEventInfo()
        {
            var d = new DateTime(2015, 01, 30, 14, 30, 5);
            var logEventInfo = new LogBuilder(LogManager.GetLogger("a")).LoggerName("b").Level(LogLevel.Fatal).TimeStamp(d).LogEventInfo;

            Assert.Equal("b", logEventInfo.LoggerName);
            Assert.Equal(LogLevel.Fatal, logEventInfo.Level);
            Assert.Equal(d, logEventInfo.TimeStamp);
        }

        [Fact]
        public void LogBuilder_exception_only()
        {
            var ex = new Exception("Exception message1");

            _logger.Error()
                .Exception(ex)
                .Write();

            var expectedEvent = new LogEventInfo(LogLevel.Error, "logger1", null) { Exception = ex };
            AssertLastLogEventTarget(expectedEvent);
        }

        [Fact]
        public void LogBuilder_null_logLevel()
        {
            Assert.Throws<ArgumentNullException>(() => _logger.Error().Level(null));
        }

        [Fact]
        public void LogBuilder_message_overloadsTest()
        {
            LogManager.ThrowExceptions = true;

            _logger.Debug()
                .Message("Message with {0} arg", 1)
                .Write();
            AssertDebugLastMessage("t2", "Message with 1 arg");

            _logger.Debug()
                .Message("Message with {0} args. {1}", 2, "YES")
                .Write();
            AssertDebugLastMessage("t2", "Message with 2 args. YES");

            _logger.Debug()
                .Message("Message with {0} args. {1} {2}", 3, ":) ", 2)
                .Write();
            AssertDebugLastMessage("t2", "Message with 3 args. :) 2");

            _logger.Debug()
                .Message("Message with {0} args. {1} {2}{3}", "more", ":) ", 2, "b")
                .Write();
            AssertDebugLastMessage("t2", "Message with more args. :) 2b");
        }

        [Fact]
        public void LogBuilder_message_cultureTest()
        {
            if (IsLinux())
            {
                Console.WriteLine("[SKIP] LogBuilderTests.LogBuilder_message_cultureTest because we are running in Travis");
                return;
            }

            LogManager.Configuration.DefaultCultureInfo = GetCultureInfo("en-US");
            _logger.Debug()
                .Message("Message with {0} {1} {2} {3}", 4.1, 4.001, new DateTime(2016, 12, 31), true)
                .Write();
            AssertDebugLastMessage("t2", "Message with 4.1 4.001 12/31/2016 12:00:00 AM True");

            // Explicit culture on the message overrides the configured default.
            _logger.Debug()
                .Message(GetCultureInfo("nl-nl"), "Message with {0} {1} {2} {3}", 4.1, 4.001, new DateTime(2016, 12, 31), true)
                .Write();
            AssertDebugLastMessage("t2", "Message with 4,1 4,001 31-12-2016 00:00:00 True");
        }

        [Fact]
        public void LogBuilder_Structured_Logging_Test()
        {
            var logEvent = _logger.Info().Property("Property1Key", "Property1Value").Message("{@message}", "My custom message").LogEventInfo;
            Assert.NotEmpty(logEvent.Properties);
            Assert.Contains("message", logEvent.Properties.Keys);
            Assert.Contains("Property1Key", logEvent.Properties.Keys);
        }

        ///<remarks>
        /// func because 1 logbuilder creates 1 message
        ///
        /// Caution: don't use overloading, that will break xUnit:
        /// CATASTROPHIC ERROR OCCURRED:
        /// System.ArgumentException: Ambiguous method named TraceWrite in type NLog.UnitTests.Fluent.LogBuilderTests
        /// </remarks>
        private void ErrorWrite_internal(Func<LogBuilder> logBuilder, LogLevel logLevel, bool isStatic = false)
        {
            Exception catchedException = null;
            string path = "blah.txt";
            try
            {
                // Deliberately read a missing file to obtain a real exception.
                string text = File.ReadAllText(path);
            }
            catch (Exception ex)
            {
                catchedException = ex;
                logBuilder()
                    .Message("Error reading file '{0}'.", path)
                    .Exception(ex)
                    .Property("Test", "ErrorWrite")
                    .Write();
            }

            var loggerName = isStatic ? "LogBuilderTests" : "logger1";
            {
                var expectedEvent = new LogEventInfo(logLevel, loggerName, "Error reading file '{0}'.");
                expectedEvent.Properties["Test"] = "ErrorWrite";
                expectedEvent.Exception = catchedException;
                AssertLastLogEventTarget(expectedEvent);
                AssertDebugLastMessageContains("t2", "Error reading file '");
            }

            logBuilder()
                .Message("This is a test fluent message.")
                .Property("Test", "ErrorWrite")
                .Write();
            {
                var expectedEvent = new LogEventInfo(logLevel, loggerName, "This is a test fluent message.");
                expectedEvent.Properties["Test"] = "ErrorWrite";
                AssertLastLogEventTarget(expectedEvent);
            }

            logBuilder()
                .Message("This is a test fluent message '{0}'.", DateTime.Now.Ticks)
                .Property("Test", "ErrorWrite")
                .Write();
            {
                var expectedEvent = new LogEventInfo(logLevel, loggerName, "This is a test fluent message '{0}'.");
                expectedEvent.Properties["Test"] = "ErrorWrite";
                AssertLastLogEventTarget(expectedEvent);
                AssertDebugLastMessageContains("t2", "This is a test fluent message '");
            }
        }

        /// <summary>
        /// Test the written logevent
        /// </summary>
        /// <param name="expected">expected event to be logged.</param>
        void AssertLastLogEventTarget(LogEventInfo expected)
        {
            Assert.NotNull(_lastLogEventInfo);
            Assert.Equal(expected.Message, _lastLogEventInfo.Message);
            Assert.NotNull(_lastLogEventInfo.Properties);
            Assert.Equal(expected.Properties, _lastLogEventInfo.Properties);
            Assert.Equal(expected.LoggerName, _lastLogEventInfo.LoggerName);
            Assert.Equal(expected.Level, _lastLogEventInfo.Level);
            Assert.Equal(expected.Exception, _lastLogEventInfo.Exception);
            Assert.Equal(expected.FormatProvider, _lastLogEventInfo.FormatProvider);
        }
    }
}
using EasyLOB.Library;
using System;
using System.IO;
using System.Net;
using System.Net.FtpClient; // Install-Package System.Net.FtpClient

/*
    /a/b/c.xlsx
        Directory       a b
        File            c.xlsx
        DirectoryPath   /a /a/b
        FilePath        /a/b/c.xlsx

    FTP layout:
    /EDM                <= Root Directory
        /Entity1
            000000000   <= Key 0 to 99
            000000100   <= Key 100 to 199
            ...
        /Entity2
            000000000
            000000100
            ...
*/

namespace EasyLOB.Extensions.Edm
{
    /// <summary>
    /// EDM Manager - FTP
    /// Stores/retrieves EDM documents on an FTP server. Keys are grouped into
    /// per-hundred directories (see layout comment above). All FTP helpers
    /// save and restore the client's working directory around their work.
    /// </summary>
    public partial class EdmManagerFTP : IEdmManager, IDisposable
    {
        #region Properties

        /// <summary>
        /// FTP client.
        /// </summary>
        private FtpClient ftpClient;

        #endregion Properties

        #region Properties Interface

        // Root directory on the FTP server under which all documents live.
        public string RootDirectory { get; }

        #endregion Properties Interface

        #region Methods

        /// <summary>
        /// Connects using EDM.FTP* app settings; RootDirectory from EDM.FTPRoot.
        /// </summary>
        public EdmManagerFTP()
        {
            RootDirectory = LibraryHelper.AppSettings<string>("EDM.FTPRoot");

            ftpClient = new FtpClient();
            ftpClient.Host = LibraryHelper.AppSettings<string>("EDM.FTPServer");
            ftpClient.Port = LibraryHelper.AppSettings<int>("EDM.FTPPort");
            ftpClient.Credentials = new NetworkCredential(LibraryHelper.AppSettings<string>("EDM.FTPUser"),
                LibraryHelper.AppSettings<string>("EDM.FTPPassword"));
            ftpClient.Connect();
        }

        /// <summary>Same as the default constructor but overrides the root directory.</summary>
        public EdmManagerFTP(string rootDirectory)
            : this()
        {
            RootDirectory = rootDirectory;
        }

        public void Dispose()
        {
            // NOTE(review): only disconnects; FtpClient is IDisposable, so
            // consider calling ftpClient.Dispose() here as well — TODO confirm
            // whether Disconnect releases all resources in this library version.
            if (ftpClient != null)
            {
                ftpClient.Disconnect();
            }
        }

        /// <summary>
        /// Create FTP Directory.
        /// </summary>
        /// <param name="directory">Directory</param>
        /// <returns>Ok ?</returns>
        protected bool FTPCreateDirectory(string directory)
        {
            ftpClient.CreateDirectory(directory);

            return FTPDirectoryExists(directory);
        }

        /// <summary>
        /// Create FTP Path: creates each segment of the path in turn,
        /// descending into it, then restores the prior working directory.
        /// </summary>
        /// <param name="directoryPath"></param>
        /// <returns>Ok ?</returns>
        protected bool FTPCreateDirectoryPath(string directoryPath)
        {
            string workingDirectory = "";
            try
            {
                workingDirectory = ftpClient.GetWorkingDirectory();
                ftpClient.SetWorkingDirectory("/");

                string currentDirectory = "";
                var separators = new char[] { Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar };
                // NOTE(review): a leading "/" yields an empty first segment here,
                // which is then passed to DirectoryExists/CreateDirectory — verify
                // the FTP library tolerates that.
                string[] directories = directoryPath.Split(separators);
                foreach (string directory in directories)
                {
                    if (!ftpClient.DirectoryExists(directory))
                    {
                        ftpClient.CreateDirectory(directory);
                    }
                    currentDirectory += "/" + directory;
                    ftpClient.SetWorkingDirectory(currentDirectory);
                }
            }
            finally
            {
                if (!String.IsNullOrEmpty(workingDirectory))
                {
                    ftpClient.SetWorkingDirectory(workingDirectory);
                }
            }

            return FTPDirectoryExists(directoryPath);
        }

        /// <summary>
        /// FTP Directory exists ? (checked from the server root)
        /// </summary>
        /// <param name="directory">Directory</param>
        /// <returns>Exists ?</returns>
        protected bool FTPDirectoryExists(string directory)
        {
            bool result;

            string workingDirectory = "";
            try
            {
                workingDirectory = ftpClient.GetWorkingDirectory();
                ftpClient.SetWorkingDirectory("/");

                result = ftpClient.DirectoryExists(directory);
            }
            finally
            {
                if (!String.IsNullOrEmpty(workingDirectory))
                {
                    ftpClient.SetWorkingDirectory(workingDirectory);
                }
            }

            return result;
        }

        /// <summary>
        /// FTP File exists ? (checked from the server root; forces a fresh listing)
        /// </summary>
        /// <param name="filePath"></param>
        /// <returns>Exists ?</returns>
        protected bool FTPFileExists(string filePath)
        {
            bool result;

            string workingDirectory = "";
            try
            {
                workingDirectory = ftpClient.GetWorkingDirectory();
                ftpClient.SetWorkingDirectory("/");

                result = ftpClient.FileExists(filePath, FtpListOption.ForceList | FtpListOption.AllFiles);
            }
            finally
            {
                if (!String.IsNullOrEmpty(workingDirectory))
                {
                    ftpClient.SetWorkingDirectory(workingDirectory);
                }
            }

            return result;
        }

        #endregion Methods

        #region Methods Interface

        public bool DeleteFile(int key, ZFileTypes fileType)
        {
            return DeleteFile("", key, fileType);
        }

        // Deletes the file for (entityName, key, fileType); false if absent.
        public bool DeleteFile(string entityName, int key, ZFileTypes fileType)
        {
            bool result;

            string filePath = GetFilePath(entityName, key, fileType, false);
            if (FTPFileExists(filePath))
            {
                ftpClient.DeleteFile(filePath);
                result = true;
            }
            else
            {
                result = false;
            }

            return result;
        }

        public bool DeleteFile(string edmFilePath)
        {
            bool result;

            string filePath = GetFilePath(edmFilePath, false);
            if (FTPFileExists(filePath))
            {
                ftpClient.DeleteFile(filePath);
                result = true;
            }
            else
            {
                result = false;
            }

            return result;
        }

        public bool FileExists(int key, ZFileTypes fileType)
        {
            return FileExists("", key, fileType);
        }

        public bool FileExists(string entityName, int key, ZFileTypes fileType)
        {
            return FTPFileExists(GetFilePath(entityName, key, fileType, false));
        }

        public bool FileExists(string edmFilePath)
        {
            return FTPFileExists(GetFilePath(edmFilePath, false));
        }

        public string GetFilePath(int key, ZFileTypes fileType, bool create)
        {
            return GetFilePath("", key, fileType, create);
        }

        /// <summary>
        /// Builds "/root[/entity]/NNNNNNN00/NNNNNNNNN.ext" for the key, where
        /// the directory groups keys per hundred. Returns "" if the directory
        /// does not exist afterwards.
        /// </summary>
        public string GetFilePath(string entityName, int key, ZFileTypes fileType, bool create)
        {
            string filePath = "";
            string extension = LibraryHelper.GetFileExtension(fileType);

            string workingDirectory = "";
            try
            {
                workingDirectory = ftpClient.GetWorkingDirectory();

                entityName = (entityName == null) ? "" : entityName;
                string entityKey = String.Format("{0:000000000}", (key / 100) * 100);
                string directoryPath = LibraryHelper.AddDirectorySeparator(RootDirectory)
                    + ((entityName == "") ? entityName : entityName + "/")
                    + entityKey;
                // NOTE(review): the `create` parameter is ignored here — the
                // directory path is always created. Intentional? The string
                // overload below does honor `create`.
                FTPCreateDirectoryPath(directoryPath);

                ftpClient.SetWorkingDirectory("/");
                if (FTPDirectoryExists(directoryPath))
                {
                    filePath = directoryPath + "/" + String.Format("{0:000000000}", key) + extension;
                }
            }
            finally
            {
                if (!String.IsNullOrEmpty(workingDirectory))
                {
                    ftpClient.SetWorkingDirectory(workingDirectory);
                }
            }

            return filePath;
        }

        public string GetFilePath(string edmFilePath, bool create)
        {
            string filePath = "";

            string directoryPath = Path.GetDirectoryName(LibraryHelper.AddDirectorySeparator(RootDirectory) + edmFilePath);
            if (!FTPDirectoryExists(directoryPath) && create)
            {
                FTPCreateDirectoryPath(directoryPath);
            }
            if (FTPDirectoryExists(directoryPath))
            {
                filePath = LibraryHelper.AddDirectorySeparator(RootDirectory) + edmFilePath;
            }

            return filePath;
        }

        public byte[] ReadFile(int key, ZFileTypes fileType)
        {
            return ReadFile("", key, fileType);
        }

        // Downloads the file contents; returns an empty array when absent.
        public byte[] ReadFile(string entityName, int key, ZFileTypes fileType)
        {
            byte[] file = new byte[0];

            string path = GetFilePath(entityName, key, fileType, false);
            if (FTPFileExists(path))
            {
                ftpClient.SetWorkingDirectory("/");
                // NOTE(review): presizing by ftpStream.Length assumes the FTP
                // stream reports its length — confirm for this library.
                using (var ftpStream = ftpClient.OpenRead(path))
                using (var memoryStream = new MemoryStream((int)ftpStream.Length))
                {
                    int count;
                    byte[] buffer = new byte[8 * 1024];
                    while ((count = ftpStream.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        memoryStream.Write(buffer, 0, count);
                    }
                    file = memoryStream.ToArray();
                }
            }

            return file;
        }

        public byte[] ReadFile(string filePath)
        {
            // NOTE(review): unimplemented stub — always returns an empty array,
            // unlike the other ReadFile overloads which download the file.
            return new byte[0] { };
        }

        public bool WriteFile(int key, ZFileTypes fileType, byte[] file)
        {
            return WriteFile("", key, fileType, file);
        }

        public bool WriteFile(int key, ZFileTypes fileType, string filePath)
        {
            return WriteFile("", key, fileType, filePath);
        }

        // Uploads an in-memory buffer to the key's computed path.
        public bool WriteFile(string entityName, int key, ZFileTypes fileType, byte[] file)
        {
            bool result = false;

            ftpClient.SetWorkingDirectory("/");
            using (var memoryStream = new MemoryStream(file))
            using (var ftpStream = ftpClient.OpenWrite(GetFilePath(entityName, key, fileType, true)))
            {
                int count;
                byte[] buffer = new byte[8 * 1024];
                while ((count = memoryStream.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ftpStream.Write(buffer, 0, count);
                }
            }
            result = true;

            return result;
        }

        // Uploads a local file to the key's computed path; false if the local
        // file does not exist.
        public bool WriteFile(string entityName, int key, ZFileTypes fileType, string filePath)
        {
            bool result = false;

            if (File.Exists(filePath))
            {
                ftpClient.SetWorkingDirectory("/");
                using (var fileStream = File.OpenRead(filePath))
                using (var ftpStream = ftpClient.OpenWrite(GetFilePath(entityName, key, fileType, true)))
                {
                    int count;
                    byte[] buffer = new byte[8 * 1024];
                    while ((count = fileStream.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        ftpStream.Write(buffer, 0, count);
                    }
                }
                result = true;
            }

            return result;
        }

        public bool WriteFile(string edmFilePath, byte[] file)
        {
            bool result = false;

            ftpClient.SetWorkingDirectory("/");
            using (var memoryStream = new MemoryStream(file))
            using (var ftpStream = ftpClient.OpenWrite(GetFilePath(edmFilePath, true)))
            {
                int count;
                byte[] buffer = new byte[8 * 1024];
                while ((count = memoryStream.Read(buffer, 0, buffer.Length)) > 0)
                {
                    ftpStream.Write(buffer, 0, count);
                }
            }
            result = true;

            return result;
        }

        public bool WriteFile(string edmFilePath, string filePath)
        {
            bool result = false;

            if (File.Exists(filePath))
            {
                ftpClient.SetWorkingDirectory("/");
                using (var fileStream = File.OpenRead(filePath))
                using (var ftpStream = ftpClient.OpenWrite(GetFilePath(edmFilePath, true)))
                {
                    int count;
                    byte[] buffer = new byte[8 * 1024];
                    while ((count = fileStream.Read(buffer, 0, buffer.Length)) > 0)
                    {
                        ftpStream.Write(buffer, 0, count);
                    }
                }
                result = true;
            }

            return result;
        }

        #endregion Methods Interface
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Linq;

namespace ForieroEditor.Platforms.iOS.Xcode
{
    /// <summary>
    /// Base class of the small JSON object model used by the Xcode tooling.
    /// Concrete subclasses hold strings, integers, booleans, arrays and dicts.
    /// </summary>
    public class JsonElement
    {
        protected JsonElement() {}

        // Convenience casts; these throw InvalidCastException when the element
        // is not of the requested concrete type (same contract as before).
        public string AsString() { return ((JsonElementString)this).value; }
        public int AsInteger() { return ((JsonElementInteger)this).value; }
        public bool AsBoolean() { return ((JsonElementBoolean)this).value; }
        public JsonElementArray AsArray() { return (JsonElementArray)this; }
        public JsonElementDict AsDict() { return (JsonElementDict)this; }

        /// <summary>Dictionary-style access; valid only when this element is a dict.</summary>
        public JsonElement this[string key]
        {
            get { return AsDict()[key]; }
            set { AsDict()[key] = value; }
        }
    }

    /// <summary>A JSON string value.</summary>
    public class JsonElementString : JsonElement
    {
        public JsonElementString(string v) { value = v; }
        public string value;
    }

    /// <summary>A JSON integer value.</summary>
    public class JsonElementInteger : JsonElement
    {
        public JsonElementInteger(int v) { value = v; }
        public int value;
    }

    /// <summary>A JSON boolean value.</summary>
    public class JsonElementBoolean : JsonElement
    {
        public JsonElementBoolean(bool v) { value = v; }
        public bool value;
    }

    /// <summary>A JSON object (string keys, sorted for deterministic output).</summary>
    public class JsonElementDict : JsonElement
    {
        public JsonElementDict() : base() {}

        // SortedDictionary keeps key order stable across runs.
        private SortedDictionary<string, JsonElement> m_PrivateValue = new SortedDictionary<string, JsonElement>();

        public IDictionary<string, JsonElement> values { get { return m_PrivateValue; } }

        /// <summary>Returns the element for <paramref name="key"/> or null when absent.</summary>
        new public JsonElement this[string key]
        {
            get
            {
                if (values.ContainsKey(key))
                    return values[key];
                return null;
            }
            set { this.values[key] = value; }
        }

        public bool Contains(string key) { return values.ContainsKey(key); }
        public void Remove(string key) { values.Remove(key); }

        // convenience methods
        public void SetInteger(string key, int val) { values[key] = new JsonElementInteger(val); }
        public void SetString(string key, string val) { values[key] = new JsonElementString(val); }
        public void SetBoolean(string key, bool val) { values[key] = new JsonElementBoolean(val); }

        /// <summary>Creates, stores and returns a new array under <paramref name="key"/>.</summary>
        public JsonElementArray CreateArray(string key) { var v = new JsonElementArray(); values[key] = v; return v; }

        /// <summary>Creates, stores and returns a new dict under <paramref name="key"/>.</summary>
        public JsonElementDict CreateDict(string key) { var v = new JsonElementDict(); values[key] = v; return v; }
    }

    /// <summary>A JSON array (insertion-ordered).</summary>
    public class JsonElementArray : JsonElement
    {
        public JsonElementArray() : base() {}
        public List<JsonElement> values = new List<JsonElement>();

        // convenience methods
        public void AddString(string val) { values.Add(new JsonElementString(val)); }
        public void AddInteger(int val) { values.Add(new JsonElementInteger(val)); }
        public void AddBoolean(bool val) { values.Add(new JsonElementBoolean(val)); }
        public JsonElementArray AddArray() { var v = new JsonElementArray(); values.Add(v); return v; }
        public JsonElementDict AddDict() { var v = new JsonElementDict(); values.Add(v); return v; }
    }

    /// <summary>
    /// A writable JSON document rooted at a dict. Serialization is pretty-printed
    /// with <see cref="indentString"/> per nesting level.
    /// </summary>
    public class JsonDocument
    {
        public JsonElementDict root;
        public string indentString = " ";

        public JsonDocument()
        {
            root = new JsonElementDict();
        }

        void AppendIndent(StringBuilder sb, int indent)
        {
            for (int i = 0; i < indent; ++i)
                sb.Append(indentString);
        }

        /// <summary>
        /// Writes <paramref name="str"/> as a quoted JSON string.
        /// FIX: the previous implementation emitted the raw characters
        /// (marked "TODO: escape"), producing invalid JSON whenever the value
        /// contained quotes, backslashes or control characters. Escaping now
        /// follows RFC 8259 section 7.
        /// </summary>
        void WriteString(StringBuilder sb, string str)
        {
            sb.Append('"');
            foreach (char ch in str)
            {
                switch (ch)
                {
                    case '"':  sb.Append("\\\""); break;
                    case '\\': sb.Append("\\\\"); break;
                    case '\b': sb.Append("\\b"); break;
                    case '\f': sb.Append("\\f"); break;
                    case '\n': sb.Append("\\n"); break;
                    case '\r': sb.Append("\\r"); break;
                    case '\t': sb.Append("\\t"); break;
                    default:
                        if (ch < 0x20)
                            sb.Append("\\u").Append(((int)ch).ToString("x4"));
                        else
                            sb.Append(ch);
                        break;
                }
            }
            sb.Append('"');
        }

        void WriteBoolean(StringBuilder sb, bool value)
        {
            sb.Append(value ? "true" : "false");
        }

        void WriteInteger(StringBuilder sb, int value)
        {
            sb.Append(value.ToString());
        }

        // Writes one "key : value" pair on its own indented line.
        void WriteDictKeyValue(StringBuilder sb, string key, JsonElement value, int indent)
        {
            sb.Append("\n");
            AppendIndent(sb, indent);
            WriteString(sb, key);
            sb.Append(" : ");
            if (value is JsonElementString)
                WriteString(sb, value.AsString());
            else if (value is JsonElementInteger)
                WriteInteger(sb, value.AsInteger());
            else if (value is JsonElementBoolean)
                WriteBoolean(sb, value.AsBoolean());
            else if (value is JsonElementDict)
                WriteDict(sb, value.AsDict(), indent);
            else if (value is JsonElementArray)
                WriteArray(sb, value.AsArray(), indent);
        }

        void WriteDict(StringBuilder sb, JsonElementDict el, int indent)
        {
            sb.Append("{");
            bool hasElement = false;
            foreach (var key in el.values.Keys)
            {
                if (hasElement)
                    sb.Append(","); // trailing commas not supported
                WriteDictKeyValue(sb, key, el[key], indent + 1);
                hasElement = true;
            }
            sb.Append("\n");
            AppendIndent(sb, indent);
            sb.Append("}");
        }

        void WriteArray(StringBuilder sb, JsonElementArray el, int indent)
        {
            sb.Append("[");
            bool hasElement = false;
            foreach (var value in el.values)
            {
                if (hasElement)
                    sb.Append(","); // trailing commas not supported
                sb.Append("\n");
                AppendIndent(sb, indent + 1);
                if (value is JsonElementString)
                    WriteString(sb, value.AsString());
                else if (value is JsonElementInteger)
                    WriteInteger(sb, value.AsInteger());
                else if (value is JsonElementBoolean)
                    WriteBoolean(sb, value.AsBoolean());
                else if (value is JsonElementDict)
                    WriteDict(sb, value.AsDict(), indent + 1);
                else if (value is JsonElementArray)
                    WriteArray(sb, value.AsArray(), indent + 1);
                hasElement = true;
            }
            sb.Append("\n");
            AppendIndent(sb, indent);
            sb.Append("]");
        }

        public void WriteToFile(string path)
        {
            File.WriteAllText(path, WriteToString());
        }

        public void WriteToStream(TextWriter tw)
        {
            tw.Write(WriteToString());
        }

        /// <summary>Serializes the whole document starting at <see cref="root"/>.</summary>
        public string WriteToString()
        {
            var sb = new StringBuilder();
            WriteDict(sb, root, 0);
            return sb.ToString();
        }
    }
} // namespace ForieroEditor.Platforms.iOS.Xcode
// -----------------------------------------------------------------------------
// Machine-generated JNI proxy for java.lang.reflect.Field (MonoJavaBridge).
// Each Java method is exposed through a lazily-resolved, cached MethodId
// (_m0.._m30) passed by ref to JavaBridge.Call*Method together with the JNI
// signature string. The static constructor resolves the Java class once and
// pins it with a global reference in `staticClass`.
// NOTE(review): this file looks generated — regenerate via the bridge tooling
// rather than hand-editing signatures; code below is intentionally untouched.
// -----------------------------------------------------------------------------
namespace java.lang.reflect { [global::MonoJavaBridge.JavaClass()] public sealed partial class Field : java.lang.reflect.AccessibleObject, Member { internal new static global::MonoJavaBridge.JniGlobalHandle staticClass; internal Field(global::MonoJavaBridge.JNIEnv @__env) : base(@__env) { } private static global::MonoJavaBridge.MethodId _m0; public global::java.lang.Object get(java.lang.Object arg0) { return global::MonoJavaBridge.JavaBridge.CallObjectMethod(this, global::java.lang.reflect.Field.staticClass, "get", "(Ljava/lang/Object;)Ljava/lang/Object;", ref global::java.lang.reflect.Field._m0, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)) as java.lang.Object; } private static global::MonoJavaBridge.MethodId _m1; public sealed override bool equals(java.lang.Object arg0) { return global::MonoJavaBridge.JavaBridge.CallBooleanMethod(this, global::java.lang.reflect.Field.staticClass, "equals", "(Ljava/lang/Object;)Z", ref global::java.lang.reflect.Field._m1, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } private static global::MonoJavaBridge.MethodId _m2; public sealed override global::java.lang.String toString() { return global::MonoJavaBridge.JavaBridge.CallSealedClassObjectMethod<java.lang.String>(this, global::java.lang.reflect.Field.staticClass, "toString", "()Ljava/lang/String;", ref global::java.lang.reflect.Field._m2) as java.lang.String; } private static global::MonoJavaBridge.MethodId _m3; public sealed override int hashCode() { return global::MonoJavaBridge.JavaBridge.CallIntMethod(this, global::java.lang.reflect.Field.staticClass, "hashCode", "()I", ref global::java.lang.reflect.Field._m3); } public new int Modifiers { get { return getModifiers(); } } private static global::MonoJavaBridge.MethodId _m4; public int getModifiers() { return global::MonoJavaBridge.JavaBridge.CallIntMethod(this, global::java.lang.reflect.Field.staticClass, "getModifiers", "()I", ref global::java.lang.reflect.Field._m4); } private static
global::MonoJavaBridge.MethodId _m5; public bool getBoolean(java.lang.Object arg0) { return global::MonoJavaBridge.JavaBridge.CallBooleanMethod(this, global::java.lang.reflect.Field.staticClass, "getBoolean", "(Ljava/lang/Object;)Z", ref global::java.lang.reflect.Field._m5, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } private static global::MonoJavaBridge.MethodId _m6; public byte getByte(java.lang.Object arg0) { return global::MonoJavaBridge.JavaBridge.CallByteMethod(this, global::java.lang.reflect.Field.staticClass, "getByte", "(Ljava/lang/Object;)B", ref global::java.lang.reflect.Field._m6, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } private static global::MonoJavaBridge.MethodId _m7; public short getShort(java.lang.Object arg0) { return global::MonoJavaBridge.JavaBridge.CallShortMethod(this, global::java.lang.reflect.Field.staticClass, "getShort", "(Ljava/lang/Object;)S", ref global::java.lang.reflect.Field._m7, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } private static global::MonoJavaBridge.MethodId _m8; public char getChar(java.lang.Object arg0) { return global::MonoJavaBridge.JavaBridge.CallCharMethod(this, global::java.lang.reflect.Field.staticClass, "getChar", "(Ljava/lang/Object;)C", ref global::java.lang.reflect.Field._m8, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } private static global::MonoJavaBridge.MethodId _m9; public int getInt(java.lang.Object arg0) { return global::MonoJavaBridge.JavaBridge.CallIntMethod(this, global::java.lang.reflect.Field.staticClass, "getInt", "(Ljava/lang/Object;)I", ref global::java.lang.reflect.Field._m9, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } private static global::MonoJavaBridge.MethodId _m10; public long getLong(java.lang.Object arg0) { return global::MonoJavaBridge.JavaBridge.CallLongMethod(this, global::java.lang.reflect.Field.staticClass, "getLong", "(Ljava/lang/Object;)J", ref global::java.lang.reflect.Field._m10,
global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } private static global::MonoJavaBridge.MethodId _m11; public float getFloat(java.lang.Object arg0) { return global::MonoJavaBridge.JavaBridge.CallFloatMethod(this, global::java.lang.reflect.Field.staticClass, "getFloat", "(Ljava/lang/Object;)F", ref global::java.lang.reflect.Field._m11, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } private static global::MonoJavaBridge.MethodId _m12; public double getDouble(java.lang.Object arg0) { return global::MonoJavaBridge.JavaBridge.CallDoubleMethod(this, global::java.lang.reflect.Field.staticClass, "getDouble", "(Ljava/lang/Object;)D", ref global::java.lang.reflect.Field._m12, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)); } public new global::java.lang.String Name { get { return getName(); } } private static global::MonoJavaBridge.MethodId _m13; public global::java.lang.String getName() { return global::MonoJavaBridge.JavaBridge.CallSealedClassObjectMethod<java.lang.String>(this, global::java.lang.reflect.Field.staticClass, "getName", "()Ljava/lang/String;", ref global::java.lang.reflect.Field._m13) as java.lang.String; } private static global::MonoJavaBridge.MethodId _m14; public bool isSynthetic() { return global::MonoJavaBridge.JavaBridge.CallBooleanMethod(this, global::java.lang.reflect.Field.staticClass, "isSynthetic", "()Z", ref global::java.lang.reflect.Field._m14); } public new global::java.lang.Class DeclaringClass { get { return getDeclaringClass(); } } private static global::MonoJavaBridge.MethodId _m15; public global::java.lang.Class getDeclaringClass() { return global::MonoJavaBridge.JavaBridge.CallSealedClassObjectMethod<java.lang.Class>(this, global::java.lang.reflect.Field.staticClass, "getDeclaringClass", "()Ljava/lang/Class;", ref global::java.lang.reflect.Field._m15) as java.lang.Class; } private static global::MonoJavaBridge.MethodId _m16; public sealed override global::java.lang.annotation.Annotation
getAnnotation(java.lang.Class arg0) { return global::MonoJavaBridge.JavaBridge.CallIJavaObjectMethod<java.lang.annotation.Annotation>(this, global::java.lang.reflect.Field.staticClass, "getAnnotation", "(Ljava/lang/Class;)Ljava/lang/annotation/Annotation;", ref global::java.lang.reflect.Field._m16, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0)) as java.lang.annotation.Annotation; } public new global::java.lang.annotation.Annotation[] DeclaredAnnotations { get { return getDeclaredAnnotations(); } } private static global::MonoJavaBridge.MethodId _m17; public sealed override global::java.lang.annotation.Annotation[] getDeclaredAnnotations() { return global::MonoJavaBridge.JavaBridge.CallArrayObjectMethod<java.lang.annotation.Annotation>(this, global::java.lang.reflect.Field.staticClass, "getDeclaredAnnotations", "()[Ljava/lang/annotation/Annotation;", ref global::java.lang.reflect.Field._m17) as java.lang.annotation.Annotation[]; } private static global::MonoJavaBridge.MethodId _m18; public bool isEnumConstant() { return global::MonoJavaBridge.JavaBridge.CallBooleanMethod(this, global::java.lang.reflect.Field.staticClass, "isEnumConstant", "()Z", ref global::java.lang.reflect.Field._m18); } public new global::java.lang.Class Type { get { return getType(); } } private static global::MonoJavaBridge.MethodId _m19; public global::java.lang.Class getType() { return global::MonoJavaBridge.JavaBridge.CallSealedClassObjectMethod<java.lang.Class>(this, global::java.lang.reflect.Field.staticClass, "getType", "()Ljava/lang/Class;", ref global::java.lang.reflect.Field._m19) as java.lang.Class; } public new global::java.lang.reflect.Type GenericType { get { return getGenericType(); } } private static global::MonoJavaBridge.MethodId _m20; public global::java.lang.reflect.Type getGenericType() { return global::MonoJavaBridge.JavaBridge.CallIJavaObjectMethod<java.lang.reflect.Type>(this, global::java.lang.reflect.Field.staticClass, "getGenericType",
"()Ljava/lang/reflect/Type;", ref global::java.lang.reflect.Field._m20) as java.lang.reflect.Type; } private static global::MonoJavaBridge.MethodId _m21; public global::java.lang.String toGenericString() { return global::MonoJavaBridge.JavaBridge.CallSealedClassObjectMethod<java.lang.String>(this, global::java.lang.reflect.Field.staticClass, "toGenericString", "()Ljava/lang/String;", ref global::java.lang.reflect.Field._m21) as java.lang.String; } private static global::MonoJavaBridge.MethodId _m22; public void set(java.lang.Object arg0, java.lang.Object arg1) { global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::java.lang.reflect.Field.staticClass, "set", "(Ljava/lang/Object;Ljava/lang/Object;)V", ref global::java.lang.reflect.Field._m22, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } private static global::MonoJavaBridge.MethodId _m23; public void setBoolean(java.lang.Object arg0, bool arg1) { global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::java.lang.reflect.Field.staticClass, "setBoolean", "(Ljava/lang/Object;Z)V", ref global::java.lang.reflect.Field._m23, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } private static global::MonoJavaBridge.MethodId _m24; public void setByte(java.lang.Object arg0, byte arg1) { global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::java.lang.reflect.Field.staticClass, "setByte", "(Ljava/lang/Object;B)V", ref global::java.lang.reflect.Field._m24, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } private static global::MonoJavaBridge.MethodId _m25; public void setChar(java.lang.Object arg0, char arg1) { global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::java.lang.reflect.Field.staticClass, "setChar", "(Ljava/lang/Object;C)V", ref global::java.lang.reflect.Field._m25,
global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } private static global::MonoJavaBridge.MethodId _m26; public void setShort(java.lang.Object arg0, short arg1) { global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::java.lang.reflect.Field.staticClass, "setShort", "(Ljava/lang/Object;S)V", ref global::java.lang.reflect.Field._m26, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } private static global::MonoJavaBridge.MethodId _m27; public void setInt(java.lang.Object arg0, int arg1) { global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::java.lang.reflect.Field.staticClass, "setInt", "(Ljava/lang/Object;I)V", ref global::java.lang.reflect.Field._m27, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } private static global::MonoJavaBridge.MethodId _m28; public void setLong(java.lang.Object arg0, long arg1) { global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::java.lang.reflect.Field.staticClass, "setLong", "(Ljava/lang/Object;J)V", ref global::java.lang.reflect.Field._m28, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } private static global::MonoJavaBridge.MethodId _m29; public void setFloat(java.lang.Object arg0, float arg1) { global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::java.lang.reflect.Field.staticClass, "setFloat", "(Ljava/lang/Object;F)V", ref global::java.lang.reflect.Field._m29, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } private static global::MonoJavaBridge.MethodId _m30; public void setDouble(java.lang.Object arg0, double arg1) { global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::java.lang.reflect.Field.staticClass, "setDouble", "(Ljava/lang/Object;D)V", ref
global::java.lang.reflect.Field._m30, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } static Field() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; global::java.lang.reflect.Field.staticClass = @__env.NewGlobalRef(@__env.FindClass("java/lang/reflect/Field")); } } }
// GzipInputStream.cs // // Copyright (C) 2001 Mike Krueger // // This file was translated from java, it was part of the GNU Classpath // Copyright (C) 2001 Free Software Foundation, Inc. // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. // // Linking this library statically or dynamically with other modules is // making a combined work based on this library. Thus, the terms and // conditions of the GNU General Public License cover the whole // combination. // // As a special exception, the copyright holders of this library give you // permission to link this library with independent modules to produce an // executable, regardless of the license terms of these independent // modules, and to copy and distribute the resulting executable under // terms of your choice, provided that you also meet, for each linked // independent module, the terms and conditions of the license of that // module. An independent module is a module which is not derived from // or based on this library. If you modify this library, you may extend // this exception to your version of the library, but you are not // obligated to do so. If you do not wish to do so, delete this // exception statement from your version. 
using System;
using System.IO;

using ICSharpCode.SharpZipLib.Checksums;
using ICSharpCode.SharpZipLib.Zip.Compression;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;

namespace ICSharpCode.SharpZipLib.GZip
{
	/// <summary>
	/// This filter stream is used to decompress a "GZIP" format stream.
	/// The "GZIP" format is described in RFC 1952.
	///
	/// author of the original java version : John Leuner
	/// </summary>
	/// <example> This sample shows how to unzip a gzipped file
	/// <code>
	/// using (Stream inStream = new GZipInputStream(File.OpenRead(args[0])))
	/// using (FileStream outStream = File.Create(Path.GetFileNameWithoutExtension(args[0]))) {
	///     byte[] buffer = new byte[4096];
	///     StreamUtils.Copy(inStream, outStream, buffer);
	/// }
	/// </code>
	/// </example>
	public class GZipInputStream : InflaterInputStream
	{
		#region Instance Fields
		/// <summary>
		/// CRC-32 value for uncompressed data
		/// </summary>
		protected Crc32 crc = new Crc32();

		/// <summary>
		/// Indicates end of stream
		/// </summary>
		protected bool eos;

		// Have we read the GZIP header yet?
		bool readGZIPHeader;
		#endregion

		#region Constructors
		/// <summary>
		/// Creates a GZipInputStream with the default buffer size
		/// </summary>
		/// <param name="baseInputStream">
		/// The stream to read compressed data from (in GZIP format)
		/// </param>
		public GZipInputStream(Stream baseInputStream)
			: this(baseInputStream, 4096)
		{
		}

		/// <summary>
		/// Creates a GZIPInputStream with the specified buffer size
		/// </summary>
		/// <param name="baseInputStream">
		/// The stream to read compressed data from (in GZIP format)
		/// </param>
		/// <param name="size">
		/// Size of the buffer to use
		/// </param>
		public GZipInputStream(Stream baseInputStream, int size)
			: base(baseInputStream, new Inflater(true), size)
		{
		}
		#endregion

		#region Stream overrides
		/// <summary>
		/// Reads uncompressed data into an array of bytes
		/// </summary>
		/// <param name="buffer">
		/// The buffer to read uncompressed data into
		/// </param>
		/// <param name="offset">
		/// The offset indicating where the data should be placed
		/// </param>
		/// <param name="count">
		/// The number of uncompressed bytes to be read
		/// </param>
		public override int Read(byte[] buffer, int offset, int count)
		{
			// Read the GZIP header first, then feed everything to the base
			// class while updating the running CRC32; the footer is verified
			// once inflation finishes.
			if (!readGZIPHeader) {
				ReadHeader();
			}

			if (eos) {
				return 0;
			}

			int bytesRead = base.Read(buffer, offset, count);
			if (bytesRead > 0) {
				crc.Update(buffer, offset, bytesRead);
			}

			if (inf.IsFinished) {
				ReadFooter();
			}
			return bytesRead;
		}
		#endregion

		#region Support routines
		// Parses and validates the GZIP member header (RFC 1952 section 2.3).
		// Throws EndOfStreamException on truncation and GZipException on any
		// format violation.
		void ReadHeader()
		{
			// 1. Check the two magic bytes
			Crc32 headCRC = new Crc32();
			int magic = baseInputStream.ReadByte();
			if (magic < 0) {
				throw new EndOfStreamException("EOS reading GZIP header");
			}
			headCRC.Update(magic);
			if (magic != (GZipConstants.GZIP_MAGIC >> 8)) {
				throw new GZipException("Error GZIP header, first magic byte doesn't match");
			}

			magic = baseInputStream.ReadByte();
			if (magic < 0) {
				throw new EndOfStreamException("EOS reading GZIP header");
			}
			if (magic != (GZipConstants.GZIP_MAGIC & 0xFF)) {
				throw new GZipException("Error GZIP header, second magic byte doesn't match");
			}
			headCRC.Update(magic);

			// 2. Check the compression type (must be 8 = deflate)
			int compressionType = baseInputStream.ReadByte();
			if (compressionType < 0) {
				throw new EndOfStreamException("EOS reading GZIP header");
			}
			if (compressionType != 8) {
				throw new GZipException("Error GZIP header, data not in deflate format");
			}
			headCRC.Update(compressionType);

			// 3. Check the flags
			int flags = baseInputStream.ReadByte();
			if (flags < 0) {
				throw new EndOfStreamException("EOS reading GZIP header");
			}
			headCRC.Update(flags);

			/* This flag byte is divided into individual bits as follows:
			   bit 0   FTEXT
			   bit 1   FHCRC
			   bit 2   FEXTRA
			   bit 3   FNAME
			   bit 4   FCOMMENT
			   bit 5-7 reserved
			*/

			// 3.1 Check the reserved bits are zero.
			// BUGFIX: RFC 1952 reserves bits 5..7 (mask 0xE0). The previous
			// mask 0xd0 wrongly rejected any stream carrying FCOMMENT (bit 4)
			// and failed to check reserved bit 5.
			if ((flags & 0xE0) != 0) {
				throw new GZipException("Reserved flag bits in GZIP header != 0");
			}

			// 4.-6. Skip the modification time, extra flags, and OS type
			for (int i = 0; i < 6; i++) {
				int readByte = baseInputStream.ReadByte();
				if (readByte < 0) {
					throw new EndOfStreamException("EOS reading GZIP header");
				}
				headCRC.Update(readByte);
			}

			// 7. Read extra field.
			// BUGFIX: per RFC 1952 the FEXTRA data starts with XLEN, a 2-byte
			// little-endian length, immediately followed by XLEN bytes of
			// subfield data; all of it participates in the optional header CRC.
			// The previous code consumed two "subfield id" bytes, silently
			// discarded two further bytes without CRC'ing them, and assembled
			// the length big-endian.
			if ((flags & GZipConstants.FEXTRA) != 0) {
				int len1 = baseInputStream.ReadByte();
				int len2 = baseInputStream.ReadByte();
				if ((len1 < 0) || (len2 < 0)) {
					throw new EndOfStreamException("EOS reading GZIP header");
				}
				headCRC.Update(len1);
				headCRC.Update(len2);

				int extraLen = (len2 << 8) | len1;	// gzip is little-endian
				for (int i = 0; i < extraLen; i++) {
					int readByte = baseInputStream.ReadByte();
					if (readByte < 0) {
						throw new EndOfStreamException("EOS reading GZIP header");
					}
					headCRC.Update(readByte);
				}
			}

			// 8. Read file name (zero-terminated string)
			if ((flags & GZipConstants.FNAME) != 0) {
				int readByte;
				while ((readByte = baseInputStream.ReadByte()) > 0) {
					headCRC.Update(readByte);
				}
				if (readByte < 0) {
					throw new EndOfStreamException("EOS reading GZIP header");
				}
				headCRC.Update(readByte);	// include the terminator
			}

			// 9. Read comment (zero-terminated string)
			if ((flags & GZipConstants.FCOMMENT) != 0) {
				int readByte;
				while ((readByte = baseInputStream.ReadByte()) > 0) {
					headCRC.Update(readByte);
				}
				if (readByte < 0) {
					throw new EndOfStreamException("EOS reading GZIP header");
				}
				headCRC.Update(readByte);	// include the terminator
			}

			// 10. Read header CRC (CRC16 = low 16 bits of CRC32 over the header)
			if ((flags & GZipConstants.FHCRC) != 0) {
				int tempByte;
				int crcval = baseInputStream.ReadByte();
				if (crcval < 0) {
					throw new EndOfStreamException("EOS reading GZIP header");
				}

				tempByte = baseInputStream.ReadByte();
				if (tempByte < 0) {
					throw new EndOfStreamException("EOS reading GZIP header");
				}

				crcval = (crcval << 8) | tempByte;
				if (crcval != ((int)headCRC.Value & 0xffff)) {
					throw new GZipException("Header CRC value mismatch");
				}
			}

			readGZIPHeader = true;
		}

		// Reads the 8-byte member trailer (CRC32 + ISIZE, both little-endian)
		// and checks them against the running CRC and inflated byte count.
		void ReadFooter()
		{
			byte[] footer = new byte[8];

			// The footer may already be partly buffered by the inflater.
			int avail = inf.RemainingInput;
			if (avail > 8) {
				avail = 8;
			}

			System.Array.Copy(inputBuffer.RawData, inputBuffer.RawLength - inf.RemainingInput, footer, 0, avail);
			int needed = 8 - avail;

			while (needed > 0) {
				int count = baseInputStream.Read(footer, 8 - needed, needed);
				if (count <= 0) {
					throw new EndOfStreamException("EOS reading GZIP footer");
				}
				needed -= count; // Jewel Jan 16
			}

			int crcval = (footer[0] & 0xff) | ((footer[1] & 0xff) << 8) | ((footer[2] & 0xff) << 16) | (footer[3] << 24);
			if (crcval != (int)crc.Value) {
				// BUGFIX: message previously lacked the closing quote.
				throw new GZipException("GZIP crc sum mismatch, theirs \"" + crcval + "\" and ours \"" + (int)crc.Value + "\"");
			}

			int total = (footer[4] & 0xff) | ((footer[5] & 0xff) << 8) | ((footer[6] & 0xff) << 16) | (footer[7] << 24);
			if (total != inf.TotalOut) {
				throw new GZipException("Number of bytes mismatch in footer");
			}

			// Should we support multiple gzip members?
			// Difficult, since there may be some bytes still in the dataBuffer
			eos = true;
		}
		#endregion
	}
}
// HtmlAgilityPack V1.0 - Simon Mourier <simon underscore mourier at hotmail dot com>
using System;
using System.IO;
using System.Text;
using System.Collections;

namespace HtmlAgilityPack
{
	/// <summary>
	/// Represents a document with mixed code and text. ASP, ASPX, JSP, are good example of such documents.
	/// The text is split by <see cref="Parse"/> into code fragments (between
	/// <see cref="TokenCodeStart"/>/<see cref="TokenCodeEnd"/>) and text fragments.
	/// </summary>
	public class MixedCodeDocument
	{
		// Encoding detected from the input stream, when loading via a StreamReader.
		private System.Text.Encoding _streamencoding = null;
		// Raw document text, consumed by Parse().
		internal string _text;
		// All fragments, then code-only and text-only views of the same document.
		internal MixedCodeDocumentFragmentList _fragments;
		internal MixedCodeDocumentFragmentList _codefragments;
		internal MixedCodeDocumentFragmentList _textfragments;
		// Parser cursor state: mode, absolute index, current char, line/column.
		private ParseState _state;
		private int _index;
		private int _c;
		private int _line;
		private int _lineposition;
		private MixedCodeDocumentFragment _currentfragment;

		/// <summary>
		/// Gets or sets the token representing code start.
		/// </summary>
		public string TokenCodeStart = "<%";

		/// <summary>
		/// Gets or sets the token representing code end.
		/// </summary>
		public string TokenCodeEnd = "%>";

		/// <summary>
		/// Gets or sets the token representing code directive.
		/// </summary>
		public string TokenDirective = "@";

		/// <summary>
		/// Gets or sets the token representing response write directive.
		/// </summary>
		public string TokenResponseWrite = "Response.Write ";

		// Format string used by the Code property to name text placeholders.
		private string TokenTextBlock = "TextBlock({0})";

		/// <summary>
		/// Creates a mixed code document instance.
		/// </summary>
		public MixedCodeDocument()
		{
			_codefragments = new MixedCodeDocumentFragmentList(this);
			_textfragments = new MixedCodeDocumentFragmentList(this);
			_fragments = new MixedCodeDocumentFragmentList(this);
		}

		/// <summary>
		/// Loads a mixed code document from a stream.
		/// </summary>
		/// <param name="stream">The input stream.</param>
		public void Load(Stream stream)
		{
			Load(new StreamReader(stream));
		}

		/// <summary>
		/// Loads a mixed code document from a stream.
		/// </summary>
		/// <param name="stream">The input stream.</param>
		/// <param name="detectEncodingFromByteOrderMarks">Indicates whether to look for byte order marks at the beginning of the file.</param>
		public void Load(Stream stream, bool detectEncodingFromByteOrderMarks)
		{
			Load(new StreamReader(stream, detectEncodingFromByteOrderMarks));
		}

		/// <summary>
		/// Loads a mixed code document from a stream.
		/// </summary>
		/// <param name="stream">The input stream.</param>
		/// <param name="encoding">The character encoding to use.</param>
		public void Load(Stream stream, Encoding encoding)
		{
			Load(new StreamReader(stream, encoding));
		}

		/// <summary>
		/// Loads a mixed code document from a stream.
		/// </summary>
		/// <param name="stream">The input stream.</param>
		/// <param name="encoding">The character encoding to use.</param>
		/// <param name="detectEncodingFromByteOrderMarks">Indicates whether to look for byte order marks at the beginning of the file.</param>
		public void Load(Stream stream, Encoding encoding, bool detectEncodingFromByteOrderMarks)
		{
			Load(new StreamReader(stream, encoding, detectEncodingFromByteOrderMarks));
		}

		/// <summary>
		/// Loads a mixed code document from a stream.
		/// </summary>
		/// <param name="stream">The input stream.</param>
		/// <param name="encoding">The character encoding to use.</param>
		/// <param name="detectEncodingFromByteOrderMarks">Indicates whether to look for byte order marks at the beginning of the file.</param>
		/// <param name="buffersize">The minimum buffer size.</param>
		public void Load(Stream stream, Encoding encoding, bool detectEncodingFromByteOrderMarks, int buffersize)
		{
			Load(new StreamReader(stream, encoding, detectEncodingFromByteOrderMarks, buffersize));
		}

		/// <summary>
		/// Loads a mixed code document from a file.
		/// </summary>
		/// <param name="path">The complete file path to be read.</param>
		public void Load(string path)
		{
			Load(new StreamReader(path));
		}

		/// <summary>
		/// Loads a mixed code document from a file.
		/// </summary>
		/// <param name="path">The complete file path to be read.</param>
		/// <param name="detectEncodingFromByteOrderMarks">Indicates whether to look for byte order marks at the beginning of the file.</param>
		public void Load(string path, bool detectEncodingFromByteOrderMarks)
		{
			Load(new StreamReader(path, detectEncodingFromByteOrderMarks));
		}

		/// <summary>
		/// Loads a mixed code document from a file.
		/// </summary>
		/// <param name="path">The complete file path to be read.</param>
		/// <param name="encoding">The character encoding to use.</param>
		public void Load(string path, Encoding encoding)
		{
			Load(new StreamReader(path, encoding));
		}

		/// <summary>
		/// Loads a mixed code document from a file.
		/// </summary>
		/// <param name="path">The complete file path to be read.</param>
		/// <param name="encoding">The character encoding to use.</param>
		/// <param name="detectEncodingFromByteOrderMarks">Indicates whether to look for byte order marks at the beginning of the file.</param>
		public void Load(string path, Encoding encoding, bool detectEncodingFromByteOrderMarks)
		{
			Load(new StreamReader(path, encoding, detectEncodingFromByteOrderMarks));
		}

		/// <summary>
		/// Loads a mixed code document from a file.
		/// </summary>
		/// <param name="path">The complete file path to be read.</param>
		/// <param name="encoding">The character encoding to use.</param>
		/// <param name="detectEncodingFromByteOrderMarks">Indicates whether to look for byte order marks at the beginning of the file.</param>
		/// <param name="buffersize">The minimum buffer size.</param>
		public void Load(string path, Encoding encoding, bool detectEncodingFromByteOrderMarks, int buffersize)
		{
			Load(new StreamReader(path, encoding, detectEncodingFromByteOrderMarks, buffersize));
		}

		/// <summary>
		/// Loads a mixed document from a text
		/// </summary>
		/// <param name="html">The text to load.</param>
		public void LoadHtml(string html)
		{
			Load(new StringReader(html));
		}

		/// <summary>
		/// Loads the mixed code document from the specified TextReader.
		/// </summary>
		/// <param name="reader">The TextReader used to feed the HTML data into the document.</param>
		public void Load(TextReader reader)
		{
			_codefragments.Clear();
			_textfragments.Clear();

			// all pseudo constructors get down to this one
			StreamReader sr = reader as StreamReader;
			if (sr != null)
			{
				_streamencoding = sr.CurrentEncoding;
			}
			_text = reader.ReadToEnd();
			// NOTE(review): the reader is closed here, so the caller's
			// reader/stream is unusable afterwards — confirm callers expect that.
			reader.Close();
			Parse();
		}

		// Returns the encoding to use when writing: the detected input encoding
		// if any, otherwise the platform default.
		internal System.Text.Encoding GetOutEncoding()
		{
			if (_streamencoding != null)
				return _streamencoding;
			return System.Text.Encoding.Default;
		}

		/// <summary>
		/// Gets the encoding of the stream used to read the document.
		/// </summary>
		public System.Text.Encoding StreamEncoding
		{
			get
			{
				return _streamencoding;
			}
		}

		/// <summary>
		/// Gets the list of code fragments in the document.
		/// </summary>
		public MixedCodeDocumentFragmentList CodeFragments
		{
			get
			{
				return _codefragments;
			}
		}

		/// <summary>
		/// Gets the list of text fragments in the document.
		/// </summary>
		public MixedCodeDocumentFragmentList TextFragments
		{
			get
			{
				return _textfragments;
			}
		}

		/// <summary>
		/// Gets the list of all fragments in the document.
		/// </summary>
		public MixedCodeDocumentFragmentList Fragments
		{
			get
			{
				return _fragments;
			}
		}

		/// <summary>
		/// Saves the mixed document to the specified stream.
		/// </summary>
		/// <param name="outStream">The stream to which you want to save.</param>
		public void Save(Stream outStream)
		{
			StreamWriter sw = new StreamWriter(outStream, GetOutEncoding());
			Save(sw);
		}

		/// <summary>
		/// Saves the mixed document to the specified stream.
		/// </summary>
		/// <param name="outStream">The stream to which you want to save.</param>
		/// <param name="encoding">The character encoding to use.</param>
		public void Save(Stream outStream, System.Text.Encoding encoding)
		{
			StreamWriter sw = new StreamWriter(outStream, encoding);
			Save(sw);
		}

		/// <summary>
		/// Saves the mixed document to the specified file.
		/// </summary>
		/// <param name="filename">The location of the file where you want to save the document.</param>
		public void Save(string filename)
		{
			StreamWriter sw = new StreamWriter(filename, false, GetOutEncoding());
			Save(sw);
		}

		/// <summary>
		/// Saves the mixed document to the specified file.
		/// </summary>
		/// <param name="filename">The location of the file where you want to save the document.</param>
		/// <param name="encoding">The character encoding to use.</param>
		public void Save(string filename, System.Text.Encoding encoding)
		{
			StreamWriter sw = new StreamWriter(filename, false, encoding);
			Save(sw);
		}

		/// <summary>
		/// Saves the mixed document to the specified StreamWriter.
		/// </summary>
		/// <param name="writer">The StreamWriter to which you want to save.</param>
		public void Save(StreamWriter writer)
		{
			Save((TextWriter)writer);
		}

		/// <summary>
		/// Saves the mixed document to the specified TextWriter.
		/// </summary>
		/// <param name="writer">The TextWriter to which you want to save.</param>
		public void Save(TextWriter writer)
		{
			// NOTE(review): this only flushes the writer; no document content is
			// written here. Looks incomplete — verify against the upstream
			// HtmlAgilityPack source before relying on Save().
			writer.Flush();
		}

		/// <summary>
		/// Gets the code represented by the mixed code document seen as a template.
		/// </summary>
		public string Code
		{
			get
			{
				// Text fragments become numbered Response.Write placeholders;
				// code fragments are emitted verbatim.
				string s = "";
				int i = 0;
				foreach (MixedCodeDocumentFragment frag in _fragments)
				{
					switch (frag._type)
					{
						case MixedCodeDocumentFragmentType.Text:
							s += TokenResponseWrite + string.Format(TokenTextBlock, i) + "\n";
							i++;
							break;

						case MixedCodeDocumentFragmentType.Code:
							s += ((MixedCodeDocumentCodeFragment)frag).Code + "\n";
							break;
					}
				}
				return s;
			}
		}

		/// <summary>
		/// Create a text fragment instances.
		/// </summary>
		/// <returns>The newly created text fragment instance.</returns>
		public MixedCodeDocumentTextFragment CreateTextFragment()
		{
			return (MixedCodeDocumentTextFragment)CreateFragment(MixedCodeDocumentFragmentType.Text);
		}

		/// <summary>
		/// Create a code fragment instances.
		/// </summary>
		/// <returns>The newly created code fragment instance.</returns>
		public MixedCodeDocumentCodeFragment CreateCodeFragment()
		{
			return (MixedCodeDocumentCodeFragment)CreateFragment(MixedCodeDocumentFragmentType.Code);
		}

		// Factory used by the parser and the public Create* helpers.
		internal MixedCodeDocumentFragment CreateFragment(MixedCodeDocumentFragmentType type)
		{
			switch (type)
			{
				case MixedCodeDocumentFragmentType.Text:
					return new MixedCodeDocumentTextFragment(this);

				case MixedCodeDocumentFragmentType.Code:
					return new MixedCodeDocumentCodeFragment(this);

				default:
					throw new NotSupportedException();
			}
		}

		// Stamps the current fragment with the parser's current position.
		private void SetPosition()
		{
			_currentfragment._line = _line;
			_currentfragment._lineposition = _lineposition;
			_currentfragment._index = _index - 1;
			_currentfragment._length = 0;
		}

		// Advances the cursor, tracking line/column (10 == '\n').
		private void IncrementPosition()
		{
			_index++;
			if (_c == 10)
			{
				_lineposition = 1;
				_line++;
			}
			else
				_lineposition++;
		}

		// Parser mode: inside plain text or inside a code block.
		private enum ParseState
		{
			Text,
			Code
		}

		// Splits _text into text/code fragments by scanning for TokenCodeStart.
		// (Method continues beyond this excerpt.)
		private void Parse()
		{
			_state = ParseState.Text;
			_index = 0;
			_currentfragment = CreateFragment(MixedCodeDocumentFragmentType.Text);

			while (_index < _text.Length)
			{
				_c = _text[_index];
				IncrementPosition();
				switch (_state)
				{
					case ParseState.Text:
						if (_index + TokenCodeStart.Length < _text.Length)
						{
							if (_text.Substring(_index - 1, TokenCodeStart.Length) == TokenCodeStart)
							{
								_state =
ParseState.Code; _currentfragment._length = _index -1 - _currentfragment._index; _currentfragment = CreateFragment(MixedCodeDocumentFragmentType.Code); SetPosition(); continue; } } break; case ParseState.Code: if (_index+TokenCodeEnd.Length<_text.Length) { if (_text.Substring(_index-1, TokenCodeEnd.Length) == TokenCodeEnd) { _state = ParseState.Text; _currentfragment._length = _index + TokenCodeEnd.Length - _currentfragment._index; _index += TokenCodeEnd.Length; _lineposition += TokenCodeEnd.Length; _currentfragment = CreateFragment(MixedCodeDocumentFragmentType.Text); SetPosition(); continue; } } break; } } _currentfragment._length = _index - _currentfragment._index; } } }
namespace Xilium.CefGlue
{
    using System;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.Runtime.InteropServices;
    using Xilium.CefGlue.Interop;

    /// <summary>
    /// Class representing a V8 value handle. V8 handles can only be accessed from
    /// the thread on which they are created. Valid threads for creating a V8 handle
    /// include the render process main thread (TID_RENDERER) and WebWorker threads.
    /// A task runner for posting tasks on the associated thread can be retrieved via
    /// the CefV8Context::GetTaskRunner() method.
    /// </summary>
    /// <remarks>
    /// NOTE(review): every member below currently throws
    /// <see cref="NotImplementedException"/> — this partial class is a generated
    /// interop scaffold whose native-call bodies have not been written yet.
    /// </remarks>
    public sealed unsafe partial class CefV8Value
    {
        /// <summary>
        /// Create a new CefV8Value object of type undefined.
        /// </summary>
        public static cef_v8value_t* CreateUndefined()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.CreateUndefined
        }

        /// <summary>
        /// Create a new CefV8Value object of type null.
        /// </summary>
        public static cef_v8value_t* CreateNull()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.CreateNull
        }

        /// <summary>
        /// Create a new CefV8Value object of type bool.
        /// </summary>
        public static cef_v8value_t* CreateBool(int value)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.CreateBool
        }

        /// <summary>
        /// Create a new CefV8Value object of type int.
        /// </summary>
        public static cef_v8value_t* CreateInt(int value)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.CreateInt
        }

        /// <summary>
        /// Create a new CefV8Value object of type unsigned int.
        /// </summary>
        public static cef_v8value_t* CreateUInt(uint value)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.CreateUInt
        }

        /// <summary>
        /// Create a new CefV8Value object of type double.
        /// </summary>
        public static cef_v8value_t* CreateDouble(double value)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.CreateDouble
        }

        /// <summary>
        /// Create a new CefV8Value object of type Date. This method should only be
        /// called from within the scope of a CefRenderProcessHandler, CefV8Handler or
        /// CefV8Accessor callback, or in combination with calling Enter() and Exit()
        /// on a stored CefV8Context reference.
        /// </summary>
        public static cef_v8value_t* CreateDate(cef_time_t* date)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.CreateDate
        }

        /// <summary>
        /// Create a new CefV8Value object of type string.
        /// </summary>
        public static cef_v8value_t* CreateString(cef_string_t* value)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.CreateString
        }

        /// <summary>
        /// Create a new CefV8Value object of type object with optional accessor and/or
        /// interceptor. This method should only be called from within the scope of a
        /// CefRenderProcessHandler, CefV8Handler or CefV8Accessor callback, or in
        /// combination with calling Enter() and Exit() on a stored CefV8Context
        /// reference.
        /// </summary>
        public static cef_v8value_t* CreateObject(cef_v8accessor_t* accessor, cef_v8interceptor_t* interceptor)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.CreateObject
        }

        /// <summary>
        /// Create a new CefV8Value object of type array with the specified |length|.
        /// If |length| is negative the returned array will have length 0. This method
        /// should only be called from within the scope of a CefRenderProcessHandler,
        /// CefV8Handler or CefV8Accessor callback, or in combination with calling
        /// Enter() and Exit() on a stored CefV8Context reference.
        /// </summary>
        public static cef_v8value_t* CreateArray(int length)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.CreateArray
        }

        /// <summary>
        /// Create a new CefV8Value object of type function. This method should only be
        /// called from within the scope of a CefRenderProcessHandler, CefV8Handler or
        /// CefV8Accessor callback, or in combination with calling Enter() and Exit()
        /// on a stored CefV8Context reference.
        /// </summary>
        public static cef_v8value_t* CreateFunction(cef_string_t* name, cef_v8handler_t* handler)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.CreateFunction
        }

        /// <summary>
        /// Returns true if the underlying handle is valid and it can be accessed on
        /// the current thread. Do not call any other methods if this method returns
        /// false.
        /// </summary>
        public int IsValid()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.IsValid
        }

        /// <summary>
        /// True if the value type is undefined.
        /// </summary>
        public int IsUndefined()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.IsUndefined
        }

        /// <summary>
        /// True if the value type is null.
        /// </summary>
        public int IsNull()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.IsNull
        }

        /// <summary>
        /// True if the value type is bool.
        /// </summary>
        public int IsBool()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.IsBool
        }

        /// <summary>
        /// True if the value type is int.
        /// </summary>
        public int IsInt()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.IsInt
        }

        /// <summary>
        /// True if the value type is unsigned int.
        /// </summary>
        public int IsUInt()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.IsUInt
        }

        /// <summary>
        /// True if the value type is double.
        /// </summary>
        public int IsDouble()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.IsDouble
        }

        /// <summary>
        /// True if the value type is Date.
        /// </summary>
        public int IsDate()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.IsDate
        }

        /// <summary>
        /// True if the value type is string.
        /// </summary>
        public int IsString()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.IsString
        }

        /// <summary>
        /// True if the value type is object.
        /// </summary>
        public int IsObject()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.IsObject
        }

        /// <summary>
        /// True if the value type is array.
        /// </summary>
        public int IsArray()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.IsArray
        }

        /// <summary>
        /// True if the value type is function.
        /// </summary>
        public int IsFunction()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.IsFunction
        }

        /// <summary>
        /// Returns true if this object is pointing to the same handle as |that|
        /// object.
        /// </summary>
        public int IsSame(cef_v8value_t* that)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.IsSame
        }

        /// <summary>
        /// Return a bool value.
        /// </summary>
        public int GetBoolValue()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.GetBoolValue
        }

        /// <summary>
        /// Return an int value.
        /// </summary>
        public int GetIntValue()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.GetIntValue
        }

        /// <summary>
        /// Return an unsigned int value.
        /// </summary>
        public uint GetUIntValue()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.GetUIntValue
        }

        /// <summary>
        /// Return a double value.
        /// </summary>
        public double GetDoubleValue()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.GetDoubleValue
        }

        /// <summary>
        /// Return a Date value.
        /// </summary>
        public cef_time_t GetDateValue()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.GetDateValue
        }

        /// <summary>
        /// Return a string value.
        /// </summary>
        public cef_string_userfree* GetStringValue()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.GetStringValue
        }

        /// <summary>
        /// OBJECT METHODS - These methods are only available on objects. Arrays and
        /// functions are also objects. String- and integer-based keys can be used
        /// interchangably with the framework converting between them as necessary.
        /// Returns true if this is a user created object.
        /// </summary>
        public int IsUserCreated()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.IsUserCreated
        }

        /// <summary>
        /// Returns true if the last method call resulted in an exception. This
        /// attribute exists only in the scope of the current CEF value object.
        /// </summary>
        public int HasException()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.HasException
        }

        /// <summary>
        /// Returns the exception resulting from the last method call. This attribute
        /// exists only in the scope of the current CEF value object.
        /// </summary>
        public cef_v8exception_t* GetException()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.GetException
        }

        /// <summary>
        /// Clears the last exception and returns true on success.
        /// </summary>
        public int ClearException()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.ClearException
        }

        /// <summary>
        /// Returns true if this object will re-throw future exceptions. This attribute
        /// exists only in the scope of the current CEF value object.
        /// </summary>
        public int WillRethrowExceptions()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.WillRethrowExceptions
        }

        /// <summary>
        /// Set whether this object will re-throw future exceptions. By default
        /// exceptions are not re-thrown. If a exception is re-thrown the current
        /// context should not be accessed again until after the exception has been
        /// caught and not re-thrown. Returns true on success. This attribute exists
        /// only in the scope of the current CEF value object.
        /// </summary>
        public int SetRethrowExceptions(int rethrow)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.SetRethrowExceptions
        }

        /// <summary>
        /// Returns true if the object has a value with the specified identifier.
        /// </summary>
        public int HasValue(cef_string_t* key)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.HasValue
        }

        /// <summary>
        /// Returns true if the object has a value with the specified identifier.
        /// </summary>
        public int HasValue(int index)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.HasValue
        }

        /// <summary>
        /// Deletes the value with the specified identifier and returns true on
        /// success. Returns false if this method is called incorrectly or an exception
        /// is thrown. For read-only and don't-delete values this method will return
        /// true even though deletion failed.
        /// </summary>
        public int DeleteValue(cef_string_t* key)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.DeleteValue
        }

        /// <summary>
        /// Deletes the value with the specified identifier and returns true on
        /// success. Returns false if this method is called incorrectly, deletion fails
        /// or an exception is thrown. For read-only and don't-delete values this
        /// method will return true even though deletion failed.
        /// </summary>
        public int DeleteValue(int index)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.DeleteValue
        }

        /// <summary>
        /// Returns the value with the specified identifier on success. Returns NULL
        /// if this method is called incorrectly or an exception is thrown.
        /// </summary>
        public cef_v8value_t* GetValue(cef_string_t* key)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.GetValue
        }

        /// <summary>
        /// Returns the value with the specified identifier on success. Returns NULL
        /// if this method is called incorrectly or an exception is thrown.
        /// </summary>
        public cef_v8value_t* GetValue(int index)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.GetValue
        }

        /// <summary>
        /// Associates a value with the specified identifier and returns true on
        /// success. Returns false if this method is called incorrectly or an exception
        /// is thrown. For read-only values this method will return true even though
        /// assignment failed.
        /// </summary>
        public int SetValue(cef_string_t* key, cef_v8value_t* value, CefV8PropertyAttribute attribute)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.SetValue
        }

        /// <summary>
        /// Associates a value with the specified identifier and returns true on
        /// success. Returns false if this method is called incorrectly or an exception
        /// is thrown. For read-only values this method will return true even though
        /// assignment failed.
        /// </summary>
        public int SetValue(int index, cef_v8value_t* value)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.SetValue
        }

        /// <summary>
        /// Registers an identifier and returns true on success. Access to the
        /// identifier will be forwarded to the CefV8Accessor instance passed to
        /// CefV8Value::CreateObject(). Returns false if this method is called
        /// incorrectly or an exception is thrown. For read-only values this method
        /// will return true even though assignment failed.
        /// </summary>
        public int SetValue(cef_string_t* key, CefV8AccessControl settings, CefV8PropertyAttribute attribute)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.SetValue
        }

        /// <summary>
        /// Read the keys for the object's values into the specified vector. Integer-
        /// based keys will also be returned as strings.
        /// </summary>
        public int GetKeys(cef_string_list* keys)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.GetKeys
        }

        /// <summary>
        /// Sets the user data for this object and returns true on success. Returns
        /// false if this method is called incorrectly. This method can only be called
        /// on user created objects.
        /// </summary>
        public int SetUserData(cef_base_t* user_data)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.SetUserData
        }

        /// <summary>
        /// Returns the user data, if any, assigned to this object.
        /// </summary>
        public cef_base_t* GetUserData()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.GetUserData
        }

        /// <summary>
        /// Returns the amount of externally allocated memory registered for the
        /// object.
        /// </summary>
        public int GetExternallyAllocatedMemory()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.GetExternallyAllocatedMemory
        }

        /// <summary>
        /// Adjusts the amount of registered external memory for the object. Used to
        /// give V8 an indication of the amount of externally allocated memory that is
        /// kept alive by JavaScript objects. V8 uses this information to decide when
        /// to perform global garbage collection. Each CefV8Value tracks the amount of
        /// external memory associated with it and automatically decreases the global
        /// total by the appropriate amount on its destruction. |change_in_bytes|
        /// specifies the number of bytes to adjust by. This method returns the number
        /// of bytes associated with the object after the adjustment. This method can
        /// only be called on user created objects.
        /// </summary>
        public int AdjustExternallyAllocatedMemory(int change_in_bytes)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.AdjustExternallyAllocatedMemory
        }

        /// <summary>
        /// ARRAY METHODS - These methods are only available on arrays.
        /// Returns the number of elements in the array.
        /// </summary>
        public int GetArrayLength()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.GetArrayLength
        }

        /// <summary>
        /// FUNCTION METHODS - These methods are only available on functions.
        /// Returns the function name.
        /// </summary>
        public cef_string_userfree* GetFunctionName()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.GetFunctionName
        }

        /// <summary>
        /// Returns the function handler or NULL if not a CEF-created function.
        /// </summary>
        public cef_v8handler_t* GetFunctionHandler()
        {
            throw new NotImplementedException(); // TODO: CefV8Value.GetFunctionHandler
        }

        /// <summary>
        /// Execute the function using the current V8 context. This method should only
        /// be called from within the scope of a CefV8Handler or CefV8Accessor
        /// callback, or in combination with calling Enter() and Exit() on a stored
        /// CefV8Context reference. |object| is the receiver ('this' object) of the
        /// function. If |object| is empty the current context's global object will be
        /// used. |arguments| is the list of arguments that will be passed to the
        /// function. Returns the function return value on success. Returns NULL if
        /// this method is called incorrectly or an exception is thrown.
        /// </summary>
        public cef_v8value_t* ExecuteFunction(cef_v8value_t* @object, UIntPtr argumentsCount, cef_v8value_t** arguments)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.ExecuteFunction
        }

        /// <summary>
        /// Execute the function using the specified V8 context. |object| is the
        /// receiver ('this' object) of the function. If |object| is empty the
        /// specified context's global object will be used. |arguments| is the list of
        /// arguments that will be passed to the function. Returns the function return
        /// value on success. Returns NULL if this method is called incorrectly or an
        /// exception is thrown.
        /// </summary>
        public cef_v8value_t* ExecuteFunctionWithContext(cef_v8context_t* context, cef_v8value_t* @object, UIntPtr argumentsCount, cef_v8value_t** arguments)
        {
            throw new NotImplementedException(); // TODO: CefV8Value.ExecuteFunctionWithContext
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using TerrificNet.Thtml.Parsing;
using TerrificNet.Thtml.Parsing.Handlebars;

namespace TerrificNet.Thtml.Emit.Compiler
{
    // Node visitor that compiles a parsed thtml document into a tree of renderer
    // delegates of shape (TextWriter, IDataContext, IRenderingContext).
    // A Scope is pushed for each container node (document, element, attribute);
    // child visits append their emit actions to the current scope, and the
    // matching AfterVisit pops the scope and folds it into the parent's actions.
    class IlExpressionEmitNodeVisitor : INodeVisitor
    {
        private readonly IDataBinder _dataBinder;
        // Stack of open scopes; top of stack is the node currently being visited.
        private readonly Stack<Scope> _scopes = new Stack<Scope>();

        /// <summary>
        /// Creates the visitor.
        /// NOTE(review): <paramref name="helperBinder"/> is accepted but never stored
        /// or used — helper binding looks unimplemented; confirm before relying on it.
        /// </summary>
        public IlExpressionEmitNodeVisitor(IDataBinder dataBinder, IHelperBinder helperBinder)
        {
            _dataBinder = dataBinder;
        }

        /// <summary>
        /// Produces the composed renderer for the whole document. Must be called
        /// after a complete visit: exactly one scope (the document's) must remain.
        /// </summary>
        public Action<TextWriter, IDataContext, IRenderingContext> Generate()
        {
            var scope = _scopes.Pop();
            if (_scopes.Count != 0)
                throw new Exception("Scopes not closed");

            // Replay every collected emit action in document order.
            Action<TextWriter, IDataContext, IRenderingContext> exp = (writer, dataContext, renderingContext) =>
            {
                foreach (var action in scope.Elements)
                {
                    action(writer, dataContext, renderingContext);
                }
            };
            return exp;
        }

        // Document root opens the outermost scope (popped by Generate, not AfterVisit).
        public bool BeforeVisit(Document document)
        {
            _scopes.Push(new Scope());
            return true;
        }

        public void AfterVisit(Document document)
        {
        }

        public bool BeforeVisit(Element element)
        {
            _scopes.Push(new Scope());
            return true;
        }

        // Wraps the element's collected children/attributes in an action that
        // writes the opening tag (with attributes), the children, then the closing tag.
        public void AfterVisit(Element element)
        {
            var scope = _scopes.Pop();
            Action<TextWriter, IDataContext, IRenderingContext> exp = (writer, dataContext, renderingContext) =>
            {
                writer.Write($"<{element.TagName}");
                foreach (var attribute in scope.Attributes)
                {
                    // Item1 writes the attribute name, Item2 writes its value.
                    writer.Write(" ");
                    attribute.Item1(writer, dataContext, renderingContext);
                    writer.Write("=\"");
                    attribute.Item2(writer, dataContext, renderingContext);
                    writer.Write("\"");
                }
                writer.Write(">");
                foreach (var action in scope.Elements)
                {
                    action(writer, dataContext, renderingContext);
                }
                writer.Write($"</{element.TagName}>");
            };

            var parentScope = _scopes.Peek();
            parentScope.Elements.Add(exp);
        }

        // Literal attribute value: emitted verbatim into the current scope.
        public void Visit(ConstantAttributeContent attributeContent)
        {
            var scope = _scopes.Peek();
            scope.Elements.Add((writer, d, rc) => writer.Write(attributeContent.Text));
        }

        // Opens handling for a {{...}} statement. Only the iteration case does any
        // work here; all other expression kinds are handled in AfterVisit.
        public bool BeforeVisit(Statement statement)
        {
            var expression = statement.Expression;
            var iterationExpression = expression as IterationExpression;
            if (iterationExpression != null)
            {
                var scope = new Scope();
                _scopes.Push(scope);

                // NOTE(review): the visitor's result is never read — this looks like
                // work in progress for binding the iteration expression.
                var visitor = new EmitExpressionVisitor(_dataBinder, new NullHelperBinder());
                iterationExpression.Expression.Accept(visitor);

                var context = _dataBinder.Context();
                IEvaluator<IEnumerable> evaluator;
                if (!context.TryCreateEvaluation(out evaluator))
                    return false;

                // NOTE(review): the loop body is empty — iterated items are evaluated
                // but nothing is rendered per item yet; iteration support is incomplete
                // (AfterVisit also throws NotImplementedException for this case).
                Action<TextWriter, IDataContext, IRenderingContext> ext = (writer, dataContext, renderingContext) =>
                {
                    var items = evaluator.Evaluate(dataContext);
                    foreach (var item in items)
                    {
                    }
                };
                scope.Elements.Add(ext);

                //IEvaluator<string> evaluator;
                //if (result.TryCreateEvaluation(out evaluator))
                //{
                //	scope.Elements.Add((writer, dataContext, renderingContext) =>
                //	{
                //		var value = evaluator.Evaluate(dataContext);
                //		writer.Write(value);
                //	});
                //}
            }
            return true;
        }

        // Closes a {{...}} statement. Member expressions are bound to a string
        // evaluator and emitted; iteration/conditional/helper calls are not yet supported.
        public void AfterVisit(Statement statement)
        {
            var scope = _scopes.Peek();

            var expression = statement.Expression;
            var iterationExpression = expression as IterationExpression;
            if (iterationExpression != null)
            {
                throw new NotImplementedException();
            }

            var conditionalExpression = expression as ConditionalExpression;
            if (conditionalExpression != null)
            {
                throw new NotImplementedException();
            }

            var callHelperExpression = expression as CallHelperExpression;
            if (callHelperExpression != null)
            {
                throw new NotImplementedException();
            }

            var memberExpression = expression as MemberExpression;
            if (memberExpression != null)
            {
                // Bind the member to the data context; if a string evaluation can be
                // created, emit its value at render time (otherwise emit nothing).
                var result = _dataBinder.Property(memberExpression.Name);
                IEvaluator<string> evaluator;
                if (result.TryCreateEvaluation(out evaluator))
                {
                    scope.Elements.Add((writer, dataContext, renderingContext) =>
                    {
                        var value = evaluator.Evaluate(dataContext);
                        writer.Write(value);
                    });
                }
                return;
            }

            throw new NotImplementedException();
        }

        // Static text between tags: emitted verbatim.
        public void Visit(TextNode textNode)
        {
            Action<TextWriter, IDataContext, IRenderingContext> exp = (writer, d, rc) => writer.Write(textNode.Text);
            _scopes.Peek().Elements.Add(exp);
        }

        public void Visit(AttributeContentStatement constantAttributeContent)
        {
            throw new NotImplementedException();
        }

        public bool BeforeVisit(AttributeNode attributeNode)
        {
            _scopes.Push(new Scope());
            return true;
        }

        // Folds the attribute's value actions into a single value-writer and
        // registers the (name-writer, value-writer) pair on the parent scope.
        public void AfterVisit(AttributeNode attributeNode)
        {
            var scope = _scopes.Pop();
            Action<TextWriter, IDataContext, IRenderingContext> a = (w, d, rc) =>
            {
                foreach (var element in scope.Elements)
                {
                    element(w, d, rc);
                }
            };

            var parentScope = _scopes.Peek();
            parentScope.Attributes.Add(new Tuple<Action<TextWriter, IDataContext, IRenderingContext>, Action<TextWriter, IDataContext, IRenderingContext>>((w, d, r) => w.Write(attributeNode.Name), a));
        }

        public bool BeforeVisit(CompositeAttributeContent compositeAttributeContent)
        {
            throw new NotImplementedException();
        }

        public void AfterVisit(CompositeAttributeContent compositeAttributeContent)
        {
            throw new NotImplementedException();
        }

        public bool BeforeVisit(AttributeStatement attributeStatement)
        {
            throw new NotImplementedException();
        }

        public void AfterVisit(AttributeStatement attributeStatement)
        {
            throw new NotImplementedException();
        }

        // Accumulator for one open node: emit actions for children, plus
        // (name, value) writer pairs for attributes.
        private class Scope
        {
            public List<Action<TextWriter, IDataContext, IRenderingContext>> Elements { get; }
            public List<Tuple<Action<TextWriter, IDataContext, IRenderingContext>, Action<TextWriter, IDataContext, IRenderingContext>>> Attributes { get; }

            public Scope()
            {
                Elements = new List<Action<TextWriter, IDataContext, IRenderingContext>>();
                Attributes = new List<Tuple<Action<TextWriter, IDataContext, IRenderingContext>, Action<TextWriter, IDataContext, IRenderingContext>>>();
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using System.Globalization;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Serialization;

namespace System
{
    // Implements the Decimal data type. Decimal can represent values ranging
    // from -79,228,162,514,264,337,593,543,950,335 to
    // 79,228,162,514,264,337,593,543,950,335 with 28 significant digits, and is
    // ideally suited to financial calculations that require a large number of
    // significant digits and no round-off errors.
    //
    // The finite set of values of type Decimal are of the form m / 10^e, where
    // m is an integer such that -2^96 < m < 2^96, and e is an integer between
    // 0 and 28 inclusive.
    //
    // Contrary to float and double, decimal fractions such as 0.1 are
    // represented exactly, so Decimal is less prone to round-off errors.
    //
    // Widening conversions from ubyte, char, short, int and long to Decimal
    // never lose information and never throw. Narrowing conversions from
    // Decimal to those types round toward zero and throw OverflowException if
    // the result is not within range of the destination type.
    //
    // The widening conversion from Currency to Decimal never loses information
    // and never throws; the narrowing conversion back rounds the Decimal to
    // four decimals and throws OverflowException when out of Currency range.
    //
    // Conversions from Decimal to float or double may lose precision but never
    // lose overall magnitude and never throw. Conversions from float or double
    // to Decimal throw OverflowException when the value is not within range.
    [Serializable]
    [StructLayout(LayoutKind.Explicit)]
    [System.Runtime.CompilerServices.TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
    public partial struct Decimal : IFormattable, IComparable, IConvertible, IComparable<Decimal>, IEquatable<Decimal>, IDeserializationCallback, ISpanFormattable
    {
        // Sign mask for the flags field: bit clear = positive, bit set = negative.
        // (Corresponds to OleAut's DECIMAL_NEG constant in native code.)
        private const uint SignMask = 0x80000000;

        // Scale mask for the flags field: this byte holds the power of 10
        // (0..28 inclusive) to divide the 96-bit integer part by.
        private const uint ScaleMask = 0x00FF0000;

        // Number of bits the scale is shifted by within flags.
        private const int ScaleShift = 16;

        // Constant representing the Decimal value 0.
        public const Decimal Zero = 0m;

        // Constant representing the Decimal value 1.
        public const Decimal One = 1m;

        // Constant representing the Decimal value -1.
        public const Decimal MinusOne = -1m;

        // Largest possible Decimal: 79,228,162,514,264,337,593,543,950,335 (2^96 - 1).
        public const Decimal MaxValue = 79228162514264337593543950335m;

        // Smallest possible Decimal: -79,228,162,514,264,337,593,543,950,335.
        public const Decimal MinValue = -79228162514264337593543950335m;

        private const int CurrencyScale = 4; // Divide the "Int64" representation by 1E4 to get the "true" value of the Currency.

        // The lo, mid and hi fields contain the 96-bit integer part of the
        // Decimal; flags holds the scale in bits 16-23 (a value between 0 and
        // 28) and the sign in bit 31 (0 = positive, 1 = negative). Bits 0-15
        // and 24-30 are unused and must be zero.
        //
        // NOTE: Do not change the offsets of these fields. This structure maps
        // to the OleAut DECIMAL structure and can be passed as such in P/Invokes.
        [FieldOffset(0)]
        private int flags; // Do not rename (binary serialization)
        [FieldOffset(4)]
        private int hi; // Do not rename (binary serialization)
        [FieldOffset(8)]
        private int lo; // Do not rename (binary serialization)
        [FieldOffset(12)]
        private int mid; // Do not rename (binary serialization)

        // NOTE: The following fields overlay the ones exposed to serialization
        // (which have to be signed ints for serialization compat). The code
        // inside Decimal was ported from C++ and expects unsigned values.
        [FieldOffset(0), NonSerialized]
        private uint uflags;
        [FieldOffset(4), NonSerialized]
        private uint uhi;
        [FieldOffset(8), NonSerialized]
        private uint ulo;
        [FieldOffset(12), NonSerialized]
        private uint umid;

        /// <summary>
        /// The low and mid fields combined in little-endian order
        /// </summary>
        [FieldOffset(8), NonSerialized]
        private ulong ulomidLE;

        // NOTE(review): Low64, Hi32, Scale and IsNegative used below are
        // declared in another part of this partial struct (not in this file),
        // as are the DecCalc, SR and Number helpers.

        // Constructs a Decimal from an integer value.
        public Decimal(int value)
        {
            // JIT today can't inline methods that contain a "starg" opcode, so
            // work on a local copy. See DevDiv Bugs 81184: x86 JIT CQ:
            // Removing the inline restriction of "starg".
            int value_copy = value;
            if (value_copy >= 0)
            {
                uflags = 0;
            }
            else
            {
                uflags = SignMask;
                value_copy = -value_copy;
            }
            lo = value_copy;
            mid = 0;
            hi = 0;
        }

        // Constructs a Decimal from an unsigned integer value.
        [CLSCompliant(false)]
        public Decimal(uint value)
        {
            uflags = 0;
            ulo = value;
            umid = 0;
            uhi = 0;
        }

        // Constructs a Decimal from a long value.
        public Decimal(long value)
        {
            // Local copy avoids the "starg" JIT inlining restriction (see Decimal(int)).
            long value_copy = value;
            if (value_copy >= 0)
            {
                uflags = 0;
            }
            else
            {
                uflags = SignMask;
                value_copy = -value_copy;
            }
            Low64 = (ulong)value_copy;
            uhi = 0;
        }

        // Constructs a Decimal from an unsigned long value.
        [CLSCompliant(false)]
        public Decimal(ulong value)
        {
            uflags = 0;
            Low64 = value;
            uhi = 0;
        }

        // Constructs a Decimal from a float value.
        public Decimal(float value)
        {
            DecCalc.VarDecFromR4(value, out this);
        }

        // Constructs a Decimal from a double value.
        public Decimal(double value)
        {
            DecCalc.VarDecFromR8(value, out this);
        }

        //
        // Decimal <==> Currency conversion.
        //
        // A Currency represents a decimal value with 4 digits past the decimal
        // point; its Int64 representation is the value multiplied by 10,000
        // (e.g. $12.99 is stored as 129,900).
        //
        public static Decimal FromOACurrency(long cy)
        {
            Decimal d = default(Decimal);

            ulong absoluteCy; // has to be ulong to accommodate the case where cy == long.MinValue.
            if (cy < 0)
            {
                d.IsNegative = true;
                absoluteCy = (ulong)(-cy);
            }
            else
            {
                absoluteCy = (ulong)cy;
            }

            // Usually the result has Scale == 4, but trailing zero digits are
            // stripped for .NET compatibility: the scale affects ToString(), and
            // stripping prevents $12.95 from printing as "12.9500".
            int scale = CurrencyScale;
            if (absoluteCy != 0) // For compatibility, a currency of 0 emits the Decimal "0.0000" (scale set to 4).
            {
                while (scale != 0 && ((absoluteCy % 10) == 0))
                {
                    scale--;
                    absoluteCy /= 10;
                }
            }

            // No need to set d.Hi32 - a currency will never go high enough for
            // it to be anything other than zero.
            d.Low64 = absoluteCy;
            d.Scale = scale;
            return d;
        }

        public static long ToOACurrency(Decimal value)
        {
            return DecCalc.VarCyFromDec(ref value);
        }

        // A flags value is valid when only sign/scale bits are set and the
        // scale byte does not exceed 28.
        private static bool IsValid(uint flags) => (flags & ~(SignMask | ScaleMask)) == 0 && ((flags & ScaleMask) <= (28 << 16));

        // Constructs a Decimal from an integer array containing a binary
        // representation. bits must be a non-null, four-element array:
        // bits[0..2] hold the low, middle and high 32 bits of the 96-bit
        // integer part, and bits[3] holds the scale factor (bits 16-23, value
        // 0..28) and sign (bit 31); bits 0-15 and 24-30 of bits[3] must be zero.
        //
        // Note that several binary representations may denote the same numeric
        // value; e.g. 1 can be represented as {1, 0, 0, 0} (scale 0) and
        // equally well as {1000, 0, 0, 0x30000} (scale 3). All such
        // representations are equally valid and numerically equivalent.
        public Decimal(int[] bits)
        {
            if (bits == null)
                throw new ArgumentNullException(nameof(bits));
            if (bits.Length == 4)
            {
                uint f = (uint)bits[3];
                if (IsValid(f))
                {
                    lo = bits[0];
                    mid = bits[1];
                    hi = bits[2];
                    uflags = f;
                    return;
                }
            }
            throw new ArgumentException(SR.Arg_DecBitCtor);
        }

        // Constructs a Decimal from its constituent parts.
        public Decimal(int lo, int mid, int hi, bool isNegative, byte scale)
        {
            if (scale > 28)
                throw new ArgumentOutOfRangeException(nameof(scale), SR.ArgumentOutOfRange_DecimalScale);
            this.lo = lo;
            this.mid = mid;
            this.hi = hi;
            uflags = ((uint)scale) << 16;
            if (isNegative)
                uflags |= SignMask;
        }

        void IDeserializationCallback.OnDeserialization(Object sender)
        {
            // Called after deserialization of each instance: validate that the
            // deserialized flags describe a legal Decimal.
            if (!IsValid(uflags))
                throw new SerializationException(SR.Overflow_Decimal);
        }

        // Constructs a Decimal from its constituent parts (raw, unvalidated).
        private Decimal(ulong ulomidLE, uint hi, uint flags)
        {
            this.ulomidLE = ulomidLE;
            this.uhi = hi;
            this.uflags = flags;
        }

        // Returns the absolute value of the given Decimal: d if d is positive,
        // -d if d is negative (implemented by clearing the sign bit).
        internal static Decimal Abs(ref Decimal d)
        {
            return new Decimal(d.ulomidLE, d.uhi, d.uflags & ~SignMask);
        }

        // Adds two Decimal values.
        public static Decimal Add(Decimal d1, Decimal d2)
        {
            DecCalc.VarDecAdd(ref d1, ref d2);
            return d1;
        }

        // Rounds a Decimal to an integer value, rounding towards positive infinity.
        public static Decimal Ceiling(Decimal d)
        {
            uint flags = d.uflags;
            if ((flags & ScaleMask) != 0)
                DecCalc.InternalRound(ref d, (byte)(flags >> ScaleShift), DecCalc.RoundingMode.Ceiling);
            return d;
        }

        // Compares two Decimal values, returning an integer that indicates
        // their relationship.
        public static int Compare(Decimal d1, Decimal d2)
        {
            return DecCalc.VarDecCmp(ref d1, ref d2);
        }

        // Compares this object to another object. null is considered to be
        // less than any instance; a non-Decimal argument throws ArgumentException.
        public int CompareTo(Object value)
        {
            if (value == null)
                return 1;
            if (!(value is Decimal))
                throw new ArgumentException(SR.Arg_MustBeDecimal);

            Decimal other = (Decimal)value;
            return DecCalc.VarDecCmp(ref this, ref other);
        }

        public int CompareTo(Decimal value)
        {
            return DecCalc.VarDecCmp(ref this, ref value);
        }

        // Divides two Decimal values.
        public static Decimal Divide(Decimal d1, Decimal d2)
        {
            DecCalc.VarDecDiv(ref d1, ref d2);
            return d1;
        }

        // Returns true if the given object is a boxed Decimal numerically equal
        // to this one; false otherwise.
        public override bool Equals(Object value)
        {
            if (value is Decimal)
            {
                Decimal other = (Decimal)value;
                return DecCalc.VarDecCmp(ref this, ref other) == 0;
            }
            return false;
        }

        public bool Equals(Decimal value)
        {
            return DecCalc.VarDecCmp(ref this, ref value) == 0;
        }

        // Returns the hash code for this Decimal.
        public unsafe override int GetHashCode()
        {
            double dbl = DecCalc.VarR8FromDec(ref this);
            if (dbl == 0.0)
                // Ensure 0 and -0 have the same hash code
                return 0;

            // Conversion to double is lossy and produces rounding errors, so
            // mask off the lowest 4 bits. For example, these numerically equal
            // decimals with different internal representations convert to
            // slightly different doubles:
            //
            // decimal a = new decimal(new int[] { 0x76969696, 0x2fdd49fa, 0x409783ff, 0x00160000 });
            // => (decimal)1999021.176470588235294117647000000000 => (double)1999021.176470588
            // decimal b = new decimal(new int[] { 0x3f0f0f0f, 0x1e62edcc, 0x06758d33, 0x00150000 });
            // => (decimal)1999021.176470588235294117647000000000 => (double)1999021.1764705882
            return (int)(((((uint*)&dbl)[0]) & 0xFFFFFFF0) ^ ((uint*)&dbl)[1]);
        }

        // Compares two Decimal values for numeric equality.
        public static bool Equals(Decimal d1, Decimal d2)
        {
            return DecCalc.VarDecCmp(ref d1, ref d2) == 0;
        }

        // Rounds a Decimal to an integer value, rounding towards negative infinity.
        public static Decimal Floor(Decimal d)
        {
            uint flags = d.uflags;
            if ((flags & ScaleMask) != 0)
                DecCalc.InternalRound(ref d, (byte)(flags >> ScaleShift), DecCalc.RoundingMode.Floor);
            return d;
        }

        // Converts this Decimal to a string: an optional minus sign ("-")
        // followed by a sequence of digits ("0" - "9"), optionally followed by
        // a decimal point (".") and another sequence of digits.
        public override String ToString()
        {
            return Number.FormatDecimal(this, null, NumberFormatInfo.CurrentInfo);
        }

        public String ToString(String format)
        {
            return Number.FormatDecimal(this, format, NumberFormatInfo.CurrentInfo);
        }

        public String ToString(IFormatProvider provider)
        {
            return Number.FormatDecimal(this, null, NumberFormatInfo.GetInstance(provider));
        }

        public String ToString(String format, IFormatProvider provider)
        {
            return Number.FormatDecimal(this, format, NumberFormatInfo.GetInstance(provider));
        }

        public bool TryFormat(Span<char> destination, out int charsWritten, ReadOnlySpan<char> format = default, IFormatProvider provider = null)
        {
            return Number.TryFormatDecimal(this, format, NumberFormatInfo.GetInstance(provider), destination, out charsWritten);
        }

        // Converts a string to a Decimal. The string must consist of an
        // optional minus sign followed by digits, optionally containing a
        // single decimal point. Leading/trailing whitespace, a currency symbol,
        // a trailing negative sign and parentheses are also allowed.
        public static Decimal Parse(String s)
        {
            if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
            return Number.ParseDecimal(s, NumberStyles.Number, NumberFormatInfo.CurrentInfo);
        }

        // Complement of every NumberStyles flag that is meaningful for
        // floating-point parsing; used to detect undefined flags.
        internal const NumberStyles InvalidNumberStyles = ~(NumberStyles.AllowLeadingWhite | NumberStyles.AllowTrailingWhite | NumberStyles.AllowLeadingSign | NumberStyles.AllowTrailingSign | NumberStyles.AllowParentheses | NumberStyles.AllowDecimalPoint | NumberStyles.AllowThousands | NumberStyles.AllowExponent | NumberStyles.AllowCurrencySymbol | NumberStyles.AllowHexSpecifier);

        internal static void ValidateParseStyleFloatingPoint(NumberStyles style)
        {
            // Check for undefined flags
            if ((style & InvalidNumberStyles) != 0)
            {
                throw new ArgumentException(SR.Argument_InvalidNumberStyles, nameof(style));
            }
            if ((style & NumberStyles.AllowHexSpecifier) != 0)
            {
                // Check for hex number
                throw new ArgumentException(SR.Arg_HexStyleNotSupported);
            }
        }

        public static Decimal Parse(String s, NumberStyles style)
        {
            ValidateParseStyleFloatingPoint(style);
            if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
            return Number.ParseDecimal(s, style, NumberFormatInfo.CurrentInfo);
        }

        public static Decimal Parse(String s, IFormatProvider provider)
        {
            if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
            return Number.ParseDecimal(s, NumberStyles.Number, NumberFormatInfo.GetInstance(provider));
        }

        public static Decimal Parse(String s, NumberStyles style, IFormatProvider provider)
        {
            ValidateParseStyleFloatingPoint(style);
            if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
            return Number.ParseDecimal(s, style, NumberFormatInfo.GetInstance(provider));
        }

        // NOTE(review): the default style here is NumberStyles.Integer, whereas
        // the string-based overloads default to NumberStyles.Number — verify
        // this inconsistency is intentional.
        public static Decimal Parse(ReadOnlySpan<char> s, NumberStyles style = NumberStyles.Integer, IFormatProvider provider = null)
        {
            ValidateParseStyleFloatingPoint(style);
            return Number.ParseDecimal(s, style, NumberFormatInfo.GetInstance(provider));
        }

        public static Boolean TryParse(String s, out Decimal result)
        {
            if (s == null)
            {
                result = 0;
                return false;
            }

            return Number.TryParseDecimal(s, NumberStyles.Number, NumberFormatInfo.CurrentInfo, out result);
        }

        public static bool TryParse(ReadOnlySpan<char> s, out decimal result)
        {
            return Number.TryParseDecimal(s, NumberStyles.Number, NumberFormatInfo.CurrentInfo, out result);
        }

        public static Boolean TryParse(String s, NumberStyles style, IFormatProvider provider, out Decimal result)
        {
            ValidateParseStyleFloatingPoint(style);

            if (s == null)
            {
                result = 0;
                return false;
            }

            return Number.TryParseDecimal(s, style, NumberFormatInfo.GetInstance(provider), out result);
        }

        public static bool TryParse(ReadOnlySpan<char> s, NumberStyles style, IFormatProvider provider, out decimal result)
        {
            ValidateParseStyleFloatingPoint(style);
            return Number.TryParseDecimal(s, style, NumberFormatInfo.GetInstance(provider), out result);
        }

        // Returns a binary representation of a Decimal as a four-element int
        // array: elements 0-2 contain the low, middle and high 32 bits of the
        // 96-bit integer part; element 3 contains the scale factor (bits 16-23,
        // value 0..28) and the sign (bit 31).
        public static int[] GetBits(Decimal d)
        {
            return new int[] { d.lo, d.mid, d.hi, d.flags };
        }

        // Serializes the raw fields little-endian into the first 16 bytes of buffer.
        internal static void GetBytes(Decimal d, byte[] buffer)
        {
            Debug.Assert((buffer != null && buffer.Length >= 16), "[GetBytes]buffer != null && buffer.Length >= 16");
            buffer[0] = (byte)d.lo;
            buffer[1] = (byte)(d.lo >> 8);
            buffer[2] = (byte)(d.lo >> 16);
            buffer[3] = (byte)(d.lo >> 24);

            buffer[4] = (byte)d.mid;
            buffer[5] = (byte)(d.mid >> 8);
            buffer[6] = (byte)(d.mid >> 16);
            buffer[7] = (byte)(d.mid >> 24);

            buffer[8] = (byte)d.hi;
            buffer[9] = (byte)(d.hi >> 8);
            buffer[10] = (byte)(d.hi >> 16);
            buffer[11] = (byte)(d.hi >> 24);

            buffer[12] = (byte)d.flags;
            buffer[13] = (byte)(d.flags >> 8);
            buffer[14] = (byte)(d.flags >> 16);
            buffer[15] = (byte)(d.flags >> 24);
        }

        // Returns (by reference) the larger of two Decimal values.
        internal static ref Decimal Max(ref Decimal d1, ref Decimal d2)
        {
            return ref DecCalc.VarDecCmp(ref d1, ref d2) >= 0 ? ref d1 : ref d2;
        }

        // Returns (by reference) the smaller of two Decimal values.
        internal static ref Decimal Min(ref Decimal d1, ref Decimal d2)
        {
            return ref DecCalc.VarDecCmp(ref d1, ref d2) < 0 ? ref d1 : ref d2;
        }

        public static Decimal Remainder(Decimal d1, Decimal d2)
        {
            return DecCalc.VarDecMod(ref d1, ref d2);
        }

        // Multiplies two Decimal values.
        public static Decimal Multiply(Decimal d1, Decimal d2)
        {
            DecCalc.VarDecMul(ref d1, ref d2);
            return d1;
        }

        // Returns the negated value of the given Decimal (sign bit flipped;
        // zero stays zero, modulo its sign bit).
        public static Decimal Negate(Decimal d)
        {
            return new Decimal(d.ulomidLE, d.uhi, d.uflags ^ SignMask);
        }

        // Rounds a Decimal to the given number of decimal places (0..28).
        // By default a mid-point value is rounded to the nearest even number;
        // a mode may be passed to round away from zero instead.
        public static Decimal Round(Decimal d) => Round(ref d, 0, MidpointRounding.ToEven);
        public static Decimal Round(Decimal d, int decimals) => Round(ref d, decimals, MidpointRounding.ToEven);
        public static Decimal Round(Decimal d, MidpointRounding mode) => Round(ref d, 0, mode);
        public static Decimal Round(Decimal d, int decimals, MidpointRounding mode) => Round(ref d, decimals, mode);

        private static Decimal Round(ref Decimal d, int decimals, MidpointRounding mode)
        {
            // (uint) cast rejects negative values in a single comparison.
            if ((uint)decimals > 28)
                throw new ArgumentOutOfRangeException(nameof(decimals), SR.ArgumentOutOfRange_DecimalRound);
            if ((uint)mode > (uint)MidpointRounding.AwayFromZero)
                throw new ArgumentException(SR.Format(SR.Argument_InvalidEnumValue, mode, nameof(MidpointRounding)), nameof(mode));

            int scale = d.Scale - decimals;
            if (scale > 0)
                DecCalc.InternalRound(ref d, (uint)scale, (DecCalc.RoundingMode)mode);
            return d;
        }

        // Sign: 0 for zero, -1 for negative, +1 for positive.
        internal static int Sign(ref decimal d) => (d.lo | d.mid | d.hi) == 0 ? 0 : (d.flags >> 31) | 1;

        // Subtracts two Decimal values.
        public static Decimal Subtract(Decimal d1, Decimal d2)
        {
            DecCalc.VarDecSub(ref d1, ref d2);
            return d1;
        }

        // Converts a Decimal to an unsigned byte; rounds towards zero and
        // throws OverflowException when out of range.
        public static byte ToByte(Decimal value)
        {
            uint temp;
            try
            {
                temp = ToUInt32(value);
            }
            catch (OverflowException e)
            {
                throw new OverflowException(SR.Overflow_Byte, e);
            }
            if (temp != (byte)temp) throw new OverflowException(SR.Overflow_Byte);
            return (byte)temp;
        }

        // Converts a Decimal to a signed byte; rounds towards zero and throws
        // OverflowException when out of range.
        [CLSCompliant(false)]
        public static sbyte ToSByte(Decimal value)
        {
            int temp;
            try
            {
                temp = ToInt32(value);
            }
            catch (OverflowException e)
            {
                throw new OverflowException(SR.Overflow_SByte, e);
            }
            if (temp != (sbyte)temp) throw new OverflowException(SR.Overflow_SByte);
            return (sbyte)temp;
        }

        // Converts a Decimal to a short; rounds towards zero and throws
        // OverflowException when out of range.
        public static short ToInt16(Decimal value)
        {
            int temp;
            try
            {
                temp = ToInt32(value);
            }
            catch (OverflowException e)
            {
                throw new OverflowException(SR.Overflow_Int16, e);
            }
            if (temp != (short)temp) throw new OverflowException(SR.Overflow_Int16);
            return (short)temp;
        }

        // Converts a Decimal to a double. Since a double has fewer significant
        // digits than a Decimal, this operation may produce round-off errors.
        public static double ToDouble(Decimal d)
        {
            return DecCalc.VarR8FromDec(ref d);
        }

        // Converts a Decimal to an integer; rounds towards zero and throws
        // OverflowException when out of range.
        public static int ToInt32(Decimal d)
        {
            Truncate(ref d);
            if ((d.hi | d.mid) == 0)
            {
                int i = d.lo;
                if (!d.IsNegative)
                {
                    if (i >= 0) return i;
                }
                else
                {
                    // Negating int.MinValue overflows back to itself, so the
                    // <= 0 check accepts exactly that boundary value.
                    i = -i;
                    if (i <= 0) return i;
                }
            }
            throw new OverflowException(SR.Overflow_Int32);
        }

        // Converts a Decimal to a long; rounds towards zero and throws
        // OverflowException when out of range.
        public static long ToInt64(Decimal d)
        {
            Truncate(ref d);
            if (d.uhi == 0)
            {
                long l = (long)d.Low64;
                if (!d.IsNegative)
                {
                    if (l >= 0) return l;
                }
                else
                {
                    // Same long.MinValue boundary trick as in ToInt32.
                    l = -l;
                    if (l <= 0) return l;
                }
            }
            throw new OverflowException(SR.Overflow_Int64);
        }

        // Converts a Decimal to an ushort; rounds towards zero and throws
        // OverflowException when out of range.
        [CLSCompliant(false)]
        public static ushort ToUInt16(Decimal value)
        {
            uint temp;
            try
            {
                temp = ToUInt32(value);
            }
            catch (OverflowException e)
            {
                throw new OverflowException(SR.Overflow_UInt16, e);
            }
            if (temp != (ushort)temp) throw new OverflowException(SR.Overflow_UInt16);
            return (ushort)temp;
        }

        // Converts a Decimal to an unsigned integer; rounds towards zero and
        // throws OverflowException when out of range.
        [CLSCompliant(false)]
        public static uint ToUInt32(Decimal d)
        {
            Truncate(ref d);
            if ((d.uhi | d.umid) == 0)
            {
                uint i = d.ulo;
                if (!d.IsNegative || i == 0)
                    return i;
            }
            throw new OverflowException(SR.Overflow_UInt32);
        }

        // Converts a Decimal to an unsigned long; rounds towards zero and
        // throws OverflowException when out of range.
        [CLSCompliant(false)]
        public static ulong ToUInt64(Decimal d)
        {
            Truncate(ref d);
            if (d.uhi == 0)
            {
                ulong l = d.Low64;
                if (!d.IsNegative || l == 0)
                    return l;
            }
            throw new OverflowException(SR.Overflow_UInt64);
        }

        // Converts a Decimal to a float. Since a float has fewer significant
        // digits than a Decimal, this operation may produce round-off errors.
        public static float ToSingle(Decimal d)
        {
            return DecCalc.VarR4FromDec(ref d);
        }

        // Truncates a Decimal to an integer value, rounding towards zero
        // (removing all digits after the decimal point).
        public static Decimal Truncate(Decimal d)
        {
            Truncate(ref d);
            return d;
        }

        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        private static void Truncate(ref Decimal d)
        {
            uint flags = d.uflags;
            if ((flags & ScaleMask) != 0)
                DecCalc.InternalRound(ref d, (byte)(flags >> ScaleShift), DecCalc.RoundingMode.Truncate);
        }

        // Widening conversions to Decimal (never lose information, never throw).
        public static implicit operator Decimal(byte value)
        {
            return new Decimal(value);
        }

        [CLSCompliant(false)]
        public static implicit operator Decimal(sbyte value)
        {
            return new Decimal(value);
        }

        public static implicit operator Decimal(short value)
        {
            return new Decimal(value);
        }

        [CLSCompliant(false)]
        public static implicit operator Decimal(ushort value)
        {
            return new Decimal(value);
        }

        public static implicit operator Decimal(char value)
        {
            return new Decimal(value);
        }

        public static implicit operator Decimal(int value)
        {
            return new Decimal(value);
        }

        [CLSCompliant(false)]
        public static implicit operator Decimal(uint value)
        {
            return new Decimal(value);
        }

        public static implicit operator Decimal(long value)
        {
            return new Decimal(value);
        }

        [CLSCompliant(false)]
        public static implicit operator Decimal(ulong value)
        {
            return new Decimal(value);
        }

        // Explicit conversions from floating point may throw OverflowException.
        public static explicit operator Decimal(float value)
        {
            return new Decimal(value);
        }

        public static explicit operator Decimal(double value)
        {
            return new Decimal(value);
        }

        // Narrowing conversions from Decimal (round towards zero, may throw).
        public static explicit operator byte(Decimal value)
        {
            return ToByte(value);
        }

        [CLSCompliant(false)]
        public static explicit operator sbyte(Decimal value)
        {
            return ToSByte(value);
        }

        public static explicit operator char(Decimal value)
        {
            UInt16 temp;
            try
            {
                temp = ToUInt16(value);
            }
            catch (OverflowException e)
            {
                // Re-wrap so the message names char, not ushort.
                throw new OverflowException(SR.Overflow_Char, e);
            }
            return (char)temp;
        }

        public static explicit operator short(Decimal value)
        {
            return ToInt16(value);
        }

        [CLSCompliant(false)]
        public static explicit operator ushort(Decimal value)
        {
            return ToUInt16(value);
        }

        public static explicit operator int(Decimal value)
        {
            return ToInt32(value);
        }

        [CLSCompliant(false)]
        public static explicit operator uint(Decimal value)
        {
            return ToUInt32(value);
        }

        public static explicit operator long(Decimal value)
        {
            return ToInt64(value);
        }

        [CLSCompliant(false)]
        public static explicit operator ulong(Decimal value)
        {
            return ToUInt64(value);
        }

        public static explicit operator float(Decimal value)
        {
            return ToSingle(value);
        }

        public static explicit operator double(Decimal value)
        {
            return ToDouble(value);
        }

        // Arithmetic and comparison operators delegate to the static methods above.
        public static Decimal operator +(Decimal d)
        {
            return d;
        }

        public static Decimal operator -(Decimal d)
        {
            return Negate(d);
        }

        public static Decimal operator ++(Decimal d)
        {
            return Add(d, One);
        }

        public static Decimal operator --(Decimal d)
        {
            return Subtract(d, One);
        }

        public static Decimal operator +(Decimal d1, Decimal d2)
        {
            return Add(d1, d2);
        }

        public static Decimal operator -(Decimal d1, Decimal d2)
        {
            return Subtract(d1, d2);
        }

        public static Decimal operator *(Decimal d1, Decimal d2)
        {
            return Multiply(d1, d2);
        }

        public static Decimal operator /(Decimal d1, Decimal d2)
        {
            return Divide(d1, d2);
        }

        public static Decimal operator %(Decimal d1, Decimal d2)
        {
            return Remainder(d1, d2);
        }

        public static bool operator ==(Decimal d1, Decimal d2)
        {
            return DecCalc.VarDecCmp(ref d1, ref d2) == 0;
        }

        public static bool operator !=(Decimal d1, Decimal d2)
        {
            return DecCalc.VarDecCmp(ref d1, ref d2) != 0;
        }

        public static bool operator <(Decimal d1, Decimal d2)
        {
            return DecCalc.VarDecCmp(ref d1, ref d2) < 0;
        }

        public static bool operator <=(Decimal d1, Decimal d2)
        {
            return DecCalc.VarDecCmp(ref d1, ref d2) <= 0;
        }

        public static bool operator >(Decimal d1, Decimal d2)
        {
            return DecCalc.VarDecCmp(ref d1, ref d2) > 0;
        }

        public static bool operator >=(Decimal d1, Decimal d2)
        {
            return DecCalc.VarDecCmp(ref d1, ref d2) >= 0;
        }

        //
        // IConvertible implementation
        //
        public TypeCode GetTypeCode()
        {
            return TypeCode.Decimal;
        }

        bool IConvertible.ToBoolean(IFormatProvider provider)
        {
            return Convert.ToBoolean(this);
        }

        char IConvertible.ToChar(IFormatProvider provider)
        {
            throw new InvalidCastException(String.Format(SR.InvalidCast_FromTo, "Decimal", "Char"));
        }

        sbyte IConvertible.ToSByte(IFormatProvider provider)
        {
            return Convert.ToSByte(this);
        }

        byte IConvertible.ToByte(IFormatProvider provider)
        {
            return Convert.ToByte(this);
        }

        short IConvertible.ToInt16(IFormatProvider provider)
        {
            return Convert.ToInt16(this);
        }

        ushort IConvertible.ToUInt16(IFormatProvider provider)
        {
            return Convert.ToUInt16(this);
        }

        int IConvertible.ToInt32(IFormatProvider provider)
        {
            return Convert.ToInt32(this);
        }

        uint IConvertible.ToUInt32(IFormatProvider provider)
        {
            return Convert.ToUInt32(this);
        }

        long IConvertible.ToInt64(IFormatProvider provider)
        {
            return Convert.ToInt64(this);
        }

        ulong IConvertible.ToUInt64(IFormatProvider provider)
        {
            return Convert.ToUInt64(this);
        }

        float IConvertible.ToSingle(IFormatProvider provider)
        {
            return Convert.ToSingle(this);
        }

        double IConvertible.ToDouble(IFormatProvider provider)
        {
            return Convert.ToDouble(this);
        }

        Decimal IConvertible.ToDecimal(IFormatProvider provider)
        {
            return this;
        }

        DateTime IConvertible.ToDateTime(IFormatProvider provider)
        {
            throw new InvalidCastException(String.Format(SR.InvalidCast_FromTo, "Decimal", "DateTime"));
        }

        Object IConvertible.ToType(Type type, IFormatProvider provider)
        {
            return Convert.DefaultToType((IConvertible)this, type, provider);
        }
    }
}
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!

using gaxgrpc = Google.Api.Gax.Grpc;
using lro = Google.LongRunning;
using wkt = Google.Protobuf.WellKnownTypes;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using xunit = Xunit;

namespace Google.Cloud.Talent.V4.Tests
{
    /// <summary>Generated unit tests.</summary>
    public sealed class GeneratedJobServiceClientTest
    {
        // NOTE(review): generated file — do not hand-edit the code; comments here are
        // navigational only. Each test wires a strict Moq mock of the raw gRPC client,
        // invokes one JobServiceClient overload, and uses Assert.Same to verify the
        // exact response object from the gRPC layer is returned unchanged.

        // CreateJob called with a complete CreateJobRequest (sync overload).
        [xunit::FactAttribute]
        public void CreateJobRequestObject()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            CreateJobRequest request = new CreateJobRequest { ParentAsTenantName = TenantName.FromProjectTenant("[PROJECT]", "[TENANT]"), Job = new Job(), };
            Job expectedResponse = new Job
            {
                JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = { JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea,
#pragma warning disable CS0612
                Visibility = Visibility.SharedWithPublic,
#pragma warning restore CS0612
                JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(),
            };
            mockGrpcClient.Setup(x => x.CreateJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
            Job response = client.CreateJob(request);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // CreateJobAsync with a complete request; exercises both the CallSettings
        // and the CancellationToken overloads.
        [xunit::FactAttribute]
        public async stt::Task CreateJobRequestObjectAsync()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            CreateJobRequest request = new CreateJobRequest { ParentAsTenantName = TenantName.FromProjectTenant("[PROJECT]", "[TENANT]"), Job = new Job(), };
            Job expectedResponse = new Job
            {
                JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = { JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea,
#pragma warning disable CS0612
                Visibility = Visibility.SharedWithPublic,
#pragma warning restore CS0612
                JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(),
            };
            mockGrpcClient.Setup(x => x.CreateJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Job>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
            Job responseCallSettings = await client.CreateJobAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Job responseCancellationToken = await client.CreateJobAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // Flattened sync overload: CreateJob(string parent, Job job).
        [xunit::FactAttribute]
        public void CreateJob()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            CreateJobRequest request = new CreateJobRequest { ParentAsTenantName = TenantName.FromProjectTenant("[PROJECT]", "[TENANT]"), Job = new Job(), };
            Job expectedResponse = new Job
            {
                JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = { JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea,
#pragma warning disable CS0612
                Visibility = Visibility.SharedWithPublic,
#pragma warning restore CS0612
                JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(),
            };
            mockGrpcClient.Setup(x => x.CreateJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
            Job response = client.CreateJob(request.Parent, request.Job);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Flattened async overload: CreateJobAsync(string parent, Job job).
        [xunit::FactAttribute]
        public async stt::Task CreateJobAsync()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            CreateJobRequest request = new CreateJobRequest { ParentAsTenantName = TenantName.FromProjectTenant("[PROJECT]", "[TENANT]"), Job = new Job(), };
            Job expectedResponse = new Job
            {
                JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = { JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea,
#pragma warning disable CS0612
                Visibility = Visibility.SharedWithPublic,
#pragma warning restore CS0612
                JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(),
            };
            mockGrpcClient.Setup(x => x.CreateJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Job>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
            Job responseCallSettings = await client.CreateJobAsync(request.Parent, request.Job, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Job responseCancellationToken = await client.CreateJobAsync(request.Parent, request.Job, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // Typed-resource-name sync overload: CreateJob(TenantName parent, Job job).
        [xunit::FactAttribute]
        public void CreateJobResourceNames()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            CreateJobRequest request = new CreateJobRequest { ParentAsTenantName = TenantName.FromProjectTenant("[PROJECT]", "[TENANT]"), Job = new Job(), };
            Job expectedResponse = new Job
            {
                JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = { JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea,
#pragma warning disable CS0612
                Visibility = Visibility.SharedWithPublic,
#pragma warning restore CS0612
                JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(),
            };
            mockGrpcClient.Setup(x => x.CreateJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
            Job response = client.CreateJob(request.ParentAsTenantName, request.Job);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Typed-resource-name async overload: CreateJobAsync(TenantName parent, Job job).
        [xunit::FactAttribute]
        public async stt::Task CreateJobResourceNamesAsync()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            CreateJobRequest request = new CreateJobRequest { ParentAsTenantName = TenantName.FromProjectTenant("[PROJECT]", "[TENANT]"), Job = new Job(), };
            Job expectedResponse = new Job
            {
                JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = { JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea,
#pragma warning disable CS0612
                Visibility = Visibility.SharedWithPublic,
#pragma warning restore CS0612
                JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(),
            };
            mockGrpcClient.Setup(x => x.CreateJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Job>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
            Job responseCallSettings = await client.CreateJobAsync(request.ParentAsTenantName, request.Job, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Job responseCancellationToken = await client.CreateJobAsync(request.ParentAsTenantName, request.Job, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // GetJob called with a complete GetJobRequest (sync overload).
        [xunit::FactAttribute]
        public void GetJobRequestObject()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetJobRequest request = new GetJobRequest { JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), };
            Job expectedResponse = new Job
            {
                JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = { JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea,
#pragma warning disable CS0612
                Visibility = Visibility.SharedWithPublic,
#pragma warning restore CS0612
                JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(),
            };
            mockGrpcClient.Setup(x => x.GetJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
            Job response = client.GetJob(request);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // GetJobAsync with a complete request; exercises both async overloads.
        [xunit::FactAttribute]
        public async stt::Task GetJobRequestObjectAsync()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetJobRequest request = new GetJobRequest { JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), };
            Job expectedResponse = new Job
            {
                JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = { JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea,
#pragma warning disable CS0612
                Visibility = Visibility.SharedWithPublic,
#pragma warning restore CS0612
                JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(),
            };
            mockGrpcClient.Setup(x => x.GetJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Job>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
            Job responseCallSettings = await client.GetJobAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Job responseCancellationToken = await client.GetJobAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // Flattened sync overload: GetJob(string name).
        [xunit::FactAttribute]
        public void GetJob()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetJobRequest request = new GetJobRequest { JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), };
            Job expectedResponse = new Job
            {
                JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = { JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea,
#pragma warning disable CS0612
                Visibility = Visibility.SharedWithPublic,
#pragma warning restore CS0612
                JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(),
            };
            mockGrpcClient.Setup(x => x.GetJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
            Job response = client.GetJob(request.Name);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Flattened async overload: GetJobAsync(string name).
        [xunit::FactAttribute]
        public async stt::Task GetJobAsync()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetJobRequest request = new GetJobRequest { JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), };
            Job expectedResponse = new Job
            {
                JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = { JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea,
#pragma warning disable CS0612
                Visibility = Visibility.SharedWithPublic,
#pragma warning restore CS0612
                JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(),
            };
            mockGrpcClient.Setup(x => x.GetJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Job>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
            Job responseCallSettings = await client.GetJobAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Job responseCancellationToken = await client.GetJobAsync(request.Name, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // Typed-resource-name sync overload: GetJob(JobName name).
        [xunit::FactAttribute]
        public void GetJobResourceNames()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetJobRequest request = new GetJobRequest { JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), };
            Job expectedResponse = new Job
            {
                JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = { JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea,
#pragma warning disable CS0612
                Visibility = Visibility.SharedWithPublic,
#pragma warning restore CS0612
                JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(),
            };
            mockGrpcClient.Setup(x => x.GetJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
            Job response = client.GetJob(request.JobName);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Typed-resource-name async overload: GetJobAsync(JobName name).
        [xunit::FactAttribute]
        public async stt::Task GetJobResourceNamesAsync()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            GetJobRequest request = new GetJobRequest { JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), };
            Job expectedResponse = new Job
            {
                JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = { JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea,
#pragma warning disable CS0612
                Visibility = Visibility.SharedWithPublic,
#pragma warning restore CS0612
                JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(),
            };
            mockGrpcClient.Setup(x => x.GetJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Job>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
            Job responseCallSettings = await client.GetJobAsync(request.JobName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Job responseCancellationToken = await client.GetJobAsync(request.JobName, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // UpdateJob called with a complete UpdateJobRequest (sync overload).
        [xunit::FactAttribute]
        public void UpdateJobRequestObject()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            UpdateJobRequest request = new UpdateJobRequest { Job = new Job(), UpdateMask = new wkt::FieldMask(), };
            Job expectedResponse = new Job
            {
                JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = { JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea,
#pragma warning disable CS0612
                Visibility = Visibility.SharedWithPublic,
#pragma warning restore CS0612
                JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(),
            };
            mockGrpcClient.Setup(x => x.UpdateJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
            Job response = client.UpdateJob(request);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // UpdateJobAsync with a complete request; exercises both async overloads.
        [xunit::FactAttribute]
        public async stt::Task UpdateJobRequestObjectAsync()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            UpdateJobRequest request = new UpdateJobRequest { Job = new Job(), UpdateMask = new wkt::FieldMask(), };
            Job expectedResponse = new Job
            {
                JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = { JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea,
#pragma warning disable CS0612
                Visibility = Visibility.SharedWithPublic,
#pragma warning restore CS0612
                JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(),
            };
            mockGrpcClient.Setup(x => x.UpdateJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Job>(stt::Task.FromResult(expectedResponse), null, null, null, null));
            JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
            Job responseCallSettings = await client.UpdateJobAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
            xunit::Assert.Same(expectedResponse, responseCallSettings);
            Job responseCancellationToken = await client.UpdateJobAsync(request, st::CancellationToken.None);
            xunit::Assert.Same(expectedResponse, responseCancellationToken);
            mockGrpcClient.VerifyAll();
        }

        // Flattened sync overload: UpdateJob(Job job, FieldMask updateMask).
        [xunit::FactAttribute]
        public void UpdateJob()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            UpdateJobRequest request = new UpdateJobRequest { Job = new Job(), UpdateMask = new wkt::FieldMask(), };
            Job expectedResponse = new Job
            {
                JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = { JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea,
#pragma warning disable CS0612
                Visibility = Visibility.SharedWithPublic,
#pragma warning restore CS0612
                JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(),
            };
            mockGrpcClient.Setup(x => x.UpdateJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
            JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
            Job response = client.UpdateJob(request.Job, request.UpdateMask);
            xunit::Assert.Same(expectedResponse, response);
            mockGrpcClient.VerifyAll();
        }

        // Flattened async overload: UpdateJobAsync(Job job, FieldMask updateMask).
        [xunit::FactAttribute]
        public async stt::Task UpdateJobAsync()
        {
            moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
            mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
            UpdateJobRequest request = new UpdateJobRequest { Job = new Job(), UpdateMask = new wkt::FieldMask(), };
            Job expectedResponse = new Job { JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), CompanyAsCompanyName = CompanyName.FromProjectTenantCompany("[PROJECT]", "[TENANT]", "[COMPANY]"), RequisitionId = "requisition_id21c2f0af", Title = "title17dbd3d5", Description = "description2cf9da67", Addresses = { "addresses2f3a3e96", }, ApplicationInfo = new Job.Types.ApplicationInfo(), JobBenefits = {
JobBenefit.Dental, }, CompensationInfo = new CompensationInfo(), CustomAttributes = { { "key8a0b6e3c", new CustomAttribute() }, }, DegreeTypes = { DegreeType.PrimaryEducation, }, Department = "departmentca9f9d45", EmploymentTypes = { EmploymentType.OtherEmploymentType, }, Incentives = "incentives80814488", LanguageCode = "language_code2f6c7160", JobLevel = JobLevel.Director, PromotionValue = 899484920, Qualifications = "qualifications920abb76", Responsibilities = "responsibilities978e5c9b", PostingRegion = PostingRegion.AdministrativeArea, #pragma warning disable CS0612 Visibility = Visibility.SharedWithPublic, #pragma warning restore CS0612 JobStartTime = new wkt::Timestamp(), JobEndTime = new wkt::Timestamp(), PostingPublishTime = new wkt::Timestamp(), PostingExpireTime = new wkt::Timestamp(), PostingCreateTime = new wkt::Timestamp(), PostingUpdateTime = new wkt::Timestamp(), CompanyDisplayName = "company_display_name07e5990f", DerivedInfo = new Job.Types.DerivedInfo(), ProcessingOptions = new Job.Types.ProcessingOptions(), }; mockGrpcClient.Setup(x => x.UpdateJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Job>(stt::Task.FromResult(expectedResponse), null, null, null, null)); JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null); Job responseCallSettings = await client.UpdateJobAsync(request.Job, request.UpdateMask, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); xunit::Assert.Same(expectedResponse, responseCallSettings); Job responseCancellationToken = await client.UpdateJobAsync(request.Job, request.UpdateMask, st::CancellationToken.None); xunit::Assert.Same(expectedResponse, responseCancellationToken); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void DeleteJobRequestObject() { moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => 
x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); DeleteJobRequest request = new DeleteJobRequest { JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), }; wkt::Empty expectedResponse = new wkt::Empty { }; mockGrpcClient.Setup(x => x.DeleteJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null); client.DeleteJob(request); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task DeleteJobRequestObjectAsync() { moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); DeleteJobRequest request = new DeleteJobRequest { JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), }; wkt::Empty expectedResponse = new wkt::Empty { }; mockGrpcClient.Setup(x => x.DeleteJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expectedResponse), null, null, null, null)); JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null); await client.DeleteJobAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); await client.DeleteJobAsync(request, st::CancellationToken.None); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void DeleteJob() { moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); DeleteJobRequest request = new DeleteJobRequest { JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), }; wkt::Empty expectedResponse = new wkt::Empty { }; 
mockGrpcClient.Setup(x => x.DeleteJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null); client.DeleteJob(request.Name); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public async stt::Task DeleteJobAsync() { moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); DeleteJobRequest request = new DeleteJobRequest { JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), }; wkt::Empty expectedResponse = new wkt::Empty { }; mockGrpcClient.Setup(x => x.DeleteJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expectedResponse), null, null, null, null)); JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null); await client.DeleteJobAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None)); await client.DeleteJobAsync(request.Name, st::CancellationToken.None); mockGrpcClient.VerifyAll(); } [xunit::FactAttribute] public void DeleteJobResourceNames() { moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict); mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object); DeleteJobRequest request = new DeleteJobRequest { JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"), }; wkt::Empty expectedResponse = new wkt::Empty { }; mockGrpcClient.Setup(x => x.DeleteJob(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse); JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null); client.DeleteJob(request.JobName); mockGrpcClient.VerifyAll(); } 
// Verifies the async DeleteJob convenience overload taking a typed JobName, both call paths.
[xunit::FactAttribute]
public async stt::Task DeleteJobResourceNamesAsync()
{
    // Strict mock: any call without a matching Setup fails the test.
    moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
    mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    DeleteJobRequest request = new DeleteJobRequest
    {
        JobName = JobName.FromProjectTenantJob("[PROJECT]", "[TENANT]", "[JOB]"),
    };
    wkt::Empty expectedResponse = new wkt::Empty { };
    mockGrpcClient.Setup(x => x.DeleteJobAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
    await client.DeleteJobAsync(request.JobName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    await client.DeleteJobAsync(request.JobName, st::CancellationToken.None);
    mockGrpcClient.VerifyAll();
}

// Verifies the synchronous SearchJobs call with a fully-populated request object.
[xunit::FactAttribute]
public void SearchJobsRequestObject()
{
    moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
    mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    SearchJobsRequest request = new SearchJobsRequest
    {
        ParentAsTenantName = TenantName.FromProjectTenant("[PROJECT]", "[TENANT]"),
        SearchMode = SearchJobsRequest.Types.SearchMode.FeaturedJobSearch,
        RequestMetadata = new RequestMetadata(),
        JobQuery = new JobQuery(),
        EnableBroadening = false,
        HistogramQueries = { new HistogramQuery(), },
        JobView = JobView.Small,
        Offset = 1472300666,
        MaxPageSize = -1271917583,
        PageToken = "page_tokenf09e5538",
        OrderBy = "order_byb4d33ada",
        DiversificationLevel = SearchJobsRequest.Types.DiversificationLevel.TwoPerCompany,
        CustomRankingInfo = new SearchJobsRequest.Types.CustomRankingInfo(),
#pragma warning disable CS0612
        // DisableKeywordMatch is obsolete in the API surface; pragma keeps this warning-free.
        DisableKeywordMatch = true,
#pragma warning restore CS0612
        KeywordMatchMode = SearchJobsRequest.Types.KeywordMatchMode.Unspecified,
    };
    SearchJobsResponse expectedResponse = new SearchJobsResponse
    {
        MatchingJobs = { new SearchJobsResponse.Types.MatchingJob(), },
        HistogramQueryResults = { new HistogramQueryResult(), },
        NextPageToken = "next_page_tokendbee0940",
        LocationFilters = { new Location(), },
        TotalSize = 1202968108,
        Metadata = new ResponseMetadata(),
        BroadenedQueryJobsCount = 2131480093,
        SpellCorrection = new SpellingCorrection(),
    };
    mockGrpcClient.Setup(x => x.SearchJobs(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
    SearchJobsResponse response = client.SearchJobs(request);
    // Assert.Same: the wrapper must return the exact proto instance from the gRPC stub.
    xunit::Assert.Same(expectedResponse, response);
    mockGrpcClient.VerifyAll();
}

// Verifies the async SearchJobs call via both the CallSettings and CancellationToken paths.
[xunit::FactAttribute]
public async stt::Task SearchJobsRequestObjectAsync()
{
    moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
    mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    SearchJobsRequest request = new SearchJobsRequest
    {
        ParentAsTenantName = TenantName.FromProjectTenant("[PROJECT]", "[TENANT]"),
        SearchMode = SearchJobsRequest.Types.SearchMode.FeaturedJobSearch,
        RequestMetadata = new RequestMetadata(),
        JobQuery = new JobQuery(),
        EnableBroadening = false,
        HistogramQueries = { new HistogramQuery(), },
        JobView = JobView.Small,
        Offset = 1472300666,
        MaxPageSize = -1271917583,
        PageToken = "page_tokenf09e5538",
        OrderBy = "order_byb4d33ada",
        DiversificationLevel = SearchJobsRequest.Types.DiversificationLevel.TwoPerCompany,
        CustomRankingInfo = new SearchJobsRequest.Types.CustomRankingInfo(),
#pragma warning disable CS0612
        DisableKeywordMatch = true,
#pragma warning restore CS0612
        KeywordMatchMode = SearchJobsRequest.Types.KeywordMatchMode.Unspecified,
    };
    SearchJobsResponse expectedResponse = new SearchJobsResponse
    {
        MatchingJobs = { new SearchJobsResponse.Types.MatchingJob(), },
        HistogramQueryResults = { new HistogramQueryResult(), },
        NextPageToken = "next_page_tokendbee0940",
        LocationFilters = { new Location(), },
        TotalSize = 1202968108,
        Metadata = new ResponseMetadata(),
        BroadenedQueryJobsCount = 2131480093,
        SpellCorrection = new SpellingCorrection(),
    };
    // AsyncUnaryCall wraps the already-completed Task so the await resolves immediately.
    mockGrpcClient.Setup(x => x.SearchJobsAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<SearchJobsResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
    SearchJobsResponse responseCallSettings = await client.SearchJobsAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, responseCallSettings);
    SearchJobsResponse responseCancellationToken = await client.SearchJobsAsync(request, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, responseCancellationToken);
    mockGrpcClient.VerifyAll();
}

// Verifies the synchronous SearchJobsForAlert call with a fully-populated request object.
[xunit::FactAttribute]
public void SearchJobsForAlertRequestObject()
{
    moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
    mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    SearchJobsRequest request = new SearchJobsRequest
    {
        ParentAsTenantName = TenantName.FromProjectTenant("[PROJECT]", "[TENANT]"),
        SearchMode = SearchJobsRequest.Types.SearchMode.FeaturedJobSearch,
        RequestMetadata = new RequestMetadata(),
        JobQuery = new JobQuery(),
        EnableBroadening = false,
        HistogramQueries = { new HistogramQuery(), },
        JobView = JobView.Small,
        Offset = 1472300666,
        MaxPageSize = -1271917583,
        PageToken = "page_tokenf09e5538",
        OrderBy = "order_byb4d33ada",
        DiversificationLevel = SearchJobsRequest.Types.DiversificationLevel.TwoPerCompany,
        CustomRankingInfo = new SearchJobsRequest.Types.CustomRankingInfo(),
#pragma warning disable CS0612
        DisableKeywordMatch = true,
#pragma warning restore CS0612
        KeywordMatchMode = SearchJobsRequest.Types.KeywordMatchMode.Unspecified,
    };
    SearchJobsResponse expectedResponse = new SearchJobsResponse
    {
        MatchingJobs = { new SearchJobsResponse.Types.MatchingJob(), },
        HistogramQueryResults = { new HistogramQueryResult(), },
        NextPageToken = "next_page_tokendbee0940",
        LocationFilters = { new Location(), },
        TotalSize = 1202968108,
        Metadata = new ResponseMetadata(),
        BroadenedQueryJobsCount = 2131480093,
        SpellCorrection = new SpellingCorrection(),
    };
    mockGrpcClient.Setup(x => x.SearchJobsForAlert(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
    JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
    SearchJobsResponse response = client.SearchJobsForAlert(request);
    xunit::Assert.Same(expectedResponse, response);
    mockGrpcClient.VerifyAll();
}

// Verifies the async SearchJobsForAlert call via both call paths.
[xunit::FactAttribute]
public async stt::Task SearchJobsForAlertRequestObjectAsync()
{
    moq::Mock<JobService.JobServiceClient> mockGrpcClient = new moq::Mock<JobService.JobServiceClient>(moq::MockBehavior.Strict);
    mockGrpcClient.Setup(x => x.CreateOperationsClient()).Returns(new moq::Mock<lro::Operations.OperationsClient>().Object);
    SearchJobsRequest request = new SearchJobsRequest
    {
        ParentAsTenantName = TenantName.FromProjectTenant("[PROJECT]", "[TENANT]"),
        SearchMode = SearchJobsRequest.Types.SearchMode.FeaturedJobSearch,
        RequestMetadata = new RequestMetadata(),
        JobQuery = new JobQuery(),
        EnableBroadening = false,
        HistogramQueries = { new HistogramQuery(), },
        JobView = JobView.Small,
        Offset = 1472300666,
        MaxPageSize = -1271917583,
        PageToken = "page_tokenf09e5538",
        OrderBy = "order_byb4d33ada",
        DiversificationLevel = SearchJobsRequest.Types.DiversificationLevel.TwoPerCompany,
        CustomRankingInfo = new SearchJobsRequest.Types.CustomRankingInfo(),
#pragma warning disable CS0612
        DisableKeywordMatch = true,
#pragma warning restore CS0612
        KeywordMatchMode = SearchJobsRequest.Types.KeywordMatchMode.Unspecified,
    };
    SearchJobsResponse expectedResponse = new SearchJobsResponse
    {
        MatchingJobs = { new SearchJobsResponse.Types.MatchingJob(), },
        HistogramQueryResults = { new HistogramQueryResult(), },
        NextPageToken = "next_page_tokendbee0940",
        LocationFilters = { new Location(), },
        TotalSize = 1202968108,
        Metadata = new ResponseMetadata(),
        BroadenedQueryJobsCount = 2131480093,
        SpellCorrection = new SpellingCorrection(),
    };
    mockGrpcClient.Setup(x => x.SearchJobsForAlertAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<SearchJobsResponse>(stt::Task.FromResult(expectedResponse), null, null, null, null));
    JobServiceClient client = new JobServiceClientImpl(mockGrpcClient.Object, null);
    SearchJobsResponse responseCallSettings = await client.SearchJobsForAlertAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
    xunit::Assert.Same(expectedResponse, responseCallSettings);
    SearchJobsResponse responseCancellationToken = await client.SearchJobsForAlertAsync(request, st::CancellationToken.None);
    xunit::Assert.Same(expectedResponse, responseCancellationToken);
    mockGrpcClient.VerifyAll();
}
// End of generated test class and its namespace.
}
}
/*!
 * X-UniTMX: A tiled map editor file importer for Unity3d
 * https://bitbucket.org/Chaoseiro/x-unitmx
 *
 * Copyright 2013-2014 Guilherme "Chaoseiro" Maia
 *           2014 Mario Madureira Fontes
 */
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using UnityEngine;

namespace X_UniTMX
{
	/// <summary>
	/// Tile SpriteEffects applied in a TileLayer (Tiled's per-tile flip flags).
	/// </summary>
	public class SpriteEffects
	{
		/// <summary>
		/// Flag for Tile Flipped Horizontally
		/// </summary>
		public bool flippedHorizontally = false;
		/// <summary>
		/// Flag for Tile Flipped Vertically
		/// </summary>
		public bool flippedVertically = false;
		/// <summary>
		/// Flag for Tile Flipped AntiDiagonally (Diagonally reversed)
		/// </summary>
		public bool flippedAntiDiagonally = false;
	}

	/// <summary>
	/// A single Tile in a TileLayer.
	/// </summary>
	public class Tile
	{
		/// <summary>
		/// Gets this Tile's original ID (the first set in Tiled)
		/// </summary>
		public int OriginalID { get; private set; }

		/// <summary>
		/// Gets this Tile's current ID (this can be changed ingame when TileLayer.SetTile is called)
		/// </summary>
		public int CurrentID { get; set; }

		/// <summary>
		/// Gets the TileSet whose texture is used when drawing the tile.
		/// </summary>
		public TileSet TileSet { get; set; }

		/// <summary>
		/// Gets the source rectangle of the tile inside the TileSet texture.
		/// </summary>
		public Rect Source { get; private set; }

		/// <summary>
		/// Gets the collection of properties for the tile.
		/// </summary>
		public PropertyCollection Properties { get; private set; }

		/// <summary>
		/// Gets or sets a color associated with the tile.
		/// </summary>
		public Color Color { get; set; }

		/// <summary>
		/// Gets or sets the SpriteEffects (flip flags) applied when drawing this tile.
		/// </summary>
		public SpriteEffects SpriteEffects { get; set; }

		/// <summary>
		/// Gets or sets this Tile's Unity GameObject
		/// </summary>
		public GameObject TileGameObject { get; set; }

		/// <summary>
		/// Gets or sets this Tile's Sprite
		/// </summary>
		public Sprite TileSprite { get; set; }

		/// <summary>
		/// Gets the Map's Tile Width, used as the Sprite's pixelsToUnits value.
		/// </summary>
		public int MapTileWidth { get; protected set; }

		/// <summary>
		/// Creates a new Tile object with an empty property collection, a zero pivot
		/// and the TileSet's own tile width as pixelsToUnits.
		/// </summary>
		/// <param name="tileSet">The TileSet that contains the tile image.</param>
		/// <param name="source">The source rectangle of the tile.</param>
		/// <param name="OriginalID">This Tile's ID</param>
		public Tile(TileSet tileSet, Rect source, int OriginalID)
			: this(tileSet, source, OriginalID, new PropertyCollection(), Vector2.zero)
		{
		}

		/// <summary>
		/// Creates a new Tile object and immediately builds its Sprite.
		/// </summary>
		/// <param name="tileSet">The TileSet that contains the tile image.</param>
		/// <param name="source">The source rectangle of the tile.</param>
		/// <param name="OriginalID">This Tile's ID</param>
		/// <param name="properties">The initial property collection or null to create an empty property collection.</param>
		/// <param name="pivot">The Tile's Sprite Pivot Point</param>
		/// <param name="mapTileWidth">The Map's TileWidth this tile is inside, used to calculate sprite's pixelsToUnits; values &lt;= 0 fall back to the TileSet's TileWidth.</param>
		/// <exception cref="ArgumentNullException">Thrown when tileSet is null.</exception>
		public Tile(TileSet tileSet, Rect source, int OriginalID, PropertyCollection properties, Vector2 pivot, int mapTileWidth = 0)
		{
			if (tileSet == null)
				throw new ArgumentNullException("tileSet");

			this.OriginalID = OriginalID;
			CurrentID = OriginalID;
			TileSet = tileSet;
			Source = source;
			Properties = properties ?? new PropertyCollection();
			Color = Color.white;
			SpriteEffects = new X_UniTMX.SpriteEffects();
			MapTileWidth = mapTileWidth;
			if (mapTileWidth <= 0)
				MapTileWidth = TileSet.TileWidth;

			CreateSprite(pivot);
		}

		/// <summary>
		/// Creates a new Tile without creating the Sprite. No null check on tileSet here:
		/// this overload is internal and trusted by the importer.
		/// </summary>
		/// <param name="tileSet">The TileSet that contains the tile image.</param>
		/// <param name="source">The source rectangle of the tile.</param>
		/// <param name="OriginalID">This Tile's ID</param>
		/// <param name="properties">The initial property collection or null to create an empty property collection.</param>
		/// <param name="mapTileWidth">The Map's TileWidth; values &lt;= 0 fall back to the TileSet's TileWidth.</param>
		internal Tile(TileSet tileSet, Rect source, int OriginalID, PropertyCollection properties, int mapTileWidth = 0)
		{
			this.OriginalID = OriginalID;
			CurrentID = OriginalID;
			TileSet = tileSet;
			Source = source;
			Properties = properties ?? new PropertyCollection();
			Color = Color.white;
			SpriteEffects = new X_UniTMX.SpriteEffects();
			// BUGFIX: previously a positive mapTileWidth was never stored, leaving
			// MapTileWidth == 0 (division by zero in CreateTileObject's ratioHW and an
			// invalid pixelsToUnits). Mirror the public constructor's assignment.
			MapTileWidth = mapTileWidth;
			if (mapTileWidth <= 0)
				MapTileWidth = TileSet.TileWidth;
		}

		/// <summary>
		/// Creates this Tile's Sprite from the TileSet texture and Source rect,
		/// using MapTileWidth as pixelsToUnits and twice the TileSet spacing as extrude.
		/// </summary>
		/// <param name="pivot">Sprite Pivot Point</param>
		protected void CreateSprite(Vector2 pivot)
		{
			TileSprite = Sprite.Create(TileSet.Texture, Source, pivot, MapTileWidth, (uint)(TileSet.Spacing * 2));
			TileSprite.name = OriginalID.ToString();
		}

		/// <summary>
		/// Creates this Tile's GameObject (TileGameObject) with a SpriteRenderer,
		/// applying the flip flags via scale/rotation/position adjustments and picking
		/// the shared material whose main texture matches this tile's TileSet texture.
		/// </summary>
		/// <param name="objectName">Desired name</param>
		/// <param name="parent">GameObject's parent</param>
		/// <param name="sortingLayerName">Sprite's sorting layer name</param>
		/// <param name="sortingLayerOrder">Sprite's sorting layer order</param>
		/// <param name="position">GameObject's position</param>
		/// <param name="materials">List of shared materials</param>
		/// <param name="opacity">This Object's Opacity</param>
		public void CreateTileObject(string objectName, Transform parent, string sortingLayerName, int sortingLayerOrder, Vector3 position, List<Material> materials, float opacity = 1.0f)
		{
			TileGameObject = new GameObject(objectName);
			TileGameObject.transform.parent = parent;
			SpriteRenderer tileRenderer = TileGameObject.AddComponent<SpriteRenderer>();
			tileRenderer.sprite = TileSprite;
			// Use Layer's name as Sorting Layer
			tileRenderer.sortingLayerName = sortingLayerName;
			tileRenderer.sortingOrder = sortingLayerOrder;
			// Default (unflipped) placement; overridden below when flip flags are set.
			TileGameObject.transform.localScale = new Vector2(1, 1);
			TileGameObject.transform.localPosition = new Vector3(position.x, position.y, position.z);

			if (this.SpriteEffects != null)
			{
				if (this.SpriteEffects.flippedHorizontally ||
					this.SpriteEffects.flippedVertically ||
					this.SpriteEffects.flippedAntiDiagonally)
				{
					// Height/width ratio compensates flipped placement for non-square tiles.
					float ratioHW = TileSet.TileHeight / (float)MapTileWidth;

					// Each of the 7 remaining flag combinations gets its own
					// scale/rotation/offset (the all-false case keeps the defaults above).
					if (this.SpriteEffects.flippedHorizontally == true &&
						this.SpriteEffects.flippedVertically == false &&
						this.SpriteEffects.flippedAntiDiagonally == false)
					{
						TileGameObject.transform.localScale = new Vector2(-1, 1);
						TileGameObject.transform.localPosition = new Vector3(position.x + 1, position.y, position.z);
					}
					if (this.SpriteEffects.flippedHorizontally == false &&
						this.SpriteEffects.flippedVertically == true &&
						this.SpriteEffects.flippedAntiDiagonally == false)
					{
						TileGameObject.transform.localScale = new Vector2(1, -1);
						TileGameObject.transform.localPosition = new Vector3(position.x, position.y + ratioHW, position.z);
					}
					if (this.SpriteEffects.flippedHorizontally == true &&
						this.SpriteEffects.flippedVertically == true &&
						this.SpriteEffects.flippedAntiDiagonally == false)
					{
						TileGameObject.transform.localScale = new Vector2(-1, -1);
						TileGameObject.transform.localPosition = new Vector3(position.x + 1, position.y + ratioHW, position.z);
					}
					if (this.SpriteEffects.flippedHorizontally == false &&
						this.SpriteEffects.flippedVertically == false &&
						this.SpriteEffects.flippedAntiDiagonally == true)
					{
						TileGameObject.transform.Rotate(Vector3.forward, 90);
						TileGameObject.transform.localScale = new Vector2(-1, 1);
						TileGameObject.transform.localPosition = new Vector3(position.x + ratioHW, position.y + 1, position.z);
					}
					if (this.SpriteEffects.flippedHorizontally == true &&
						this.SpriteEffects.flippedVertically == false &&
						this.SpriteEffects.flippedAntiDiagonally == true)
					{
						TileGameObject.transform.Rotate(Vector3.forward, -90);
						TileGameObject.transform.localPosition = new Vector3(position.x, position.y + 1, position.z);
					}
					if (this.SpriteEffects.flippedHorizontally == false &&
						this.SpriteEffects.flippedVertically == true &&
						this.SpriteEffects.flippedAntiDiagonally == true)
					{
						TileGameObject.transform.Rotate(Vector3.forward, 90);
						TileGameObject.transform.localPosition = new Vector3(position.x + ratioHW, position.y, position.z);
					}
					if (this.SpriteEffects.flippedHorizontally == true &&
						this.SpriteEffects.flippedVertically == true &&
						this.SpriteEffects.flippedAntiDiagonally == true)
					{
						TileGameObject.transform.Rotate(Vector3.forward, 90);
						TileGameObject.transform.localScale = new Vector2(1, -1);
						TileGameObject.transform.localPosition = new Vector3(position.x, position.y, position.z);
					}
				}
			}

			// Reuse the shared material whose texture matches this tile's TileSet texture.
			for (int k = 0; k < materials.Count; k++)
			{
				if (materials[k].mainTexture.name == TileSet.Texture.name)
				{
					tileRenderer.sharedMaterial = materials[k];
					break;
				}
			}

			// NOTE: this tints the SHARED material, affecting every renderer using it.
			if (opacity < 1)
				tileRenderer.sharedMaterial.color = new Color(1, 1, 1, opacity);
		}

		/// <summary>
		/// Creates a copy of the current tile.
		/// NOTE(review): CurrentID and Color are NOT copied (the clone starts from
		/// OriginalID and Color.white) — presumably intentional; confirm with callers.
		/// </summary>
		/// <returns>A new Tile with the same properties as the current tile.</returns>
		public virtual Tile Clone()
		{
			Tile t = new Tile(TileSet, Source, OriginalID, Properties);
			t.TileSprite = TileSprite;
			t.SpriteEffects = SpriteEffects;
			t.MapTileWidth = MapTileWidth;
			return t;
		}

		/// <summary>
		/// Creates a copy of the current tile with a different pivot point
		/// (builds a fresh Sprite instead of sharing this tile's).
		/// </summary>
		/// <param name="pivot">New pivot point</param>
		/// <returns>A new Tile with the same properties as the current tile.</returns>
		public virtual Tile Clone(Vector2 pivot)
		{
			return new Tile(TileSet, Source, OriginalID, Properties, pivot);
		}

		/// <summary>
		/// Gets a string property
		/// </summary>
		/// <param name="property">Name of the property inside Tiled</param>
		/// <returns>The value of the property, String.Empty if property not found</returns>
		public string GetPropertyAsString(string property)
		{
			if (Properties == null)
				return string.Empty;
			return Properties.GetPropertyAsString(property);
		}

		/// <summary>
		/// Gets a boolean property
		/// </summary>
		/// <param name="property">Name of the property inside Tiled</param>
		/// <returns>The value of the property, false if property not found</returns>
		public bool GetPropertyAsBoolean(string property)
		{
			if (Properties == null)
				return false;
			return Properties.GetPropertyAsBoolean(property);
		}

		/// <summary>
		/// Gets an integer property
		/// </summary>
		/// <param name="property">Name of the property inside Tiled</param>
		/// <returns>The value of the property, 0 if property not found</returns>
		public int GetPropertyAsInt(string property)
		{
			if (Properties == null)
				return 0;
			return Properties.GetPropertyAsInt(property);
		}

		/// <summary>
		/// Gets a float property
		/// </summary>
		/// <param name="property">Name of the property inside Tiled</param>
		/// <returns>The value of the property, 0 if property not found</returns>
		public float GetPropertyAsFloat(string property)
		{
			if (Properties == null)
				return 0;
			return Properties.GetPropertyAsFloat(property);
		}

		/// <summary>
		/// Checks if a property exists. The name is lower-cased before lookup, matching
		/// the case-insensitive storage used by PropertyCollection.
		/// </summary>
		/// <param name="property">Name of the property inside Tiled</param>
		/// <returns>true if property exists, false otherwise</returns>
		public bool HasProperty(string property)
		{
			if (Properties == null)
				return false;
			Property p;
			if (Properties.TryGetValue(property.ToLowerInvariant(), out p))
				return true;
			return false;
		}
	}
}
// // ColorGradientWidget.cs // // Author: // Krzysztof Marecki <marecki.krzysztof@gmail.com> // // Copyright (c) 2010 Krzysztof Marecki // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using System;
using System.ComponentModel;
using System.Linq;
using Cairo;
using Gtk;
using Pinta.Core;

namespace Pinta.Gui.Widgets
{
	/// <summary>
	/// Widget that draws a vertical color gradient with 2 or 3 draggable
	/// triangular handles along its sides. Each handle maps a vertical
	/// position to a value in 0-255; dragging a handle raises ValueChanged.
	/// </summary>
	[ToolboxItem(true)]
	public class ColorGradientWidget : FilledAreaBin
	{
		// Invisible event box layered over the widget to receive pointer/button events.
		private EventBox eventbox;

		//gradient horizontal padding (fraction of the allocation width reserved on each side for the handles)
		private const double xpad = 0.15;
		//gradient vertical padding (fraction of the allocation height)
		private const double ypad = 0.03;

		// Current handle values, nominally in [0, 255]; index 0 is the lowest handle.
		private double[] vals;

		// Gradient area: the widget allocation shrunk by xpad/ypad on each side.
		private Rectangle GradientRectangle {
			get {
				Rectangle rect = Allocation.ToCairoRectangle ();
				double x = rect.X + xpad * rect.Width;
				double y = rect.Y + ypad * rect.Height;
				double width = (1 - 2 * xpad) * rect.Width;
				double height = (1 - 2 * ypad) * rect.Height;

				return new Rectangle (x, y, width, height);
			}
		}

		/// <summary>
		/// Number of handles (2 or 3). Setting it re-seeds the handle values,
		/// spreading them evenly over 0-255.
		/// </summary>
		[Category("Custom Properties")]
		public int Count {
			get { return vals.Length; }
			set {
				if (value < 2 || value > 3) {
					throw new ArgumentOutOfRangeException("value", value, "Count must be 2 or 3");
				}

				vals = new double[value];
				// Integer division is exact for the allowed counts (256/1, 256/2);
				// the "- 1" caps the top handle at 255 instead of 256.
				double step = 256 / (value - 1);

				for (int i = 0; i < value ; i++) {
					vals [i] = i * step - ((i != 0) ? 1 : 0);
				}
			}
		}

		/// <summary>Color drawn at the top of the gradient (bottom fades to black).</summary>
		public Color MaxColor { get; set; }

		/// <summary>Index of the handle currently being dragged, or -1 when none.</summary>
		public int ValueIndex { get; private set; }

		public ColorGradientWidget ()
		{
			Build ();

			ValueIndex = -1;

			eventbox.MotionNotifyEvent += HandleMotionNotifyEvent;
			eventbox.LeaveNotifyEvent += HandleLeaveNotifyEvent;
			eventbox.ButtonPressEvent += HandleButtonPressEvent;
			eventbox.ButtonReleaseEvent += HandleButtonReleaseEvent;

			ExposeEvent += HandleExposeEvent;
		}

		/// <summary>Returns the value (0-255) of handle i, truncated to an int.</summary>
		public int GetValue (int i)
		{
			return (int) vals [i];
		}

		/// <summary>Sets handle i to val, raising ValueChanged only on an actual change.</summary>
		public void SetValue (int i, int val)
		{
			if ((int)vals [i] != val) {
				vals [i] = val;
				OnValueChanged (i);
			}
		}

		// Maps a handle value (0-255) to an absolute y pixel coordinate
		// (value 255 is at the top of the gradient, 0 at the bottom).
		private double GetYFromValue (double val)
		{
			Rectangle rect = GradientRectangle;
			Rectangle all = Allocation.ToCairoRectangle ();

			return all.Y + ypad * all.Height + rect.Height * (255 - val) / 255;
		}

		// Clamps a pointer y coordinate so handle `index` cannot cross its
		// neighbours or leave the gradient. yvals is sorted descending and
		// padded with the gradient's top/bottom edges, so after index++ the
		// entries at index-1 / index+1 are the lower / upper bounds.
		private double NormalizeY (int index, double py)
		{
			Rectangle rect = GradientRectangle;
			var yvals = (from val in vals select GetYFromValue (val)).Concat(
					new double[] {rect.Y, rect.Y + rect.Height}).OrderByDescending (
					v => v).ToArray();
			index++;

			if (py >= yvals [index - 1]) {
				py = yvals [index - 1];
			} else if (py < yvals [index + 1]) {
				py = yvals [index + 1];
			}

			return py;
		}

		// Inverse of GetYFromValue: maps an absolute y pixel coordinate back to a value.
		private int GetValueFromY (double py)
		{
			Rectangle rect = GradientRectangle;
			Rectangle all = Allocation.ToCairoRectangle ();

			py -= all.Y + ypad * all.Height;

			return ((int)(255 * (rect.Height - py) / rect.Height));
		}

		// Finds the handle nearest to pointer y, or -1 when there is none.
		// While a drag is in progress (ValueIndex != -1) the dragged handle wins,
		// so the pointer cannot "jump" to another handle mid-drag.
		private int FindValueIndex(int y)
		{
			if (ValueIndex == -1) {
				var yvals = (from val in vals select GetYFromValue (val)).ToArray ();
				int count = Count - 1;

				for (int i = 0; i < count; i++) {
					double y1 = yvals [i];
					double y2 = yvals [i + 1];
					double h = (y1 - y2) / 2;

					// pointer is below the lowest value triangle
					if (i == 0 && y1 < y)
						return i;

					// pointer is above the highest value triangle
					if (i == (count - 1) && y2 > y)
						return i + 1;

					// pointer is outside i and i + 1 value triangles
					if (!(y1 >= y && y >= y2))
						continue;

					// pointer is closer to lower value triangle
					if (y1 - y <= h)
						return i;
					// pointer is closer to higher value triangle
					if (y - y2 <= h)
						return i + 1;
				}

				return -1;
			} else {
				return ValueIndex;
			}
		}

		// Drag handling: while button 1 is held, move the nearest handle to the
		// (clamped) pointer position and notify listeners.
		private void HandleMotionNotifyEvent (object o, Gtk.MotionNotifyEventArgs args)
		{
			int px, py;
			Gdk.ModifierType mask;
			GdkWindow.GetPointer (out px, out py, out mask);

			int index = FindValueIndex (py);
			py = (int)NormalizeY (index, py);

			if (mask == Gdk.ModifierType.Button1Mask) {
				if (index != -1) {
					double y = GetValueFromY (py);

					vals[index] = y;
					OnValueChanged (index);
				}
			}

			// to avoid unnecessary, costly redrawing
			if (index != -1)
				GdkWindow.Invalidate ();
		}

		// Keep the drag alive if the pointer leaves while button 1 is still down.
		private void HandleLeaveNotifyEvent (object o, Gtk.LeaveNotifyEventArgs args)
		{
			if (args.Event.State != Gdk.ModifierType.Button1Mask)
				ValueIndex = -1;

			GdkWindow.Invalidate ();
		}

		// Begin a drag: latch the handle under the pointer into ValueIndex.
		void HandleButtonPressEvent (object o, Gtk.ButtonPressEventArgs args)
		{
			int px, py;
			Gdk.ModifierType mask;
			GdkWindow.GetPointer (out px, out py, out mask);

			int index = FindValueIndex ((int)py);

			if (index != -1)
				ValueIndex = index;
		}

		// End the drag.
		void HandleButtonReleaseEvent (object o, Gtk.ButtonReleaseEventArgs args)
		{
			ValueIndex = -1;
		}

		// Fills the gradient area with a vertical MaxColor-to-black gradient.
		private void DrawGradient (Context g)
		{
			Rectangle rect = GradientRectangle;

			Gradient pat = new LinearGradient(rect.X, rect.Y, rect.X, rect.Y + rect.Height);
			pat.AddColorStop (0, MaxColor);
			pat.AddColorStop (1, new Cairo.Color (0, 0, 0));

			g.Rectangle (rect);
			g.SetSource (pat);
			g.Fill();
		}

		// Draws a pair of triangles (left/right of the gradient) per handle;
		// the handle under the pointer (or being dragged) is highlighted blue.
		private void DrawTriangles (Context g)
		{
			int px, py;
			Gdk.ModifierType mask;
			GdkWindow.GetPointer (out px, out py, out mask);

			Rectangle rect = GradientRectangle;
			Rectangle all = Allocation.ToCairoRectangle();

			int index = FindValueIndex (py);

			for (int i = 0; i < Count; i++) {
				double val = vals [i];
				double y = GetYFromValue (val);
				bool hoover = ((index == i)) && (all.ContainsPoint (px, py) || ValueIndex != -1);
				Color color = hoover ? new Color (0.1, 0.1, 0.9) : new Color (0.1, 0.1, 0.1);

				//left triangle
				PointD[] points = new PointD[] {
					new PointD (rect.X, y),
					new PointD (rect.X - xpad * rect.Width, y + ypad * rect.Height),
					new PointD (rect.X - xpad * rect.Width, y - ypad * rect.Height) };
				g.FillPolygonal (points, color);

				double x = rect.X + rect.Width;
				//right triangle
				PointD[] points2 = new PointD[] {
					new PointD (x , y),
					new PointD (x + xpad * rect.Width, y + ypad * rect.Height),
					new PointD (x + xpad * rect.Width, y - ypad * rect.Height) };
				g.FillPolygonal (points2, color);
			}
		}

		private void HandleExposeEvent (object o, Gtk.ExposeEventArgs args)
		{
			using (Context g = Gdk.CairoHelper.Create (this.GdkWindow)) {
				DrawGradient (g);
				DrawTriangles (g);
			}
		}

		#region Protected Methods
		/// <summary>Raises ValueChanged for the handle at the given index.</summary>
		protected void OnValueChanged(int index)
		{
			if (ValueChanged != null) {
				ValueChanged(this, new IndexEventArgs (index));
			}
		}
		#endregion

		#region Public Events
		/// <summary>Raised whenever a handle's value changes (by drag or SetValue).</summary>
		public event IndexEventHandler ValueChanged;
		#endregion

		private void Build ()
		{
			CanFocus = true;
			// NOTE(review): raw Gdk.EventMask bit masks, presumably copied from
			// designer-generated code (pointer motion/button/expose events) — TODO confirm.
			Events = (Gdk.EventMask)1534;

			eventbox = new EventBox ();
			eventbox.Events = (Gdk.EventMask)790;
			eventbox.VisibleWindow = false;
			Add (eventbox);
		}
	}
}
using System.Drawing;

namespace ToolStripCustomizer.ColorTables
{
    /// <summary>
    /// Preset color table modelled after the Visual Studio 2013 "Dark" theme.
    /// Colors are grouped below by the ToolStrip element they apply to.
    /// </summary>
    internal sealed class VS2013DarkColorTable : PresetColorTable
    {
        public VS2013DarkColorTable()
            : base("Visual Studio 2013 Dark")
        {
        }

        /// <summary>Builds a fully opaque color from its RGB components.</summary>
        private static Color Rgb(int red, int green, int blue)
        {
            return Color.FromArgb(255, red, green, blue);
        }

        // Button highlight states reuse the matching gradient/border colors.
        public override Color ButtonSelectedHighlight { get { return ButtonSelectedGradientMiddle; } }
        public override Color ButtonSelectedHighlightBorder { get { return ButtonSelectedBorder; } }
        public override Color ButtonPressedHighlight { get { return ButtonPressedGradientMiddle; } }
        public override Color ButtonPressedHighlightBorder { get { return ButtonPressedBorder; } }
        public override Color ButtonCheckedHighlight { get { return ButtonCheckedGradientMiddle; } }
        public override Color ButtonCheckedHighlightBorder { get { return ButtonSelectedBorder; } }
        public override Color ButtonPressedBorder { get { return ButtonSelectedBorder; } }
        public override Color ButtonSelectedBorder { get { return Rgb(98, 98, 98); } }

        // Button gradients.
        public override Color ButtonCheckedGradientBegin { get { return Rgb(144, 144, 144); } }
        public override Color ButtonCheckedGradientMiddle { get { return Rgb(170, 170, 170); } }
        public override Color ButtonCheckedGradientEnd { get { return Rgb(170, 170, 170); } }
        public override Color ButtonSelectedGradientBegin { get { return Rgb(170, 170, 170); } }
        public override Color ButtonSelectedGradientMiddle { get { return Rgb(170, 170, 170); } }
        public override Color ButtonSelectedGradientEnd { get { return Rgb(170, 170, 170); } }
        public override Color ButtonPressedGradientBegin { get { return Rgb(170, 170, 170); } }
        public override Color ButtonPressedGradientMiddle { get { return Rgb(170, 170, 170); } }
        public override Color ButtonPressedGradientEnd { get { return Rgb(170, 170, 170); } }

        // Check-mark backgrounds.
        public override Color CheckBackground { get { return Rgb(173, 173, 173); } }
        public override Color CheckSelectedBackground { get { return Rgb(173, 173, 173); } }
        public override Color CheckPressedBackground { get { return Rgb(140, 140, 140); } }

        // Grip (drag handle).
        public override Color GripDark { get { return Rgb(22, 22, 22); } }
        public override Color GripLight { get { return Rgb(83, 83, 83); } }

        // Image margin of drop-down menus.
        public override Color ImageMarginGradientBegin { get { return Rgb(125, 125, 125); } }
        public override Color ImageMarginGradientMiddle { get { return Rgb(125, 125, 125); } }
        public override Color ImageMarginGradientEnd { get { return Rgb(125, 125, 125); } }
        public override Color ImageMarginRevealedGradientBegin { get { return Rgb(125, 125, 125); } }
        public override Color ImageMarginRevealedGradientMiddle { get { return Rgb(125, 125, 125); } }
        public override Color ImageMarginRevealedGradientEnd { get { return Rgb(125, 125, 125); } }

        // Menu strip and menu items.
        public override Color MenuStripGradientBegin { get { return Rgb(138, 138, 138); } }
        public override Color MenuStripGradientEnd { get { return Rgb(138, 138, 138); } }
        public override Color MenuItemSelected { get { return Rgb(170, 170, 170); } }
        public override Color MenuItemBorder { get { return Rgb(170, 170, 170); } }
        public override Color MenuBorder { get { return Rgb(22, 22, 22); } }
        public override Color MenuItemSelectedGradientBegin { get { return Rgb(170, 170, 170); } }
        public override Color MenuItemSelectedGradientEnd { get { return Rgb(170, 170, 170); } }
        public override Color MenuItemPressedGradientBegin { get { return Rgb(125, 125, 125); } }
        public override Color MenuItemPressedGradientMiddle { get { return Rgb(125, 125, 125); } }
        public override Color MenuItemPressedGradientEnd { get { return Rgb(125, 125, 125); } }

        // Rafting container.
        public override Color RaftingContainerGradientBegin { get { return Rgb(170, 170, 170); } }
        public override Color RaftingContainerGradientEnd { get { return Rgb(170, 170, 170); } }

        // Separators.
        public override Color SeparatorDark { get { return Rgb(22, 22, 22); } }
        public override Color SeparatorLight { get { return Rgb(62, 62, 62); } }

        // Status strip.
        public override Color StatusStripGradientBegin { get { return Rgb(37, 37, 38); } }
        public override Color StatusStripGradientEnd { get { return Rgb(37, 37, 38); } }

        // ToolStrip surfaces and panels.
        public override Color ToolStripBorder { get { return Rgb(22, 22, 22); } }
        public override Color ToolStripDropDownBackground { get { return Rgb(125, 125, 125); } }
        public override Color ToolStripGradientBegin { get { return Color.FromName("DimGray"); } }
        public override Color ToolStripGradientMiddle { get { return Rgb(89, 89, 89); } }
        public override Color ToolStripGradientEnd { get { return Rgb(88, 88, 88); } }
        public override Color ToolStripContentPanelGradientBegin { get { return Rgb(68, 68, 68); } }
        public override Color ToolStripContentPanelGradientEnd { get { return Rgb(68, 68, 68); } }
        public override Color ToolStripPanelGradientBegin { get { return Rgb(103, 103, 103); } }
        public override Color ToolStripPanelGradientEnd { get { return Rgb(103, 103, 103); } }

        // Overflow button.
        public override Color OverflowButtonGradientBegin { get { return Rgb(103, 103, 103); } }
        public override Color OverflowButtonGradientMiddle { get { return Rgb(103, 103, 103); } }
        public override Color OverflowButtonGradientEnd { get { return Rgb(79, 79, 79); } }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using IdentitySample.Models;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Metadata;
using Microsoft.EntityFrameworkCore.Migrations;

namespace IdentitySample.Data.Migrations
{
    // Model snapshot half of the initial ASP.NET Core Identity migration.
    // NOTE(review): this is EF Core tooling output; regenerate via migrations
    // rather than editing by hand. The Up/Down operations live in the other
    // half of this partial class.
    [DbContext(typeof(ApplicationDbContext))]
    [Migration("00000000000000_CreateIdentitySchema")]
    partial class CreateIdentitySchema
    {
        /// <summary>
        /// Rebuilds the model as it looked when this migration was generated,
        /// so EF Core can diff subsequent migrations against it.
        /// </summary>
        protected override void BuildTargetModel(ModelBuilder modelBuilder)
        {
#pragma warning disable 612, 618
            modelBuilder
                .HasAnnotation("ProductVersion", "1.0.0-rc3")
                .HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);

            // Roles (AspNetRoles).
            modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityRole", b =>
                {
                    b.Property<string>("Id");

                    b.Property<string>("ConcurrencyStamp")
                        .IsConcurrencyToken();

                    b.Property<string>("Name")
                        .HasAnnotation("MaxLength", 256);

                    b.Property<string>("NormalizedName")
                        .HasAnnotation("MaxLength", 256);

                    b.HasKey("Id");

                    b.HasIndex("NormalizedName")
                        .HasDatabaseName("RoleNameIndex");

                    b.ToTable("AspNetRoles");
                });

            // Role claims (AspNetRoleClaims).
            modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityRoleClaim<string>", b =>
                {
                    b.Property<int>("Id")
                        .ValueGeneratedOnAdd();

                    b.Property<string>("ClaimType");

                    b.Property<string>("ClaimValue");

                    b.Property<string>("RoleId")
                        .IsRequired();

                    b.HasKey("Id");

                    b.HasIndex("RoleId");

                    b.ToTable("AspNetRoleClaims");
                });

            // User claims (AspNetUserClaims).
            modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityUserClaim<string>", b =>
                {
                    b.Property<int>("Id")
                        .ValueGeneratedOnAdd();

                    b.Property<string>("ClaimType");

                    b.Property<string>("ClaimValue");

                    b.Property<string>("UserId")
                        .IsRequired();

                    b.HasKey("Id");

                    b.HasIndex("UserId");

                    b.ToTable("AspNetUserClaims");
                });

            // External logins (AspNetUserLogins) — composite key on provider + key.
            modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityUserLogin<string>", b =>
                {
                    b.Property<string>("LoginProvider");

                    b.Property<string>("ProviderKey");

                    b.Property<string>("ProviderDisplayName");

                    b.Property<string>("UserId")
                        .IsRequired();

                    b.HasKey("LoginProvider", "ProviderKey");

                    b.HasIndex("UserId");

                    b.ToTable("AspNetUserLogins");
                });

            // User-role join table (AspNetUserRoles).
            modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityUserRole<string>", b =>
                {
                    b.Property<string>("UserId");

                    b.Property<string>("RoleId");

                    b.HasKey("UserId", "RoleId");

                    b.HasIndex("RoleId");

                    b.HasIndex("UserId");

                    b.ToTable("AspNetUserRoles");
                });

            // Per-user tokens (AspNetUserTokens) — composite key user/provider/name.
            modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityUserToken<string>", b =>
                {
                    b.Property<string>("UserId");

                    b.Property<string>("LoginProvider");

                    b.Property<string>("Name");

                    b.Property<string>("Value");

                    b.HasKey("UserId", "LoginProvider", "Name");

                    b.ToTable("AspNetUserTokens");
                });

            // Application users (AspNetUsers).
            modelBuilder.Entity("WebApplication13.Models.ApplicationUser", b =>
                {
                    b.Property<string>("Id");

                    b.Property<int>("AccessFailedCount");

                    b.Property<string>("ConcurrencyStamp")
                        .IsConcurrencyToken();

                    b.Property<string>("Email")
                        .HasAnnotation("MaxLength", 256);

                    b.Property<bool>("EmailConfirmed");

                    b.Property<bool>("LockoutEnabled");

                    b.Property<DateTimeOffset?>("LockoutEnd");

                    b.Property<string>("NormalizedEmail")
                        .HasAnnotation("MaxLength", 256);

                    b.Property<string>("NormalizedUserName")
                        .HasAnnotation("MaxLength", 256);

                    b.Property<string>("PasswordHash");

                    b.Property<string>("PhoneNumber");

                    b.Property<bool>("PhoneNumberConfirmed");

                    b.Property<string>("SecurityStamp");

                    b.Property<bool>("TwoFactorEnabled");

                    b.Property<string>("UserName")
                        .HasAnnotation("MaxLength", 256);

                    b.HasKey("Id");

                    b.HasIndex("NormalizedEmail")
                        .HasDatabaseName("EmailIndex");

                    b.HasIndex("NormalizedUserName")
                        .IsUnique()
                        .HasDatabaseName("UserNameIndex");

                    b.ToTable("AspNetUsers");
                });

            // Foreign keys — all Identity child rows cascade-delete with their parent.
            modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityRoleClaim<string>", b =>
                {
                    b.HasOne("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityRole")
                        .WithMany("Claims")
                        .HasForeignKey("RoleId")
                        .OnDelete(DeleteBehavior.Cascade);
                });

            modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityUserClaim<string>", b =>
                {
                    b.HasOne("WebApplication13.Models.ApplicationUser")
                        .WithMany("Claims")
                        .HasForeignKey("UserId")
                        .OnDelete(DeleteBehavior.Cascade);
                });

            modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityUserLogin<string>", b =>
                {
                    b.HasOne("WebApplication13.Models.ApplicationUser")
                        .WithMany("Logins")
                        .HasForeignKey("UserId")
                        .OnDelete(DeleteBehavior.Cascade);
                });

            modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityUserRole<string>", b =>
                {
                    b.HasOne("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityRole")
                        .WithMany("Users")
                        .HasForeignKey("RoleId")
                        .OnDelete(DeleteBehavior.Cascade);

                    b.HasOne("WebApplication13.Models.ApplicationUser")
                        .WithMany("Roles")
                        .HasForeignKey("UserId")
                        .OnDelete(DeleteBehavior.Cascade);
                });
#pragma warning restore 612, 618
        }
    }
}
using System; using System.Diagnostics; using Lucene.Net.Documents; namespace Lucene.Net.Search { using Lucene.Net.Index; using NUnit.Framework; using AtomicReaderContext = Lucene.Net.Index.AtomicReaderContext; using BytesRef = Lucene.Net.Util.BytesRef; using Directory = Lucene.Net.Store.Directory; using DirectoryReader = Lucene.Net.Index.DirectoryReader; using Document = Documents.Document; using Field = Field; using FieldType = FieldType; using FloatField = FloatField; using IndexReader = Lucene.Net.Index.IndexReader; using IntField = IntField; using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer; using MultiFields = Lucene.Net.Index.MultiFields; using NumericUtils = Lucene.Net.Util.NumericUtils; using RandomIndexWriter = Lucene.Net.Index.RandomIndexWriter; using SlowCompositeReaderWrapper = Lucene.Net.Index.SlowCompositeReaderWrapper; using Terms = Lucene.Net.Index.Terms; using TermsEnum = Lucene.Net.Index.TermsEnum; using TestNumericUtils = Lucene.Net.Util.TestNumericUtils; // NaN arrays using TestUtil = Lucene.Net.Util.TestUtil; [TestFixture] public class TestNumericRangeQuery32 : LuceneTestCase { // distance of entries private static int Distance; // shift the starting of the values to the left, to also have negative values: private static readonly int StartOffset = -1 << 15; // number of docs to generate for testing private static int NoDocs; private static Directory Directory = null; private static IndexReader Reader = null; private static IndexSearcher Searcher = null; [TestFixtureSetUp] public static void BeforeClass() { NoDocs = AtLeast(4096); Distance = (1 << 30) / NoDocs; Directory = NewDirectory(); RandomIndexWriter writer = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(TestUtil.NextInt(Random(), 100, 1000)).SetMergePolicy(NewLogMergePolicy())); FieldType storedInt = new FieldType(IntField.TYPE_NOT_STORED); storedInt.Stored = true; storedInt.Freeze(); FieldType storedInt8 = new FieldType(storedInt); storedInt8.NumericPrecisionStep = 8; FieldType storedInt4 = new FieldType(storedInt); storedInt4.NumericPrecisionStep = 4; FieldType storedInt2 = new FieldType(storedInt); storedInt2.NumericPrecisionStep = 2; FieldType storedIntNone = new FieldType(storedInt); storedIntNone.NumericPrecisionStep = int.MaxValue; FieldType unstoredInt = IntField.TYPE_NOT_STORED; FieldType unstoredInt8 = new FieldType(unstoredInt); unstoredInt8.NumericPrecisionStep = 8; FieldType unstoredInt4 = new FieldType(unstoredInt); 
unstoredInt4.NumericPrecisionStep = 4; FieldType unstoredInt2 = new FieldType(unstoredInt); unstoredInt2.NumericPrecisionStep = 2; IntField field8 = new IntField("field8", 0, storedInt8), field4 = new IntField("field4", 0, storedInt4), field2 = new IntField("field2", 0, storedInt2), fieldNoTrie = new IntField("field" + int.MaxValue, 0, storedIntNone), ascfield8 = new IntField("ascfield8", 0, unstoredInt8), ascfield4 = new IntField("ascfield4", 0, unstoredInt4), ascfield2 = new IntField("ascfield2", 0, unstoredInt2); Document doc = new Document(); // add fields, that have a distance to test general functionality doc.Add(field8); doc.Add(field4); doc.Add(field2); doc.Add(fieldNoTrie); // add ascending fields with a distance of 1, beginning at -noDocs/2 to test the correct splitting of range and inclusive/exclusive doc.Add(ascfield8); doc.Add(ascfield4); doc.Add(ascfield2); // Add a series of noDocs docs with increasing int values for (int l = 0; l < NoDocs; l++) { int val = Distance * l + StartOffset; field8.IntValue = val; field4.IntValue = val; field2.IntValue = val; fieldNoTrie.IntValue = val; val = l - (NoDocs / 2); ascfield8.IntValue = val; ascfield4.IntValue = val; ascfield2.IntValue = val; writer.AddDocument(doc); } Reader = writer.Reader; Searcher = NewSearcher(Reader); writer.Dispose(); } [TestFixtureTearDown] public static void AfterClass() { Searcher = null; Reader.Dispose(); Reader = null; Directory.Dispose(); Directory = null; } [SetUp] public override void SetUp() { base.SetUp(); // set the theoretical maximum term count for 8bit (see docs for the number) // super.tearDown will restore the default BooleanQuery.MaxClauseCount = 3 * 255 * 2 + 255; } /// <summary> /// test for both constant score and boolean query, the other tests only use the constant score mode </summary> private void TestRange(int precisionStep) { string field = "field" + precisionStep; int count = 3000; int lower = (Distance * 3 / 2) + StartOffset, upper = lower + count * Distance + 
(Distance / 3); NumericRangeQuery<int> q = NumericRangeQuery.NewIntRange(field, precisionStep, lower, upper, true, true); NumericRangeFilter<int> f = NumericRangeFilter.NewIntRange(field, precisionStep, lower, upper, true, true); for (sbyte i = 0; i < 3; i++) { TopDocs topDocs; string type; switch (i) { case 0: type = " (constant score filter rewrite)"; q.SetRewriteMethod(MultiTermQuery.CONSTANT_SCORE_FILTER_REWRITE); topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER); break; case 1: type = " (constant score boolean rewrite)"; q.SetRewriteMethod(MultiTermQuery.CONSTANT_SCORE_BOOLEAN_QUERY_REWRITE); topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER); break; case 2: type = " (filter)"; topDocs = Searcher.Search(new MatchAllDocsQuery(), f, NoDocs, Sort.INDEXORDER); break; default: return; } ScoreDoc[] sd = topDocs.ScoreDocs; Assert.IsNotNull(sd); Assert.AreEqual(count, sd.Length, "Score doc count" + type); Document doc = Searcher.Doc(sd[0].Doc); Assert.AreEqual(2 * Distance + StartOffset, (int)doc.GetField(field).NumericValue, "First doc" + type); doc = Searcher.Doc(sd[sd.Length - 1].Doc); Assert.AreEqual((1 + count) * Distance + StartOffset, (int)doc.GetField(field).NumericValue, "Last doc" + type); } } [Test] public virtual void TestRange_8bit() { TestRange(8); } [Test] public virtual void TestRange_4bit() { TestRange(4); } [Test] public virtual void TestRange_2bit() { TestRange(2); } [Test] public virtual void TestInverseRange() { AtomicReaderContext context = (AtomicReaderContext)SlowCompositeReaderWrapper.Wrap(Reader).Context; NumericRangeFilter<int> f = NumericRangeFilter.NewIntRange("field8", 8, 1000, -1000, true, true); Assert.IsNull(f.GetDocIdSet(context, (context.AtomicReader).LiveDocs), "A inverse range should return the null instance"); f = NumericRangeFilter.NewIntRange("field8", 8, int.MaxValue, null, false, false); Assert.IsNull(f.GetDocIdSet(context, (context.AtomicReader).LiveDocs), "A exclusive range starting with Integer.MAX_VALUE 
should return the null instance"); f = NumericRangeFilter.NewIntRange("field8", 8, null, int.MinValue, false, false); Assert.IsNull(f.GetDocIdSet(context, (context.AtomicReader).LiveDocs), "A exclusive range ending with Integer.MIN_VALUE should return the null instance"); } [Test] public virtual void TestOneMatchQuery() { NumericRangeQuery<int> q = NumericRangeQuery.NewIntRange("ascfield8", 8, 1000, 1000, true, true); TopDocs topDocs = Searcher.Search(q, NoDocs); ScoreDoc[] sd = topDocs.ScoreDocs; Assert.IsNotNull(sd); Assert.AreEqual(1, sd.Length, "Score doc count"); } private void TestLeftOpenRange(int precisionStep) { string field = "field" + precisionStep; int count = 3000; int upper = (count - 1) * Distance + (Distance / 3) + StartOffset; NumericRangeQuery<int> q = NumericRangeQuery.NewIntRange(field, precisionStep, null, upper, true, true); TopDocs topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER); ScoreDoc[] sd = topDocs.ScoreDocs; Assert.IsNotNull(sd); Assert.AreEqual(count, sd.Length, "Score doc count"); Document doc = Searcher.Doc(sd[0].Doc); Assert.AreEqual(StartOffset, (int)doc.GetField(field).NumericValue, "First doc"); doc = Searcher.Doc(sd[sd.Length - 1].Doc); Assert.AreEqual((count - 1) * Distance + StartOffset, (int)doc.GetField(field).NumericValue, "Last doc"); q = NumericRangeQuery.NewIntRange(field, precisionStep, null, upper, false, true); topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER); sd = topDocs.ScoreDocs; Assert.IsNotNull(sd); Assert.AreEqual(count, sd.Length, "Score doc count"); doc = Searcher.Doc(sd[0].Doc); Assert.AreEqual(StartOffset, (int)doc.GetField(field).NumericValue, "First doc"); doc = Searcher.Doc(sd[sd.Length - 1].Doc); Assert.AreEqual((count - 1) * Distance + StartOffset, (int)doc.GetField(field).NumericValue, "Last doc"); } [Test] public virtual void TestLeftOpenRange_8bit() { TestLeftOpenRange(8); } [Test] public virtual void TestLeftOpenRange_4bit() { TestLeftOpenRange(4); } [Test] public virtual 
void TestLeftOpenRange_2bit() { TestLeftOpenRange(2); } private void TestRightOpenRange(int precisionStep) { string field = "field" + precisionStep; int count = 3000; int lower = (count - 1) * Distance + (Distance / 3) + StartOffset; NumericRangeQuery<int> q = NumericRangeQuery.NewIntRange(field, precisionStep, lower, null, true, true); TopDocs topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER); ScoreDoc[] sd = topDocs.ScoreDocs; Assert.IsNotNull(sd); Assert.AreEqual(NoDocs - count, sd.Length, "Score doc count"); Document doc = Searcher.Doc(sd[0].Doc); Assert.AreEqual(count * Distance + StartOffset, (int)doc.GetField(field).NumericValue, "First doc"); doc = Searcher.Doc(sd[sd.Length - 1].Doc); Assert.AreEqual((NoDocs - 1) * Distance + StartOffset, (int)doc.GetField(field).NumericValue, "Last doc"); q = NumericRangeQuery.NewIntRange(field, precisionStep, lower, null, true, false); topDocs = Searcher.Search(q, null, NoDocs, Sort.INDEXORDER); sd = topDocs.ScoreDocs; Assert.IsNotNull(sd); Assert.AreEqual(NoDocs - count, sd.Length, "Score doc count"); doc = Searcher.Doc(sd[0].Doc); Assert.AreEqual(count * Distance + StartOffset, (int)doc.GetField(field).NumericValue, "First doc"); doc = Searcher.Doc(sd[sd.Length - 1].Doc); Assert.AreEqual((NoDocs - 1) * Distance + StartOffset, (int)doc.GetField(field).NumericValue, "Last doc"); } [Test] public virtual void TestRightOpenRange_8bit() { TestRightOpenRange(8); } [Test] public virtual void TestRightOpenRange_4bit() { TestRightOpenRange(4); } public virtual void TestRightOpenRange_2bit() { TestRightOpenRange(2); } [Test] public virtual void TestInfiniteValues() { Directory dir = NewDirectory(); RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))); Document doc = new Document(); doc.Add(new FloatField("float", float.NegativeInfinity, Field.Store.NO)); doc.Add(new IntField("int", int.MinValue, Field.Store.NO)); writer.AddDocument(doc); 
doc = new Document(); doc.Add(new FloatField("float", float.PositiveInfinity, Field.Store.NO)); doc.Add(new IntField("int", int.MaxValue, Field.Store.NO)); writer.AddDocument(doc); doc = new Document(); doc.Add(new FloatField("float", 0.0f, Field.Store.NO)); doc.Add(new IntField("int", 0, Field.Store.NO)); writer.AddDocument(doc); foreach (float f in TestNumericUtils.FLOAT_NANs) { doc = new Document(); doc.Add(new FloatField("float", f, Field.Store.NO)); writer.AddDocument(doc); } writer.Dispose(); IndexReader r = DirectoryReader.Open(dir); IndexSearcher s = NewSearcher(r); Query q = NumericRangeQuery.NewIntRange("int", null, null, true, true); TopDocs topDocs = s.Search(q, 10); Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count"); q = NumericRangeQuery.NewIntRange("int", null, null, false, false); topDocs = s.Search(q, 10); Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count"); q = NumericRangeQuery.NewIntRange("int", int.MinValue, int.MaxValue, true, true); topDocs = s.Search(q, 10); Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count"); q = NumericRangeQuery.NewIntRange("int", int.MinValue, int.MaxValue, false, false); topDocs = s.Search(q, 10); Assert.AreEqual(1, topDocs.ScoreDocs.Length, "Score doc count"); q = NumericRangeQuery.NewFloatRange("float", null, null, true, true); topDocs = s.Search(q, 10); Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count"); q = NumericRangeQuery.NewFloatRange("float", null, null, false, false); topDocs = s.Search(q, 10); Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count"); q = NumericRangeQuery.NewFloatRange("float", float.NegativeInfinity, float.PositiveInfinity, true, true); topDocs = s.Search(q, 10); Assert.AreEqual(3, topDocs.ScoreDocs.Length, "Score doc count"); q = NumericRangeQuery.NewFloatRange("float", float.NegativeInfinity, float.PositiveInfinity, false, false); topDocs = s.Search(q, 10); Assert.AreEqual(1, topDocs.ScoreDocs.Length, "Score doc count"); q = 
NumericRangeQuery.NewFloatRange("float", float.NaN, float.NaN, true, true); topDocs = s.Search(q, 10); Assert.AreEqual(TestNumericUtils.FLOAT_NANs.Length, topDocs.ScoreDocs.Length, "Score doc count"); r.Dispose(); dir.Dispose(); } private void TestRandomTrieAndClassicRangeQuery(int precisionStep) { string field = "field" + precisionStep; int totalTermCountT = 0, totalTermCountC = 0, termCountT, termCountC; int num = TestUtil.NextInt(Random(), 10, 20); for (int i = 0; i < num; i++) { int lower = (int)(Random().NextDouble() * NoDocs * Distance) + StartOffset; int upper = (int)(Random().NextDouble() * NoDocs * Distance) + StartOffset; if (lower > upper) { int a = lower; lower = upper; upper = a; } BytesRef lowerBytes = new BytesRef(NumericUtils.BUF_SIZE_INT), upperBytes = new BytesRef(NumericUtils.BUF_SIZE_INT); NumericUtils.IntToPrefixCodedBytes(lower, 0, lowerBytes); NumericUtils.IntToPrefixCodedBytes(upper, 0, upperBytes); // test inclusive range NumericRangeQuery<int> tq = NumericRangeQuery.NewIntRange(field, precisionStep, lower, upper, true, true); TermRangeQuery cq = new TermRangeQuery(field, lowerBytes, upperBytes, true, true); TopDocs tTopDocs = Searcher.Search(tq, 1); TopDocs cTopDocs = Searcher.Search(cq, 1); Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal"); totalTermCountT += termCountT = CountTerms(tq); totalTermCountC += termCountC = CountTerms(cq); CheckTermCounts(precisionStep, termCountT, termCountC); // test exclusive range tq = NumericRangeQuery.NewIntRange(field, precisionStep, lower, upper, false, false); cq = new TermRangeQuery(field, lowerBytes, upperBytes, false, false); tTopDocs = Searcher.Search(tq, 1); cTopDocs = Searcher.Search(cq, 1); Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal"); totalTermCountT += termCountT = CountTerms(tq); totalTermCountC += termCountC = CountTerms(cq); 
CheckTermCounts(precisionStep, termCountT, termCountC); // test left exclusive range tq = NumericRangeQuery.NewIntRange(field, precisionStep, lower, upper, false, true); cq = new TermRangeQuery(field, lowerBytes, upperBytes, false, true); tTopDocs = Searcher.Search(tq, 1); cTopDocs = Searcher.Search(cq, 1); Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal"); totalTermCountT += termCountT = CountTerms(tq); totalTermCountC += termCountC = CountTerms(cq); CheckTermCounts(precisionStep, termCountT, termCountC); // test right exclusive range tq = NumericRangeQuery.NewIntRange(field, precisionStep, lower, upper, true, false); cq = new TermRangeQuery(field, lowerBytes, upperBytes, true, false); tTopDocs = Searcher.Search(tq, 1); cTopDocs = Searcher.Search(cq, 1); Assert.AreEqual(cTopDocs.TotalHits, tTopDocs.TotalHits, "Returned count for NumericRangeQuery and TermRangeQuery must be equal"); totalTermCountT += termCountT = CountTerms(tq); totalTermCountC += termCountC = CountTerms(cq); CheckTermCounts(precisionStep, termCountT, termCountC); } CheckTermCounts(precisionStep, totalTermCountT, totalTermCountC); if (VERBOSE && precisionStep != int.MaxValue) { Console.WriteLine("Average number of terms during random search on '" + field + "':"); Console.WriteLine(" Numeric query: " + (((double)totalTermCountT) / (num * 4))); Console.WriteLine(" Classical query: " + (((double)totalTermCountC) / (num * 4))); } } [Test] public virtual void TestEmptyEnums() { int count = 3000; int lower = (Distance * 3 / 2) + StartOffset, upper = lower + count * Distance + (Distance / 3); // test empty enum Debug.Assert(lower < upper); Assert.IsTrue(0 < CountTerms(NumericRangeQuery.NewIntRange("field4", 4, lower, upper, true, true))); Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewIntRange("field4", 4, upper, lower, true, true))); // test empty enum outside of bounds lower = Distance * NoDocs + StartOffset; upper = 2 * 
lower; Debug.Assert(lower < upper); Assert.AreEqual(0, CountTerms(NumericRangeQuery.NewIntRange("field4", 4, lower, upper, true, true))); } private int CountTerms(MultiTermQuery q) { Terms terms = MultiFields.GetTerms(Reader, q.Field); if (terms == null) { return 0; } TermsEnum termEnum = q.GetTermsEnum(terms); Assert.IsNotNull(termEnum); int count = 0; BytesRef cur, last = null; while ((cur = termEnum.Next()) != null) { count++; if (last != null) { Assert.IsTrue(last.CompareTo(cur) < 0); } last = BytesRef.DeepCopyOf(cur); } // LUCENE-3314: the results after next() already returned null are undefined, // Assert.IsNull(termEnum.Next()); return count; } private void CheckTermCounts(int precisionStep, int termCountT, int termCountC) { if (precisionStep == int.MaxValue) { Assert.AreEqual(termCountC, termCountT, "Number of terms should be equal for unlimited precStep"); } else { Assert.IsTrue(termCountT <= termCountC, "Number of terms for NRQ should be <= compared to classical TRQ"); } } [Test] public virtual void TestRandomTrieAndClassicRangeQuery_8bit() { TestRandomTrieAndClassicRangeQuery(8); } [Test] public virtual void TestRandomTrieAndClassicRangeQuery_4bit() { TestRandomTrieAndClassicRangeQuery(4); } [Test] public virtual void TestRandomTrieAndClassicRangeQuery_2bit() { TestRandomTrieAndClassicRangeQuery(2); } [Test] public virtual void TestRandomTrieAndClassicRangeQuery_NoTrie() { TestRandomTrieAndClassicRangeQuery(int.MaxValue); } private void TestRangeSplit(int precisionStep) { string field = "ascfield" + precisionStep; // 10 random tests int num = TestUtil.NextInt(Random(), 10, 20); for (int i = 0; i < num; i++) { int lower = (int)(Random().NextDouble() * NoDocs - NoDocs / 2); int upper = (int)(Random().NextDouble() * NoDocs - NoDocs / 2); if (lower > upper) { int a = lower; lower = upper; upper = a; } // test inclusive range Query tq = NumericRangeQuery.NewIntRange(field, precisionStep, lower, upper, true, true); TopDocs tTopDocs = Searcher.Search(tq, 1); 
Assert.AreEqual(upper - lower + 1, tTopDocs.TotalHits, "Returned count of range query must be equal to inclusive range length"); // test exclusive range tq = NumericRangeQuery.NewIntRange(field, precisionStep, lower, upper, false, false); tTopDocs = Searcher.Search(tq, 1); Assert.AreEqual(Math.Max(upper - lower - 1, 0), tTopDocs.TotalHits, "Returned count of range query must be equal to exclusive range length"); // test left exclusive range tq = NumericRangeQuery.NewIntRange(field, precisionStep, lower, upper, false, true); tTopDocs = Searcher.Search(tq, 1); Assert.AreEqual(upper - lower, tTopDocs.TotalHits, "Returned count of range query must be equal to half exclusive range length"); // test right exclusive range tq = NumericRangeQuery.NewIntRange(field, precisionStep, lower, upper, true, false); tTopDocs = Searcher.Search(tq, 1); Assert.AreEqual(upper - lower, tTopDocs.TotalHits, "Returned count of range query must be equal to half exclusive range length"); } } [Test] public virtual void TestRangeSplit_8bit() { TestRangeSplit(8); } [Test] public virtual void TestRangeSplit_4bit() { TestRangeSplit(4); } [Test] public virtual void TestRangeSplit_2bit() { TestRangeSplit(2); } /// <summary> /// we fake a float test using int2float conversion of NumericUtils </summary> private void TestFloatRange(int precisionStep) { string field = "ascfield" + precisionStep; const int lower = -1000, upper = +2000; Query tq = NumericRangeQuery.NewFloatRange(field, precisionStep, NumericUtils.SortableIntToFloat(lower), NumericUtils.SortableIntToFloat(upper), true, true); TopDocs tTopDocs = Searcher.Search(tq, 1); Assert.AreEqual(upper - lower + 1, tTopDocs.TotalHits, "Returned count of range query must be equal to inclusive range length"); Filter tf = NumericRangeFilter.NewFloatRange(field, precisionStep, NumericUtils.SortableIntToFloat(lower), NumericUtils.SortableIntToFloat(upper), true, true); tTopDocs = Searcher.Search(new MatchAllDocsQuery(), tf, 1); Assert.AreEqual(upper - lower 
+ 1, tTopDocs.TotalHits, "Returned count of range filter must be equal to inclusive range length"); } [Test] public virtual void TestFloatRange_8bit() { TestFloatRange(8); } [Test] public virtual void TestFloatRange_4bit() { TestFloatRange(4); } [Test] public virtual void TestFloatRange_2bit() { TestFloatRange(2); } private void TestSorting(int precisionStep) { string field = "field" + precisionStep; // 10 random tests, the index order is ascending, // so using a reverse sort field should retun descending documents int num = TestUtil.NextInt(Random(), 10, 20); for (int i = 0; i < num; i++) { int lower = (int)(Random().NextDouble() * NoDocs * Distance) + StartOffset; int upper = (int)(Random().NextDouble() * NoDocs * Distance) + StartOffset; if (lower > upper) { int a = lower; lower = upper; upper = a; } Query tq = NumericRangeQuery.NewIntRange(field, precisionStep, lower, upper, true, true); TopDocs topDocs = Searcher.Search(tq, null, NoDocs, new Sort(new SortField(field, SortField.Type_e.INT, true))); if (topDocs.TotalHits == 0) { continue; } ScoreDoc[] sd = topDocs.ScoreDocs; Assert.IsNotNull(sd); int last = (int)Searcher.Doc(sd[0].Doc).GetField(field).NumericValue; for (int j = 1; j < sd.Length; j++) { int act = (int)Searcher.Doc(sd[j].Doc).GetField(field).NumericValue; Assert.IsTrue(last > act, "Docs should be sorted backwards"); last = act; } } } [Test] public virtual void TestSorting_8bit() { TestSorting(8); } [Test] public virtual void TestSorting_4bit() { TestSorting(4); } [Test] public virtual void TestSorting_2bit() { TestSorting(2); } [Test] public virtual void TestEqualsAndHash() { QueryUtils.CheckHashEquals(NumericRangeQuery.NewIntRange("test1", 4, 10, 20, true, true)); QueryUtils.CheckHashEquals(NumericRangeQuery.NewIntRange("test2", 4, 10, 20, false, true)); QueryUtils.CheckHashEquals(NumericRangeQuery.NewIntRange("test3", 4, 10, 20, true, false)); QueryUtils.CheckHashEquals(NumericRangeQuery.NewIntRange("test4", 4, 10, 20, false, false)); 
QueryUtils.CheckHashEquals(NumericRangeQuery.NewIntRange("test5", 4, 10, null, true, true)); QueryUtils.CheckHashEquals(NumericRangeQuery.NewIntRange("test6", 4, null, 20, true, true)); QueryUtils.CheckHashEquals(NumericRangeQuery.NewIntRange("test7", 4, null, null, true, true)); QueryUtils.CheckEqual(NumericRangeQuery.NewIntRange("test8", 4, 10, 20, true, true), NumericRangeQuery.NewIntRange("test8", 4, 10, 20, true, true)); QueryUtils.CheckUnequal(NumericRangeQuery.NewIntRange("test9", 4, 10, 20, true, true), NumericRangeQuery.NewIntRange("test9", 8, 10, 20, true, true)); QueryUtils.CheckUnequal(NumericRangeQuery.NewIntRange("test10a", 4, 10, 20, true, true), NumericRangeQuery.NewIntRange("test10b", 4, 10, 20, true, true)); QueryUtils.CheckUnequal(NumericRangeQuery.NewIntRange("test11", 4, 10, 20, true, true), NumericRangeQuery.NewIntRange("test11", 4, 20, 10, true, true)); QueryUtils.CheckUnequal(NumericRangeQuery.NewIntRange("test12", 4, 10, 20, true, true), NumericRangeQuery.NewIntRange("test12", 4, 10, 20, false, true)); QueryUtils.CheckUnequal(NumericRangeQuery.NewIntRange("test13", 4, 10, 20, true, true), NumericRangeQuery.NewFloatRange("test13", 4, 10f, 20f, true, true)); // the following produces a hash collision, because Long and Integer have the same hashcode, so only test equality: Query q1 = NumericRangeQuery.NewIntRange("test14", 4, 10, 20, true, true); Query q2 = NumericRangeQuery.NewLongRange("test14", 4, 10L, 20L, true, true); Assert.IsFalse(q1.Equals(q2)); Assert.IsFalse(q2.Equals(q1)); } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Drawing;
using System.Drawing.Imaging;
using System.Globalization;
using System.IO;
using Xunit;

namespace System.ComponentModel.TypeConverterTests
{
    /// <summary>
    /// Tests for <see cref="ImageConverter"/>: round-tripping an <see cref="Image"/>
    /// through byte[]/string, converting from <see cref="Icon"/>, and enumerating
    /// image properties. Exercises both a directly constructed converter and the one
    /// obtained via <see cref="TypeDescriptor.GetConverter(object)"/>.
    /// </summary>
    public class ImageConverterTest
    {
        private readonly Image _image;
        private readonly ImageConverter _imgConv;
        // Converter resolved through TypeDescriptor; should behave like _imgConv.
        private readonly ImageConverter _imgConvFrmTD;
        private readonly string _imageStr;
        private readonly byte[] _imageBytes;

        public ImageConverterTest()
        {
            _image = Image.FromFile(Path.Combine("bitmaps", "TestImage.bmp"));
            _imageStr = _image.ToString();

            // Capture the raw encoded bytes of the test image for byte[] round-trip tests.
            using (MemoryStream destStream = new MemoryStream())
            {
                _image.Save(destStream, _image.RawFormat);
                _imageBytes = destStream.ToArray();
            }

            _imgConv = new ImageConverter();
            _imgConvFrmTD = (ImageConverter)TypeDescriptor.GetConverter(_image);
        }

        /// <summary>
        /// Converting an Icon should yield a 32x32 Bitmap regardless of the
        /// entries present in the .ico file.
        /// </summary>
        [ConditionalTheory(Helpers.IsDrawingSupported)]
        [InlineData("48x48_multiple_entries_4bit.ico")]
        [InlineData("256x256_seven_entries_multiple_bits.ico")]
        [InlineData("pngwithheight_icon.ico")]
        public void ImageConverterFromIconTest(string name)
        {
            using (var icon = new Icon(Helpers.GetTestBitmapPath(name)))
            {
                Bitmap iconBitmap = (Bitmap)_imgConv.ConvertFrom(icon);
                Assert.NotNull(iconBitmap);
                Assert.Equal(32, iconBitmap.Width);
                Assert.Equal(32, iconBitmap.Height);
                Assert.Equal(new Size(32, 32), iconBitmap.Size);
            }
        }

        /// <summary>
        /// A bitmap prefixed with an OLE header must still be recognized and converted.
        /// </summary>
        [ConditionalFact(Helpers.IsDrawingSupported)]
        public void ImageWithOleHeader()
        {
            string path = Path.Combine("bitmaps", "TestImageWithOleHeader.bmp");
            using (FileStream fileStream = File.Open(path, FileMode.Open))
            {
                using (var ms = new MemoryStream())
                {
                    fileStream.CopyTo(ms);
                    var converter = new ImageConverter();
                    object image = converter.ConvertFrom(ms.ToArray());
                    Assert.NotNull(image);
                }
            }
        }

        /// <summary>
        /// CanConvertFrom accepts byte[] and Icon only; everything else is rejected.
        /// </summary>
        [ConditionalFact(Helpers.IsDrawingSupported)]
        public void TestCanConvertFrom()
        {
            Assert.True(_imgConv.CanConvertFrom(typeof(byte[])), "byte[] (no context)");
            Assert.True(_imgConv.CanConvertFrom(null, typeof(byte[])), "byte[]");
            Assert.True(_imgConv.CanConvertFrom(null, _imageBytes.GetType()), "_imageBytes.GetType()");
            Assert.True(_imgConv.CanConvertFrom(typeof(Icon)), "Icon (no context)");
            Assert.True(_imgConv.CanConvertFrom(null, typeof(Icon)), "Icon");
            Assert.False(_imgConv.CanConvertFrom(null, typeof(string)), "string");
            Assert.False(_imgConv.CanConvertFrom(null, typeof(Rectangle)), "Rectangle");
            Assert.False(_imgConv.CanConvertFrom(null, typeof(Point)), "Point");
            Assert.False(_imgConv.CanConvertFrom(null, typeof(PointF)), "PointF");
            Assert.False(_imgConv.CanConvertFrom(null, typeof(Size)), "Size");
            Assert.False(_imgConv.CanConvertFrom(null, typeof(SizeF)), "SizeF");
            Assert.False(_imgConv.CanConvertFrom(null, typeof(object)), "object");
            Assert.False(_imgConv.CanConvertFrom(null, typeof(int)), "int");
            Assert.False(_imgConv.CanConvertFrom(null, typeof(Metafile)), "Metafile"); // fixed typo: was "Mefafile"

            Assert.True(_imgConvFrmTD.CanConvertFrom(typeof(byte[])), "TD byte[] (no context)");
            Assert.True(_imgConvFrmTD.CanConvertFrom(null, typeof(byte[])), "TD byte[]");
            Assert.True(_imgConvFrmTD.CanConvertFrom(null, _imageBytes.GetType()), "TD _imageBytes.GetType()");
            Assert.False(_imgConvFrmTD.CanConvertFrom(null, typeof(string)), "TD string");
            Assert.False(_imgConvFrmTD.CanConvertFrom(null, typeof(Rectangle)), "TD Rectangle");
            Assert.False(_imgConvFrmTD.CanConvertFrom(null, typeof(Point)), "TD Point");
            Assert.False(_imgConvFrmTD.CanConvertFrom(null, typeof(PointF)), "TD PointF");
            Assert.False(_imgConvFrmTD.CanConvertFrom(null, typeof(Size)), "TD Size");
            Assert.False(_imgConvFrmTD.CanConvertFrom(null, typeof(SizeF)), "TD SizeF");
            Assert.False(_imgConvFrmTD.CanConvertFrom(null, typeof(object)), "TD object");
            Assert.False(_imgConvFrmTD.CanConvertFrom(null, typeof(int)), "TD int");
            Assert.False(_imgConvFrmTD.CanConvertFrom(null, typeof(Metafile)), "TD Metafile");
        }

        /// <summary>
        /// CanConvertTo accepts string and byte[] only; everything else is rejected.
        /// </summary>
        [ConditionalFact(Helpers.IsDrawingSupported)]
        public void TestCanConvertTo()
        {
            Assert.True(_imgConv.CanConvertTo(typeof(string)), "string (no context)"); // fixed typo: was "stirng (no context)"
            Assert.True(_imgConv.CanConvertTo(null, typeof(string)), "string");
            Assert.True(_imgConv.CanConvertTo(null, _imageStr.GetType()), "_imageStr.GetType()");
            Assert.True(_imgConv.CanConvertTo(typeof(byte[])), "byte[] (no context)");
            Assert.True(_imgConv.CanConvertTo(null, typeof(byte[])), "byte[]");
            Assert.True(_imgConv.CanConvertTo(null, _imageBytes.GetType()), "_imageBytes.GetType()");
            Assert.False(_imgConv.CanConvertTo(null, typeof(Rectangle)), "Rectangle");
            Assert.False(_imgConv.CanConvertTo(null, typeof(Point)), "Point");
            Assert.False(_imgConv.CanConvertTo(null, typeof(PointF)), "PointF");
            Assert.False(_imgConv.CanConvertTo(null, typeof(Size)), "Size");
            Assert.False(_imgConv.CanConvertTo(null, typeof(SizeF)), "SizeF");
            Assert.False(_imgConv.CanConvertTo(null, typeof(object)), "object");
            Assert.False(_imgConv.CanConvertTo(null, typeof(int)), "int");

            Assert.True(_imgConvFrmTD.CanConvertTo(typeof(string)), "TD string (no context)");
            Assert.True(_imgConvFrmTD.CanConvertTo(null, typeof(string)), "TD string");
            Assert.True(_imgConvFrmTD.CanConvertTo(null, _imageStr.GetType()), "TD _imageStr.GetType()");
            Assert.True(_imgConvFrmTD.CanConvertTo(typeof(byte[])), "TD byte[] (no context)");
            Assert.True(_imgConvFrmTD.CanConvertTo(null, typeof(byte[])), "TD byte[]");
            Assert.True(_imgConvFrmTD.CanConvertTo(null, _imageBytes.GetType()), "TD _imageBytes.GetType()");
            Assert.False(_imgConvFrmTD.CanConvertTo(null, typeof(Rectangle)), "TD Rectangle");
            Assert.False(_imgConvFrmTD.CanConvertTo(null, typeof(Point)), "TD Point");
            Assert.False(_imgConvFrmTD.CanConvertTo(null, typeof(PointF)), "TD PointF");
            Assert.False(_imgConvFrmTD.CanConvertTo(null, typeof(Size)), "TD Size");
            Assert.False(_imgConvFrmTD.CanConvertTo(null, typeof(SizeF)), "TD SizeF");
            Assert.False(_imgConvFrmTD.CanConvertTo(null, typeof(object)), "TD object");
            Assert.False(_imgConvFrmTD.CanConvertTo(null, typeof(int)), "TD int");
        }

        /// <summary>
        /// Converting the captured byte[] back yields an image with the original dimensions;
        /// converting null to string yields the "(none)" placeholder.
        /// </summary>
        [ConditionalFact(Helpers.IsDrawingSupported)]
        public void ConvertFrom()
        {
            Image newImage = (Image)_imgConv.ConvertFrom(null, CultureInfo.InvariantCulture, _imageBytes);
            Assert.Equal(_image.Height, newImage.Height);
            Assert.Equal(_image.Width, newImage.Width);

            Assert.Equal("(none)", _imgConvFrmTD.ConvertTo(null, CultureInfo.InvariantCulture, null, typeof(string)));

            newImage = (Image)_imgConvFrmTD.ConvertFrom(null, CultureInfo.InvariantCulture, _imageBytes);
            Assert.Equal(_image.Height, newImage.Height);
            Assert.Equal(_image.Width, newImage.Width);
        }

        /// <summary>
        /// ConvertFrom rejects every source type other than byte[]/Icon with
        /// <see cref="NotSupportedException"/>.
        /// </summary>
        [ConditionalFact(Helpers.IsDrawingSupported)]
        public void ConvertFrom_ThrowsNotSupportedException()
        {
            Assert.Throws<NotSupportedException>(() => _imgConv.ConvertFrom("System.Drawing.String"));
            Assert.Throws<NotSupportedException>(() => _imgConv.ConvertFrom(null, CultureInfo.InvariantCulture, "System.Drawing.String"));
            Assert.Throws<NotSupportedException>(() => _imgConv.ConvertFrom(null, CultureInfo.InvariantCulture, new Bitmap(20, 20)));
            Assert.Throws<NotSupportedException>(() => _imgConv.ConvertFrom(null, CultureInfo.InvariantCulture, new Point(10, 10)));
            Assert.Throws<NotSupportedException>(() => _imgConv.ConvertFrom(null, CultureInfo.InvariantCulture, new SizeF(10, 10)));
            Assert.Throws<NotSupportedException>(() => _imgConv.ConvertFrom(null, CultureInfo.InvariantCulture, new object()));

            Assert.Throws<NotSupportedException>(() => _imgConvFrmTD.ConvertFrom("System.Drawing.String"));
            Assert.Throws<NotSupportedException>(() => _imgConvFrmTD.ConvertFrom(null, CultureInfo.InvariantCulture, "System.Drawing.String"));
            Assert.Throws<NotSupportedException>(() => _imgConvFrmTD.ConvertFrom(null, CultureInfo.InvariantCulture, new Bitmap(20, 20)));
            Assert.Throws<NotSupportedException>(() => _imgConvFrmTD.ConvertFrom(null, CultureInfo.InvariantCulture, new Point(10, 10)));
            Assert.Throws<NotSupportedException>(() => _imgConvFrmTD.ConvertFrom(null, CultureInfo.InvariantCulture, new SizeF(10, 10)));
            Assert.Throws<NotSupportedException>(() => _imgConvFrmTD.ConvertFrom(null, CultureInfo.InvariantCulture, new object()));
        }

        /// <summary>Converting an image to string returns its ToString() value.</summary>
        [ConditionalFact(Helpers.IsDrawingSupported)]
        public void ConvertTo_String()
        {
            Assert.Equal(_imageStr, (string)_imgConv.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(string)));
            Assert.Equal(_imageStr, (string)_imgConv.ConvertTo(_image, typeof(string)));

            Assert.Equal(_imageStr, (string)_imgConvFrmTD.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(string)));
            Assert.Equal(_imageStr, (string)_imgConvFrmTD.ConvertTo(_image, typeof(string)));
        }

        /// <summary>Converting an image to byte[] reproduces the original encoded bytes.</summary>
        [ConditionalFact(Helpers.IsDrawingSupported)]
        public void ConvertTo_ByteArray()
        {
            byte[] newImageBytes = (byte[])_imgConv.ConvertTo(null, CultureInfo.InvariantCulture, _image, _imageBytes.GetType());
            Assert.Equal(_imageBytes, newImageBytes);

            newImageBytes = (byte[])_imgConvFrmTD.ConvertTo(null, CultureInfo.InvariantCulture, _image, _imageBytes.GetType());
            Assert.Equal(_imageBytes, newImageBytes);

            newImageBytes = (byte[])_imgConvFrmTD.ConvertTo(_image, _imageBytes.GetType());
            Assert.Equal(_imageBytes, newImageBytes);
        }

        /// <summary>A freshly constructed Bitmap (no RawFormat from disk) still converts to byte[].</summary>
        [ConditionalFact(Helpers.IsDrawingSupported)]
        public void ConvertTo_FromBitmapToByteArray()
        {
            Bitmap value = new Bitmap(64, 64);
            ImageConverter converter = new ImageConverter();
            byte[] converted = (byte[])converter.ConvertTo(value, typeof(byte[]));
            Assert.NotNull(converted);
        }

        /// <summary>
        /// ConvertTo rejects every destination type other than string/byte[] with
        /// <see cref="NotSupportedException"/>.
        /// </summary>
        [ConditionalFact(Helpers.IsDrawingSupported)]
        public void ConvertTo_ThrowsNotSupportedException()
        {
            Assert.Throws<NotSupportedException>(() => _imgConv.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(Rectangle)));
            Assert.Throws<NotSupportedException>(() => _imgConv.ConvertTo(null, CultureInfo.InvariantCulture, _image, _image.GetType()));
            Assert.Throws<NotSupportedException>(() => _imgConv.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(Size)));
            Assert.Throws<NotSupportedException>(() => _imgConv.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(Bitmap)));
            Assert.Throws<NotSupportedException>(() => _imgConv.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(Point)));
            Assert.Throws<NotSupportedException>(() => _imgConv.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(Metafile)));
            Assert.Throws<NotSupportedException>(() => _imgConv.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(object)));
            Assert.Throws<NotSupportedException>(() => _imgConv.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(int)));

            Assert.Throws<NotSupportedException>(() => _imgConvFrmTD.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(Rectangle)));
            Assert.Throws<NotSupportedException>(() => _imgConvFrmTD.ConvertTo(null, CultureInfo.InvariantCulture, _image, _image.GetType()));
            Assert.Throws<NotSupportedException>(() => _imgConvFrmTD.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(Size)));
            Assert.Throws<NotSupportedException>(() => _imgConvFrmTD.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(Bitmap)));
            Assert.Throws<NotSupportedException>(() => _imgConvFrmTD.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(Point)));
            Assert.Throws<NotSupportedException>(() => _imgConvFrmTD.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(Metafile)));
            Assert.Throws<NotSupportedException>(() => _imgConvFrmTD.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(object)));
            Assert.Throws<NotSupportedException>(() => _imgConvFrmTD.ConvertTo(null, CultureInfo.InvariantCulture, _image, typeof(int)));
        }

        /// <summary>ImageConverter advertises property support with or without a context.</summary>
        [ConditionalFact(Helpers.IsDrawingSupported)]
        public void TestGetPropertiesSupported()
        {
            Assert.True(_imgConv.GetPropertiesSupported(), "GetPropertiesSupported()");
            Assert.True(_imgConv.GetPropertiesSupported(null), "GetPropertiesSupported(null)");
        }

        /// <summary>
        /// GetProperties returns all Image properties when an explicit null attribute
        /// filter is passed, and only browsable ones with the default filter.
        /// </summary>
        [ConditionalFact(Helpers.IsDrawingSupported)]
        public void TestGetProperties()
        {
            const int allPropertiesCount = 14; // Count of all properties in Image class.
            const int browsablePropertiesCount = 7; // Count of browsable properties in Image class (BrowsableAttribute.Yes).
            PropertyDescriptorCollection propsColl;

            // Internally calls TypeDescriptor.GetProperties(typeof(Image), null), which returns all properties.
            propsColl = _imgConv.GetProperties(null, _image, null);
            Assert.Equal(allPropertiesCount, propsColl.Count);

            // Internally calls TypeDescriptor.GetProperties(typeof(Image), new Attribute[] { BrowsableAttribute.Yes }).
            propsColl = _imgConv.GetProperties(null, _image);
            Assert.Equal(browsablePropertiesCount, propsColl.Count);
            propsColl = _imgConv.GetProperties(_image);
            Assert.Equal(browsablePropertiesCount, propsColl.Count);

            // Returns all properties of Image class.
            propsColl = TypeDescriptor.GetProperties(typeof(Image));
            Assert.Equal(allPropertiesCount, propsColl.Count);

            // Internally calls TypeDescriptor.GetProperties(typeof(Image), null), which returns all properties.
            propsColl = _imgConvFrmTD.GetProperties(null, _image, null);
            Assert.Equal(allPropertiesCount, propsColl.Count);

            // Internally calls TypeDescriptor.GetProperties(typeof(Image), new Attribute[] { BrowsableAttribute.Yes }).
            propsColl = _imgConvFrmTD.GetProperties(null, _image);
            Assert.Equal(browsablePropertiesCount, propsColl.Count);
            propsColl = _imgConvFrmTD.GetProperties(_image);
            Assert.Equal(browsablePropertiesCount, propsColl.Count);
        }
    }
}
#region S# License
/******************************************************************************************
NOTICE!!!  This program and source code is owned and licensed by
StockSharp, LLC, www.stocksharp.com
Viewing or use of this code requires your acceptance of the license
agreement found at https://github.com/StockSharp/StockSharp/blob/master/LICENSE
Removal of this comment is a violation of the license agreement.

Project: StockSharp.Algo.Export.Algo
File: ExcelExporter.cs
Created: 2015, 11, 11, 2:32 PM

Copyright 2010 by StockSharp, LLC
*******************************************************************************************/
#endregion S# License
namespace StockSharp.Algo.Export
{
	using System;
	using System.Collections.Generic;
	using System.Linq;
	using System.Windows.Media;

	using Ecng.Collections;
	using Ecng.Common;
	using Ecng.ComponentModel;
	using Ecng.Interop;

	using StockSharp.BusinessEntities;
	using StockSharp.Messages;
	using StockSharp.Localization;

	/// <summary>
	/// The export into Excel.
	/// </summary>
	public class ExcelExporter : BaseExporter
	{
		// Invoked when the Excel row limit is reached and the export is cut short.
		private readonly Action _breaked;

		/// <summary>
		/// Initializes a new instance of the <see cref="ExcelExporter"/>.
		/// </summary>
		/// <param name="security">Security.</param>
		/// <param name="arg">The data parameter.</param>
		/// <param name="isCancelled">The processor, returning export interruption sign.</param>
		/// <param name="fileName">The path to file.</param>
		/// <param name="breaked">The processor, which will be called if maximal value of strings is exceeded.</param>
		public ExcelExporter(Security security, object arg, Func<int, bool> isCancelled, string fileName, Action breaked)
			: base(security, arg, isCancelled, fileName)
		{
			if (breaked == null)
				throw new ArgumentNullException(nameof(breaked));

			_breaked = breaked;
		}

		/// <summary>
		/// To export <see cref="ExecutionMessage"/>.
		/// </summary>
		/// <param name="messages">Messages.</param>
		protected override void Export(IEnumerable<ExecutionMessage> messages)
		{
			switch ((ExecutionTypes)Arg)
			{
				case ExecutionTypes.Tick:
				{
					Do(worker =>
					{
						worker
							.SetCell(0, 0, LocalizedStrings.Id).SetStyle(0, typeof(string))
							.SetCell(1, 0, LocalizedStrings.Time).SetStyle(1, "yyyy-MM-dd HH:mm:ss.fff zzz")
							.SetCell(2, 0, LocalizedStrings.Price).SetStyle(2, typeof(decimal))
							.SetCell(3, 0, LocalizedStrings.Volume).SetStyle(3, typeof(decimal))
							.SetCell(4, 0, LocalizedStrings.Str128)
							.SetCell(5, 0, LocalizedStrings.OI).SetStyle(5, typeof(decimal))
							// Fix: the bool style belongs to the UP_DOWN column (6), not column 5.
							.SetCell(6, 0, "UP_DOWN").SetStyle(6, typeof(bool));

						// Color buys green and sells red by the side column.
						worker.SetConditionalFormatting(4, ComparisonOperator.Equal, "\"{0}\"".Put(Sides.Buy), null, Colors.Green);
						worker.SetConditionalFormatting(4, ComparisonOperator.Equal, "\"{0}\"".Put(Sides.Sell), null, Colors.Red);

						var index = 1;

						foreach (var message in messages)
						{
							worker
								.SetCell(0, index, message.TradeId == null ? message.TradeStringId : message.TradeId.To<string>())
								.SetCell(1, index, message.ServerTime)
								.SetCell(2, index, message.TradePrice)
								.SetCell(3, index, message.TradeVolume)
								.SetCell(4, index, message.OriginSide)
								.SetCell(5, index, message.OpenInterest)
								.SetCell(6, index, message.IsUpTick);

							index++;

							if (!Check(index))
								break;
						}
					});

					break;
				}
				case ExecutionTypes.OrderLog:
				{
					Do(worker =>
					{
						worker
							.SetCell(0, 0, LocalizedStrings.Id).SetStyle(0, typeof(string))
							.SetCell(1, 0, LocalizedStrings.Time).SetStyle(1, "yyyy-MM-dd HH:mm:ss.fff zzz")
							.SetCell(2, 0, LocalizedStrings.Price).SetStyle(2, typeof(decimal))
							.SetCell(3, 0, LocalizedStrings.Volume).SetStyle(3, typeof(decimal))
							.SetCell(4, 0, LocalizedStrings.Str128)
							.SetCell(5, 0, LocalizedStrings.Str722)
							.SetCell(6, 0, LocalizedStrings.Type)
							.SetCell(7, 0, LocalizedStrings.Str342)
							.SetCell(8, 0, LocalizedStrings.Str723).SetStyle(8, typeof(string))
							.SetCell(9, 0, LocalizedStrings.Str724).SetStyle(9, typeof(decimal))
							.SetCell(10, 0, LocalizedStrings.Str725).SetStyle(10, typeof(decimal));

						worker.SetConditionalFormatting(4, ComparisonOperator.Equal, "\"{0}\"".Put(Sides.Buy), null, Colors.Green);
						worker.SetConditionalFormatting(4, ComparisonOperator.Equal, "\"{0}\"".Put(Sides.Sell), null, Colors.Red);

						var index = 1;

						foreach (var message in messages)
						{
							worker
								.SetCell(0, index, message.OrderId == null ? message.OrderStringId : message.OrderId.To<string>())
								.SetCell(1, index, message.ServerTime)
								.SetCell(2, index, message.OrderPrice)
								.SetCell(3, index, message.OrderVolume)
								.SetCell(4, index, message.Side)
								.SetCell(5, index, message.OrderState)
								.SetCell(6, index, message.TimeInForce)
								.SetCell(7, index, message.IsSystem);

							// Trade columns are only filled for rows that carry a trade.
							if (message.TradePrice != null)
							{
								worker
									.SetCell(8, index, message.TradeId == null ? message.TradeStringId : message.TradeId.To<string>())
									.SetCell(9, index, message.TradePrice)
									.SetCell(10, index, message.OpenInterest);
							}

							index++;

							if (!Check(index))
								break;
						}
					});

					break;
				}
				case ExecutionTypes.Transaction:
				{
					Do(worker =>
					{
						// Fix: style indices previously pointed at the wrong columns
						// (1/2/3/3/3 instead of 0/4/5/6/11 — copy-paste from the tick branch).
						worker
							.SetCell(0, 0, LocalizedStrings.Time).SetStyle(0, "yyyy-MM-dd HH:mm:ss.fff zzz")
							.SetCell(1, 0, LocalizedStrings.Portfolio)
							.SetCell(2, 0, LocalizedStrings.TransactionId)
							.SetCell(3, 0, LocalizedStrings.OrderId)
							.SetCell(4, 0, LocalizedStrings.Price).SetStyle(4, typeof(decimal))
							.SetCell(5, 0, LocalizedStrings.Volume).SetStyle(5, typeof(decimal))
							.SetCell(6, 0, LocalizedStrings.Str1325).SetStyle(6, typeof(decimal))
							.SetCell(7, 0, LocalizedStrings.Str128)
							.SetCell(8, 0, LocalizedStrings.Str132)
							.SetCell(9, 0, LocalizedStrings.Str134)
							.SetCell(10, 0, LocalizedStrings.Str506)
							.SetCell(11, 0, LocalizedStrings.TradePrice).SetStyle(11, typeof(decimal));

						worker.SetConditionalFormatting(7, ComparisonOperator.Equal, "\"{0}\"".Put(Sides.Buy), null, Colors.Green);
						worker.SetConditionalFormatting(7, ComparisonOperator.Equal, "\"{0}\"".Put(Sides.Sell), null, Colors.Red);

						worker.SetConditionalFormatting(9, ComparisonOperator.Equal, "\"{0}\"".Put(OrderStates.Active), null, Colors.Blue);
						worker.SetConditionalFormatting(9, ComparisonOperator.Equal, "\"{0}\"".Put(OrderStates.Done), null, Colors.Green);
						worker.SetConditionalFormatting(9, ComparisonOperator.Equal, "\"{0}\"".Put(OrderStates.Failed), null, Colors.Red);

						var index = 1;

						foreach (var message in messages)
						{
							// NOTE(review): columns 12/13 (HasOrderInfo/HasTradeInfo) are written
							// without header cells — confirm whether headers should be added.
							worker
								.SetCell(0, index, message.ServerTime)
								.SetCell(1, index, message.PortfolioName)
								.SetCell(2, index, message.TransactionId)
								.SetCell(3, index, message.OrderId == null ? message.OrderStringId : message.OrderId.To<string>())
								.SetCell(4, index, message.OrderPrice)
								.SetCell(5, index, message.OrderVolume)
								.SetCell(6, index, message.Balance)
								.SetCell(7, index, message.Side)
								.SetCell(8, index, message.OrderType)
								.SetCell(9, index, message.OrderState)
								.SetCell(10, index, message.TradeId == null ? message.TradeStringId : message.TradeId.To<string>())
								.SetCell(11, index, message.TradePrice)
								.SetCell(12, index, message.HasOrderInfo)
								.SetCell(13, index, message.HasTradeInfo);

							index++;

							if (!Check(index))
								break;
						}
					});

					break;
				}
				default:
					throw new ArgumentOutOfRangeException();
			}
		}

		/// <summary>
		/// To export <see cref="QuoteChangeMessage"/>.
		/// </summary>
		/// <param name="messages">Messages.</param>
		protected override void Export(IEnumerable<QuoteChangeMessage> messages)
		{
			Do(worker =>
			{
				var rowIndex = 0;

				foreach (var message in messages)
				{
					worker
						.SetCell(0, rowIndex, LocalizedStrings.Time)
						.SetCell(1, rowIndex, message.ServerTime);

					// Each depth snapshot occupies 4 rows: bids, volumes, asks,
					// laid out column-by-column in descending price order.
					var columnIndex = 0;

					foreach (var quote in message.Bids.Concat(message.Asks).OrderByDescending(q => q.Price))
					{
						worker
							.SetCell(columnIndex, rowIndex + (quote.Side == Sides.Buy ? 1 : 3), quote.Price)
							.SetCell(columnIndex, rowIndex + 2, quote.Volume);

						columnIndex++;
					}

					rowIndex += 4;

					if (!Check(rowIndex))
						break;
				}
			});
		}

		/// <summary>
		/// To export <see cref="Level1ChangeMessage"/>.
		/// </summary>
		/// <param name="messages">Messages.</param>
		protected override void Export(IEnumerable<Level1ChangeMessage> messages)
		{
			Do(worker =>
			{
				// Columns are allocated lazily, in the order fields first appear.
				var columns = new Dictionary<Level1Fields, int>();

				worker
					.SetCell(0, 0, LocalizedStrings.Time).SetStyle(0, "yyyy-MM-dd HH:mm:ss.fff");

				var row = 1;

				foreach (var message in messages)
				{
					worker.SetCell(0, row, message.LocalTime);

					foreach (var pair in message.Changes)
					{
						var field = pair.Key;

						var columnIndex = columns.TryGetValue2(field);

						if (columnIndex == null)
						{
							// First time this field is seen: assign the next free
							// column and write its header + style.
							columnIndex = columns.Count;
							columns.Add(field, columnIndex.Value);

							worker.SetCell(columnIndex.Value, 0, field.GetDisplayName());
							ApplyCellStyle(worker, field, columnIndex.Value);
						}

						worker.SetCell(columns[field], row, pair.Value);
					}

					if (!Check(++row))
						break;
				}
			});
		}

		// Picks an Excel cell style matching the CLR type of the level1 field value.
		private static void ApplyCellStyle(ExcelWorker worker, Level1Fields field, int column)
		{
			switch (field)
			{
				case Level1Fields.LastTrade:
				case Level1Fields.BestAsk:
				case Level1Fields.BestBid:
					// Composite values — no single cell style applies.
					break;
				case Level1Fields.LastTradeId:
				case Level1Fields.BidsCount:
				case Level1Fields.AsksCount:
				case Level1Fields.TradesCount:
					worker.SetStyle(column, typeof(long));
					break;
				case Level1Fields.LastTradeTime:
				case Level1Fields.BestAskTime:
				case Level1Fields.BestBidTime:
					worker.SetStyle(column, typeof(DateTimeOffset));
					break;
				default:
					worker.SetStyle(column, typeof(decimal));
					break;
			}
		}

		/// <summary>
		/// To export <see cref="CandleMessage"/>.
		/// </summary>
		/// <param name="messages">Messages.</param>
		protected override void Export(IEnumerable<CandleMessage> messages)
		{
			Do(worker =>
			{
				worker
					.SetCell(0, 0, LocalizedStrings.Str726).SetStyle(0, "yyyy-MM-dd HH:mm:ss.fff")
					.SetCell(1, 0, LocalizedStrings.Str727).SetStyle(1, "yyyy-MM-dd HH:mm:ss.fff")
					.SetCell(2, 0, "O").SetStyle(2, typeof(decimal))
					.SetCell(3, 0, "H").SetStyle(3, typeof(decimal))
					.SetCell(4, 0, "L").SetStyle(4, typeof(decimal))
					.SetCell(5, 0, "C").SetStyle(5, typeof(decimal))
					.SetCell(6, 0, "V").SetStyle(6, typeof(decimal))
					.SetCell(7, 0, LocalizedStrings.OI).SetStyle(7, typeof(decimal));

				var index = 1;

				foreach (var candle in messages)
				{
					worker
						.SetCell(0, index, candle.OpenTime)
						.SetCell(1, index, candle.CloseTime)
						.SetCell(2, index, candle.OpenPrice)
						.SetCell(3, index, candle.HighPrice)
						.SetCell(4, index, candle.LowPrice)
						.SetCell(5, index, candle.ClosePrice)
						.SetCell(6, index, candle.TotalVolume)
						.SetCell(7, index, candle.OpenInterest);

					index++;

					if (!Check(index))
						break;
				}
			});
		}

		/// <summary>
		/// To export <see cref="NewsMessage"/>.
		/// </summary>
		/// <param name="messages">Messages.</param>
		protected override void Export(IEnumerable<NewsMessage> messages)
		{
			Do(worker =>
			{
				worker
					.SetCell(0, 0, LocalizedStrings.Id).SetStyle(0, typeof(string))
					.SetCell(1, 0, LocalizedStrings.Time).SetStyle(1, "yyyy-MM-dd HH:mm:ss.fff")
					.SetCell(2, 0, LocalizedStrings.Security).SetStyle(2, typeof(string))
					.SetCell(3, 0, LocalizedStrings.Board).SetStyle(3, typeof(string))
					.SetCell(4, 0, LocalizedStrings.Str215).SetStyle(4, typeof(string))
					.SetCell(5, 0, LocalizedStrings.Str217).SetStyle(5, typeof(string))
					.SetCell(6, 0, LocalizedStrings.Str213).SetStyle(6, typeof(string))
					// Fix: column 7 was styling column 6 a second time.
					.SetCell(7, 0, LocalizedStrings.Str221).SetStyle(7, typeof(string));

				var index = 1;

				foreach (var n in messages)
				{
					worker
						.SetCell(0, index, n.Id)
						.SetCell(1, index, n.ServerTime)
						.SetCell(2, index, n.SecurityId == null ? null : n.SecurityId.Value.SecurityCode)
						.SetCell(3, index, n.BoardCode)
						.SetCell(4, index, n.Headline)
						.SetCell(5, index, n.Story)
						.SetCell(6, index, n.Source)
						.SetCell(7, index, n.Url);

					index++;

					if (!Check(index))
						break;
				}
			});
		}

		/// <summary>
		/// To export <see cref="SecurityMessage"/>.
		/// </summary>
		/// <param name="messages">Messages.</param>
		protected override void Export(IEnumerable<SecurityMessage> messages)
		{
			Do(worker =>
			{
				var colIndex = 0;

				worker
					.SetCell(colIndex, 0, LocalizedStrings.Code).SetStyle(colIndex++, typeof(string))
					.SetCell(colIndex, 0, LocalizedStrings.Board).SetStyle(colIndex++, typeof(string))
					.SetCell(colIndex, 0, LocalizedStrings.Name).SetStyle(colIndex++, typeof(string))
					.SetCell(colIndex, 0, LocalizedStrings.Str363).SetStyle(colIndex++, typeof(string))
					.SetCell(colIndex, 0, LocalizedStrings.PriceStep).SetStyle(colIndex++, typeof(decimal))
					.SetCell(colIndex, 0, LocalizedStrings.VolumeStep).SetStyle(colIndex++, typeof(decimal))
					.SetCell(colIndex, 0, LocalizedStrings.Str330).SetStyle(colIndex++, typeof(decimal))
					.SetCell(colIndex, 0, LocalizedStrings.Type).SetStyle(colIndex++, typeof(string))
					.SetCell(colIndex, 0, LocalizedStrings.Decimals).SetStyle(colIndex++, typeof(decimal))
					.SetCell(colIndex, 0, LocalizedStrings.Str551).SetStyle(colIndex++, typeof(string))
					.SetCell(colIndex, 0, LocalizedStrings.Strike).SetStyle(colIndex++, typeof(decimal))
					.SetCell(colIndex, 0, LocalizedStrings.UnderlyingAsset).SetStyle(colIndex++, typeof(string))
					.SetCell(colIndex, 0, LocalizedStrings.ExpiryDate).SetStyle(colIndex++, "yyyy-MM-dd")
					.SetCell(colIndex, 0, LocalizedStrings.SettlementDate).SetStyle(colIndex++, "yyyy-MM-dd")
					.SetCell(colIndex, 0, LocalizedStrings.Currency).SetStyle(colIndex++, typeof(string))
					.SetCell(colIndex, 0, "Bloomberg").SetStyle(colIndex++, typeof(string))
					.SetCell(colIndex, 0, "CUSIP").SetStyle(colIndex++, typeof(string))
					.SetCell(colIndex, 0, "IQFeed").SetStyle(colIndex++, typeof(string))
					.SetCell(colIndex, 0, "InteractiveBrokers").SetStyle(colIndex++, typeof(string))
					.SetCell(colIndex, 0, "ISIN").SetStyle(colIndex++, typeof(string))
					.SetCell(colIndex, 0, "Plaza").SetStyle(colIndex++, typeof(string))
					.SetCell(colIndex, 0, "RIC").SetStyle(colIndex++, typeof(string))
					.SetCell(colIndex, 0, "SEDOL").SetStyle(colIndex, typeof(string));

				var rowIndex = 1;

				foreach (var security in messages)
				{
					// Data cells follow the same column order as the headers above.
					colIndex = 0;

					worker
						.SetCell(colIndex++, rowIndex, security.SecurityId.SecurityCode)
						.SetCell(colIndex++, rowIndex, security.SecurityId.BoardCode)
						.SetCell(colIndex++, rowIndex, security.Name)
						.SetCell(colIndex++, rowIndex, security.ShortName)
						.SetCell(colIndex++, rowIndex, security.PriceStep)
						.SetCell(colIndex++, rowIndex, security.VolumeStep)
						.SetCell(colIndex++, rowIndex, security.Multiplier)
						.SetCell(colIndex++, rowIndex, security.SecurityType == null ? string.Empty : security.SecurityType.Value.GetDisplayName())
						.SetCell(colIndex++, rowIndex, security.Decimals)
						.SetCell(colIndex++, rowIndex, security.OptionType == null ? string.Empty : security.OptionType.Value.GetDisplayName())
						.SetCell(colIndex++, rowIndex, security.Strike)
						.SetCell(colIndex++, rowIndex, security.BinaryOptionType)
						.SetCell(colIndex++, rowIndex, security.UnderlyingSecurityCode)
						.SetCell(colIndex++, rowIndex, security.ExpiryDate)
						.SetCell(colIndex++, rowIndex, security.SettlementDate)
						.SetCell(colIndex++, rowIndex, security.Currency == null ? string.Empty : security.Currency.Value.GetDisplayName())
						.SetCell(colIndex++, rowIndex, security.SecurityId.Bloomberg)
						.SetCell(colIndex++, rowIndex, security.SecurityId.Cusip)
						.SetCell(colIndex++, rowIndex, security.SecurityId.IQFeed)
						.SetCell(colIndex++, rowIndex, security.SecurityId.InteractiveBrokers)
						.SetCell(colIndex++, rowIndex, security.SecurityId.Isin)
						.SetCell(colIndex++, rowIndex, security.SecurityId.Plaza)
						.SetCell(colIndex++, rowIndex, security.SecurityId.Ric)
						.SetCell(colIndex, rowIndex, security.SecurityId.Sedol);

					rowIndex++;

					if (!Check(rowIndex))
						break;
				}
			});
		}

		// Runs the export action against a fresh worker and saves the result to Path.
		private void Do(Action<ExcelWorker> action)
		{
			if (action == null)
				throw new ArgumentNullException(nameof(action));

			using (var worker = new ExcelWorker())
			{
				action(worker);
				worker.Save(Path, false);
			}
		}

		// Returns true while export may continue; invokes _breaked and returns false
		// once the Excel worksheet row limit is reached.
		private bool Check(int index)
		{
			// Excel (.xlsx) worksheets are limited to 1,048,576 rows:
			// http://office.microsoft.com/en-us/excel-help/excel-specifications-and-limits-HA103980614.aspx
			if (index < 1048576)
			{
				return CanProcess();
			}
			else
			{
				_breaked();
				return false;
			}
		}
	}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Reflection;
using Microsoft.AspNetCore.Http.Headers;
using Microsoft.Extensions.Primitives;
using Microsoft.Net.Http.Headers;

namespace Microsoft.AspNetCore.Http
{
    /// <summary>
    /// Extension methods for accessing strongly typed HTTP request and response
    /// headers.
    /// </summary>
    public static class HeaderDictionaryTypeExtensions
    {
        /// <summary>
        /// Gets strongly typed HTTP request headers.
        /// </summary>
        /// <param name="request">The <see cref="HttpRequest"/>.</param>
        /// <returns>The <see cref="RequestHeaders"/>.</returns>
        public static RequestHeaders GetTypedHeaders(this HttpRequest request)
        {
            return new RequestHeaders(request.Headers);
        }

        /// <summary>
        /// Gets strongly typed HTTP response headers.
        /// </summary>
        /// <param name="response">The <see cref="HttpResponse"/>.</param>
        /// <returns>The <see cref="ResponseHeaders"/>.</returns>
        public static ResponseHeaders GetTypedHeaders(this HttpResponse response)
        {
            return new ResponseHeaders(response.Headers);
        }

        // These are all shared helpers used by both RequestHeaders and ResponseHeaders

        // Parses the named header as an HTTP date, or returns null when absent/invalid.
        internal static DateTimeOffset? GetDate(this IHeaderDictionary headers, string name)
        {
            if (headers == null)
            {
                throw new ArgumentNullException(nameof(headers));
            }
            if (name == null)
            {
                throw new ArgumentNullException(nameof(name));
            }

            return headers.Get<DateTimeOffset?>(name);
        }

        // Sets the header to value.ToString(), or removes the header when value is null.
        internal static void Set(this IHeaderDictionary headers, string name, object? value)
        {
            if (headers == null)
            {
                throw new ArgumentNullException(nameof(headers));
            }
            if (name == null)
            {
                throw new ArgumentNullException(nameof(name));
            }

            if (value == null)
            {
                headers.Remove(name);
            }
            else
            {
                headers[name] = value.ToString();
            }
        }

        // Replaces the header with the stringified values, or removes it when empty/null.
        internal static void SetList<T>(this IHeaderDictionary headers, string name, IList<T>? values)
        {
            if (headers == null)
            {
                throw new ArgumentNullException(nameof(headers));
            }
            if (name == null)
            {
                throw new ArgumentNullException(nameof(name));
            }

            if (values == null || values.Count == 0)
            {
                headers.Remove(name);
            }
            else if (values.Count == 1)
            {
                headers[name] = new StringValues(values[0]!.ToString());
            }
            else
            {
                var newValues = new string[values.Count];
                for (var i = 0; i < values.Count; i++)
                {
                    newValues[i] = values[i]!.ToString()!;
                }
                headers[name] = new StringValues(newValues);
            }
        }

        /// <summary>
        /// Appends a sequence of values to <see cref="IHeaderDictionary"/>.
        /// </summary>
        /// <typeparam name="T">The type of header value.</typeparam>
        /// <param name="Headers">The <see cref="IHeaderDictionary"/>.</param>
        /// <param name="name">The header name.</param>
        /// <param name="values">The values to append.</param>
        // NOTE: the PascalCase parameter name 'Headers' is kept for source compatibility
        // with existing named-argument callers, despite violating naming conventions.
        public static void AppendList<T>(this IHeaderDictionary Headers, string name, IList<T> values)
        {
            if (name == null)
            {
                throw new ArgumentNullException(nameof(name));
            }
            if (values == null)
            {
                throw new ArgumentNullException(nameof(values));
            }

            switch (values.Count)
            {
                case 0:
                    Headers.Append(name, StringValues.Empty);
                    break;
                case 1:
                    Headers.Append(name, new StringValues(values[0]!.ToString()));
                    break;
                default:
                    var newValues = new string[values.Count];
                    for (var i = 0; i < values.Count; i++)
                    {
                        newValues[i] = values[i]!.ToString()!;
                    }
                    Headers.Append(name, new StringValues(newValues));
                    break;
            }
        }

        // Formats value as an RFC 1123 date header, or removes the header when null.
        internal static void SetDate(this IHeaderDictionary headers, string name, DateTimeOffset? value)
        {
            if (headers == null)
            {
                throw new ArgumentNullException(nameof(headers));
            }
            if (name == null)
            {
                throw new ArgumentNullException(nameof(name));
            }

            if (value.HasValue)
            {
                headers[name] = HeaderUtilities.FormatDate(value.GetValueOrDefault());
            }
            else
            {
                headers.Remove(name);
            }
        }

        // Known single-value header parsers, keyed by target type. Avoids reflection
        // for the common header value types. Values are Func<string, T>.
        private static readonly IDictionary<Type, object> KnownParsers = new Dictionary<Type, object>()
        {
            { typeof(CacheControlHeaderValue), new Func<string, CacheControlHeaderValue?>(value => { return CacheControlHeaderValue.TryParse(value, out var result) ? result : null; }) },
            { typeof(ContentDispositionHeaderValue), new Func<string, ContentDispositionHeaderValue?>(value => { return ContentDispositionHeaderValue.TryParse(value, out var result) ? result : null; }) },
            { typeof(ContentRangeHeaderValue), new Func<string, ContentRangeHeaderValue?>(value => { return ContentRangeHeaderValue.TryParse(value, out var result) ? result : null; }) },
            { typeof(MediaTypeHeaderValue), new Func<string, MediaTypeHeaderValue?>(value => { return MediaTypeHeaderValue.TryParse(value, out var result) ? result : null; }) },
            { typeof(RangeConditionHeaderValue), new Func<string, RangeConditionHeaderValue?>(value => { return RangeConditionHeaderValue.TryParse(value, out var result) ? result : null; }) },
            { typeof(RangeHeaderValue), new Func<string, RangeHeaderValue?>(value => { return RangeHeaderValue.TryParse(value, out var result) ? result : null; }) },
            { typeof(EntityTagHeaderValue), new Func<string, EntityTagHeaderValue?>(value => { return EntityTagHeaderValue.TryParse(value, out var result) ? result : null; }) },
            { typeof(DateTimeOffset?), new Func<string, DateTimeOffset?>(value => { return HeaderUtilities.TryParseDate(value, out var result) ? result : null; }) },
            { typeof(long?), new Func<string, long?>(value => { return HeaderUtilities.TryParseNonNegativeInt64(value, out var result) ? result : null; }) },
        };

        // Known list-valued header parsers, keyed by element type. Values are
        // Func<IList<string>, IList<T>> returning an empty array on parse failure.
        private static readonly IDictionary<Type, object> KnownListParsers = new Dictionary<Type, object>()
        {
            { typeof(MediaTypeHeaderValue), new Func<IList<string>, IList<MediaTypeHeaderValue>>(value => { return MediaTypeHeaderValue.TryParseList(value, out var result) ? result : Array.Empty<MediaTypeHeaderValue>(); }) },
            { typeof(StringWithQualityHeaderValue), new Func<IList<string>, IList<StringWithQualityHeaderValue>>(value => { return StringWithQualityHeaderValue.TryParseList(value, out var result) ? result : Array.Empty<StringWithQualityHeaderValue>(); }) },
            { typeof(CookieHeaderValue), new Func<IList<string>, IList<CookieHeaderValue>>(value => { return CookieHeaderValue.TryParseList(value, out var result) ? result : Array.Empty<CookieHeaderValue>(); }) },
            { typeof(EntityTagHeaderValue), new Func<IList<string>, IList<EntityTagHeaderValue>>(value => { return EntityTagHeaderValue.TryParseList(value, out var result) ? result : Array.Empty<EntityTagHeaderValue>(); }) },
            { typeof(SetCookieHeaderValue), new Func<IList<string>, IList<SetCookieHeaderValue>>(value => { return SetCookieHeaderValue.TryParseList(value, out var result) ? result : Array.Empty<SetCookieHeaderValue>(); }) },
        };

        // Parses the named header into T using a known parser, falling back to a
        // reflected public static bool TryParse(string, out T). Returns default on failure.
        internal static T? Get<T>(this IHeaderDictionary headers, string name)
        {
            if (headers == null)
            {
                throw new ArgumentNullException(nameof(headers));
            }

            var value = headers[name];

            if (StringValues.IsNullOrEmpty(value))
            {
                return default(T);
            }

            if (KnownParsers.TryGetValue(typeof(T), out var temp))
            {
                var func = (Func<string, T>)temp;
                return func(value.ToString());
            }

            return GetViaReflection<T>(value.ToString());
        }

        // Parses the named header into a list of T; empty list when absent or unparsable.
        internal static IList<T> GetList<T>(this IHeaderDictionary headers, string name)
        {
            if (headers == null)
            {
                throw new ArgumentNullException(nameof(headers));
            }

            var values = headers[name];
            return GetList<T>(values);
        }

        internal static IList<T> GetList<T>(this StringValues values)
        {
            if (StringValues.IsNullOrEmpty(values))
            {
                return Array.Empty<T>();
            }

            if (KnownListParsers.TryGetValue(typeof(T), out var temp))
            {
                var func = (Func<IList<string>, IList<T>>)temp;
                return func(values);
            }

            return GetListViaReflection<T>(values);
        }

        private static T? GetViaReflection<T>(string value)
        {
            // TODO: Cache the reflected type for later? Only if success?
            var type = typeof(T);
            var method = type.GetMethods(BindingFlags.Public | BindingFlags.Static)
                .FirstOrDefault(methodInfo =>
                {
                    if (string.Equals("TryParse", methodInfo.Name, StringComparison.Ordinal)
                        && methodInfo.ReturnParameter.ParameterType.Equals(typeof(bool)))
                    {
                        var methodParams = methodInfo.GetParameters();
                        return methodParams.Length == 2
                            && methodParams[0].ParameterType.Equals(typeof(string))
                            && methodParams[1].IsOut
                            && methodParams[1].ParameterType.Equals(type.MakeByRefType());
                    }
                    return false;
                });

            if (method == null)
            {
                // Fix: previously used nameof(T), which always produced the literal
                // string "T" instead of the actual type name; also closed the quoted
                // signature properly.
                throw new NotSupportedException(string.Format(
                    CultureInfo.CurrentCulture,
                    "The given type '{0}' does not have a TryParse method with the required signature 'public static bool TryParse(string, out {0})'.",
                    typeof(T)));
            }

            var parameters = new object?[] { value, null };
            var success = (bool)method.Invoke(null, parameters)!;
            if (success)
            {
                return (T?)parameters[1];
            }
            return default(T);
        }

        private static IList<T> GetListViaReflection<T>(StringValues values)
        {
            // TODO: Cache the reflected type for later? Only if success?
            var type = typeof(T);
            var method = type.GetMethods(BindingFlags.Public | BindingFlags.Static)
                .FirstOrDefault(methodInfo =>
                {
                    if (string.Equals("TryParseList", methodInfo.Name, StringComparison.Ordinal)
                        && methodInfo.ReturnParameter.ParameterType.Equals(typeof(bool)))
                    {
                        var methodParams = methodInfo.GetParameters();
                        return methodParams.Length == 2
                            && methodParams[0].ParameterType.Equals(typeof(IList<string>))
                            && methodParams[1].IsOut
                            && methodParams[1].ParameterType.Equals(typeof(IList<T>).MakeByRefType());
                    }
                    return false;
                });

            if (method == null)
            {
                // Fix: same nameof(T) / missing closing quote issues as GetViaReflection.
                throw new NotSupportedException(string.Format(
                    CultureInfo.CurrentCulture,
                    "The given type '{0}' does not have a TryParseList method with the required signature 'public static bool TryParseList(IList<string>, out IList<{0}>)'.",
                    typeof(T)));
            }

            var parameters = new object?[] { values, null };
            var success = (bool)method.Invoke(null, parameters)!;
            if (success)
            {
                return (IList<T>)parameters[1]!;
            }
            return Array.Empty<T>();
        }
    }
}
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.

using System;
using System.Collections.Generic;
using System.Linq;
using Avalonia.VisualTree;

namespace Avalonia.Input.Navigation
{
    /// <summary>
    /// The implementation for default tab navigation.
    /// </summary>
    internal static class TabNavigation
    {
        /// <summary>
        /// Gets the next control in the specified tab direction.
        /// </summary>
        /// <param name="element">The element.</param>
        /// <param name="direction">The tab direction. Must be Next or Previous.</param>
        /// <returns>
        /// The next element in the specified direction, or null if <paramref name="element"/>
        /// was the last in the requested direction.
        /// </returns>
        public static IInputElement GetNextInTabOrder(
            IInputElement element,
            NavigationDirection direction)
        {
            Contract.Requires<ArgumentNullException>(element != null);
            Contract.Requires<ArgumentException>(
                direction == NavigationDirection.Next ||
                direction == NavigationDirection.Previous);

            var container = element.GetVisualParent<IInputElement>();

            if (container != null)
            {
                // The container's tab-navigation mode decides what happens when the
                // search leaves the container.
                var mode = KeyboardNavigation.GetTabNavigation((InputElement)container);

                switch (mode)
                {
                    case KeyboardNavigationMode.Continue:
                        // Fall through to the next container when this one is exhausted.
                        return GetNextInContainer(element, container, direction) ??
                               GetFirstInNextContainer(element, direction);
                    case KeyboardNavigationMode.Cycle:
                        // Wrap around inside the same container.
                        return GetNextInContainer(element, container, direction) ??
                               GetFocusableDescendent(container, direction);
                    case KeyboardNavigationMode.Contained:
                        // Never leave the container: may return null at the edge.
                        return GetNextInContainer(element, container, direction);
                    default:
                        // Once/None/Local: skip this container's contents entirely.
                        return GetFirstInNextContainer(container, direction);
                }
            }
            else
            {
                // No parent: start from the element's own focusable descendents.
                return GetFocusableDescendents(element).FirstOrDefault();
            }
        }

        /// <summary>
        /// Gets the first or last focusable descendent of the specified element.
        /// </summary>
        /// <param name="container">The element.</param>
        /// <param name="direction">The direction to search.</param>
        /// <returns>The element or null if not found.</returns>
        private static IInputElement GetFocusableDescendent(IInputElement container, NavigationDirection direction)
        {
            return direction == NavigationDirection.Next ?
                GetFocusableDescendents(container).FirstOrDefault() :
                GetFocusableDescendents(container).LastOrDefault();
        }

        /// <summary>
        /// Gets the focusable descendents of the specified element.
        /// </summary>
        /// <param name="element">The element.</param>
        /// <returns>The element's focusable descendents, in depth-first visual order.</returns>
        private static IEnumerable<IInputElement> GetFocusableDescendents(IInputElement element)
        {
            var mode = KeyboardNavigation.GetTabNavigation((InputElement)element);

            if (mode == KeyboardNavigationMode.None)
            {
                yield break;
            }

            var children = element.GetVisualChildren().OfType<IInputElement>();

            if (mode == KeyboardNavigationMode.Once)
            {
                // 'Once' containers contribute a single element: the designated
                // active element if set, otherwise only their first child.
                var active = KeyboardNavigation.GetTabOnceActiveElement((InputElement)element);

                if (active != null)
                {
                    yield return active;
                    yield break;
                }
                else
                {
                    children = children.Take(1);
                }
            }

            foreach (var child in children)
            {
                if (child.CanFocus())
                {
                    yield return child;
                }

                if (child.CanFocusDescendents())
                {
                    // Recurse into children that may contain focusable elements.
                    foreach (var descendent in GetFocusableDescendents(child))
                    {
                        yield return descendent;
                    }
                }
            }
        }

        /// <summary>
        /// Gets the next item that should be focused in the specified container.
        /// </summary>
        /// <param name="element">The starting element.</param>
        /// <param name="container">The container.</param>
        /// <param name="direction">The direction.</param>
        /// <returns>The next element, or null if the element is the last.</returns>
        private static IInputElement GetNextInContainer(
            IInputElement element,
            IInputElement container,
            NavigationDirection direction)
        {
            if (direction == NavigationDirection.Next)
            {
                // Moving forward descends into the element before moving past it.
                var descendent = GetFocusableDescendents(element).FirstOrDefault();

                if (descendent != null)
                {
                    return descendent;
                }
            }

            if (container != null)
            {
                var navigable = container as INavigableContainer;

                // TODO: Do a spatial search here if the container doesn't implement
                // INavigableContainer.
                if (navigable != null)
                {
                    // Walk siblings via the container until a focusable one is found
                    // or the container reports no further element (null).
                    while (element != null)
                    {
                        element = navigable.GetControl(direction, element);

                        if (element != null && element.CanFocus())
                        {
                            break;
                        }
                    }
                }
                else
                {
                    // Non-navigable containers currently yield no in-container result.
                    element = null;
                }

                if (element != null && direction == NavigationDirection.Previous)
                {
                    // Moving backward enters the found element at its last descendent.
                    var descendent = GetFocusableDescendents(element).LastOrDefault();

                    if (descendent != null)
                    {
                        return descendent;
                    }
                }

                return element;
            }

            return null;
        }

        /// <summary>
        /// Gets the first item that should be focused in the next container.
        /// </summary>
        /// <param name="container">The container.</param>
        /// <param name="direction">The direction of the search.</param>
        /// <returns>The first element, or null if there are no more elements.</returns>
        private static IInputElement GetFirstInNextContainer(
            IInputElement container,
            NavigationDirection direction)
        {
            var parent = container.GetVisualParent<IInputElement>();
            IInputElement next = null;

            if (parent != null)
            {
                // Shift+Tab lands on a focusable parent before its other children.
                if (direction == NavigationDirection.Previous && parent.CanFocus())
                {
                    return parent;
                }

                var siblings = parent.GetVisualChildren()
                    .OfType<IInputElement>()
                    .Where(FocusExtensions.CanFocusDescendents);

                // Pick the sibling immediately after (Next) or before (Previous) the
                // current container in visual order.
                var sibling = direction == NavigationDirection.Next ?
                    siblings.SkipWhile(x => x != container).Skip(1).FirstOrDefault() :
                    siblings.TakeWhile(x => x != container).LastOrDefault();

                if (sibling != null)
                {
                    if (sibling.CanFocus())
                    {
                        next = sibling;
                    }
                    else
                    {
                        next = direction == NavigationDirection.Next ?
                            GetFocusableDescendents(sibling).FirstOrDefault() :
                            GetFocusableDescendents(sibling).LastOrDefault();
                    }
                }

                if (next == null)
                {
                    // No suitable sibling: continue the search one level up.
                    next = GetFirstInNextContainer(parent, direction);
                }
            }
            else
            {
                // Reached the root: wrap around within the root container.
                next = direction == NavigationDirection.Next ?
                    GetFocusableDescendents(container).FirstOrDefault() :
                    GetFocusableDescendents(container).LastOrDefault();
            }

            return next;
        }
    }
}
using hw.DebugFormatter;
using Reni.Basics;
using Reni.Parser;
using Reni.SyntaxTree;
using Reni.TokenClasses;

namespace Reni.FeatureTest.Helper
{
    /// <summary>
    /// Test-helper pattern describing an expected syntax shape. Concrete subclasses
    /// assert that a parsed <see cref="BinaryTree"/> or <see cref="ValueSyntax"/>
    /// matches the pattern.
    /// </summary>
    abstract class LikeSyntax : DumpableObject
    {
        internal abstract void AssertLike(BinaryTree target);
        internal abstract void AssertLike(ValueSyntax syntax);

        // Pattern for an empty bracket pair.
        public static LikeSyntax Null => new Empty();

        // Pattern for "<this> dump_print".
        public LikeSyntax DumpPrint => Expression("dump_print");

        public static LikeSyntax Number(int i) => new Number(i);

        public static LikeSyntax Expression(LikeSyntax s1, string s2, LikeSyntax s3)
            => new Expression(s1, s2, s3);

        public static LikeSyntax Compound(LikeSyntax[] list, Declaration[] declarations, int[] converters)
            => new Struct(list, declarations, converters);

        // Arithmetic operators build binary-expression patterns.
        public static LikeSyntax operator +(LikeSyntax x, LikeSyntax y) => x.Expression("+", y);
        public static LikeSyntax operator -(LikeSyntax x, LikeSyntax y) => x.Expression("-", y);
        public static LikeSyntax operator *(LikeSyntax x, LikeSyntax y) => x.Expression("*", y);
        public static LikeSyntax operator /(LikeSyntax x, LikeSyntax y) => x.Expression("/", y);

        public static Declaration Declaration(string name, int position) => new(name, position);

        // Pattern for a bare symbol token (no operands).
        public static LikeSyntax Symbol(string s) => new Expression(null, s, null);

        public LikeSyntax Expression(string s2, LikeSyntax s3) => new Expression(this, s2, s3);
        public LikeSyntax Expression(string s2) => new Expression(this, s2, null);

        public LikeSyntax Brackets(ITokenClass tokenClass = null) => new Brackets(this, tokenClass);
    }

    /// <summary>
    /// Pattern matching a bracketed expression, optionally requiring a specific
    /// bracket token class.
    /// </summary>
    class Brackets : LikeSyntax
    {
        readonly LikeSyntax Target;
        readonly ITokenClass TokenClass;

        public Brackets(LikeSyntax target, ITokenClass tokenClass)
        {
            Target = target;
            TokenClass = tokenClass;
        }

        internal override void AssertLike(BinaryTree target)
        {
            // Expected shape: right parenthesis node whose Left is the matching
            // left parenthesis carrying the bracket content as its Right.
            (target.Left != null).Assert();
            (target.TokenClass is RightParenthesis).Assert();
            (target.Right == null).Assert();
            (target.Left.Left == null).Assert();
            (target.Left.TokenClass is LeftParenthesis).Assert();
            target.Left.TokenClass.IsBelongingTo(target.TokenClass).Assert();
            if(TokenClass != null)
                (target.TokenClass == TokenClass || target.Left.TokenClass == TokenClass).Assert();
            Target.AssertLike(target.Left.Right);
        }

        // NOTE(review): asserts EmptyList regardless of Target — confirm whether the
        // inner pattern should also be checked against the value syntax.
        internal override void AssertLike(ValueSyntax syntax) => (syntax is EmptyList).Assert();
    }

    /// <summary>
    /// Pattern matching an empty bracket pair "()".
    /// </summary>
    sealed class Empty : LikeSyntax
    {
        internal override void AssertLike(BinaryTree target)
        {
            (target.Left != null).Assert();
            (target.TokenClass is RightParenthesis).Assert();
            // Fix: removed a duplicated "(target.Right == null).Assert();" line.
            (target.Right == null).Assert();
            (target.Left.Left == null).Assert();
            (target.Left.TokenClass is LeftParenthesis).Assert();
            target.Left.TokenClass.IsBelongingTo(target.TokenClass).Assert();
        }

        internal override void AssertLike(ValueSyntax syntax) => (syntax is EmptyList).Assert();
    }

    /// <summary>
    /// Expected name/position pair inside a compound.
    /// </summary>
    sealed class Declaration
    {
        readonly string Name;
        readonly int Position;

        public Declaration(string name, int position)
        {
            Name = name;
            Position = position;
        }

        // Asserts the compound declares Name at the expected statement position.
        public void AssertContains(CompoundSyntax container)
        {
            var s = container.Find(Name, false);
            (s != null).Assert();
            (Position == s.Value).Assert();
        }
    }

    /// <summary>
    /// Pattern matching a compound (struct-like) syntax: its statements,
    /// declarations and converter positions.
    /// </summary>
    sealed class Struct : LikeSyntax
    {
        readonly int[] Converters;
        readonly Declaration[] Declarations;
        readonly LikeSyntax[] List;

        public Struct(LikeSyntax[] list, Declaration[] declarations, int[] converters)
        {
            List = list;
            Declarations = declarations;
            Converters = converters;
        }

        internal override void AssertLike(BinaryTree target) => NotImplementedMethod(target);

        internal override void AssertLike(ValueSyntax syntax)
        {
            var co = (CompoundSyntax)syntax;

            (List.Length == co.PureStatements.Length).Assert();
            for(var i = 0; i < List.Length; i++)
                List[i].AssertLike(co.PureStatements[i]);

            (Declarations.Length == co.AllNames.Length).Assert();
            foreach(var declaration in Declarations)
                declaration.AssertContains(co);

            (Converters.Length == co.ConverterStatementPositions.Length).Assert();
            for(var i = 0; i < Converters.Length; i++)
                (Converters[i] == co.ConverterStatementPositions[i]).Assert();
        }
    }

    /// <summary>
    /// Pattern matching a binary expression: left operand, token id, right operand.
    /// Null operands assert the corresponding side is absent.
    /// </summary>
    sealed class Expression : LikeSyntax
    {
        [EnableDump]
        readonly LikeSyntax S1;

        [EnableDump]
        readonly string S2;

        [EnableDump]
        readonly LikeSyntax S3;

        public Expression(LikeSyntax s1, string s2, LikeSyntax s3)
        {
            S1 = s1;
            S2 = s2;
            S3 = s3;
        }

        internal override void AssertLike(BinaryTree target)
        {
            (target.TokenClass.Id == S2).Assert(() => $"\nTarget: {target.Dump()}\nPattern: {Dump()}");
            AssertLike(S1, target.Left);
            AssertLike(S3, target.Right);
        }

        internal override void AssertLike(ValueSyntax syntax)
        {
            var ex = (ExpressionSyntax)syntax;
            AssertLike(S1, ex.Left);
            (ex.Definable?.Id == S2).Assert();
            AssertLike(S3, ex.Right);
        }

        static void AssertLike(LikeSyntax syntax, ValueSyntax right)
        {
            if(syntax == null)
                (right == null).Assert();
            else
                syntax.AssertLike(right);
        }

        static void AssertLike(LikeSyntax syntax, BinaryTree right)
        {
            if(syntax == null)
                (right == null).Assert();
            else
                syntax.AssertLike(right);
        }
    }

    /// <summary>
    /// Pattern matching a numeric literal with the given value.
    /// </summary>
    sealed class Number : LikeSyntax
    {
        readonly long I;

        internal Number(long i) => I = i;

        internal override void AssertLike(BinaryTree target)
        {
            (target.Left == null).Assert();
            (target.Right == null).Assert();
            (target.TokenClass is TokenClasses.Number).Assert();
            (BitsConst.Convert(target.Token.Id).ToInt64() == I).Assert();
        }

        internal override void AssertLike(ValueSyntax syntax)
        {
            var terminalSyntax = (TerminalSyntax)syntax;
            (terminalSyntax.Terminal is TokenClasses.Number).Assert();
            (terminalSyntax.ToNumber == I).Assert();
        }
    }
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using Microsoft.MixedReality.Toolkit.Input;
using UnityEngine;
using UnityEngine.Events;

#if WINDOWS_UWP
using Windows.Globalization;
using Windows.UI.ViewManagement;
using Microsoft.MixedReality.Toolkit.Utilities;
using System.Collections;
#endif

namespace Microsoft.MixedReality.Toolkit.Experimental.UI
{
    /// <summary>
    /// Base class for objects that wish to launch and hide a system keyboard specifically for Windows Mixed Reality
    /// devices (HoloLens 2, Windows Mixed Reality).
    ///
    /// Implements a workaround for UWP TouchScreenKeyboard bug which prevents
    /// UWP keyboard from showing up again after it is closed.
    /// Unity bug tracking the issue https://fogbugz.unity3d.com/default.asp?1137074_rttdnt8t1lccmtd3
    /// </summary>
    /// <remarks>
    /// <para>If using Unity 2019 or 2020, make sure the version >= 2019.4.25 or 2020.3.2 to ensure the latest fixes for Unity keyboard bugs are present.</para>
    /// </remarks>
    public abstract class MixedRealityKeyboardBase : MonoBehaviour
    {
        #region Properties

        /// <summary>
        /// Returns true if the keyboard is currently open.
        /// </summary>
        public bool Visible => state == KeyboardState.Showing;

        /// <summary>
        /// Returns the index of the caret within the text.
        /// </summary>
        public int CaretIndex { get; private set; } = 0;

        [Experimental, SerializeField, Tooltip("Whether disable user's interaction with other UI elements while typing. Use this option to decrease the chance of keyboard getting accidentally closed.")]
        private bool disableUIInteractionWhenTyping = false;

        /// <summary>
        /// Whether disable user's interaction with other UI elements while typing.
        /// Use this option to decrease the chance of keyboard getting accidentally closed.
        /// </summary>
        public bool DisableUIInteractionWhenTyping
        {
            get => disableUIInteractionWhenTyping;
            set
            {
                // If the feature is being turned off while input processing is paused,
                // resume processing immediately so the UI is not left unresponsive.
                if (value != disableUIInteractionWhenTyping && value == false && inputModule != null && inputModule.ProcessPaused)
                {
                    inputModule.ProcessPaused = false;
                }

                disableUIInteractionWhenTyping = value;
            }
        }

        [SerializeField, Tooltip("Event which triggers when the keyboard is shown.")]
        private UnityEvent onShowKeyboard = new UnityEvent();

        /// <summary>
        /// Event which triggers when the keyboard is shown.
        /// </summary>
        public UnityEvent OnShowKeyboard
        {
            get => onShowKeyboard;
            set => onShowKeyboard = value;
        }

        [SerializeField, Tooltip("Event which triggers when commit action is invoked on the keyboard. (Usually the return key.)")]
        private UnityEvent onCommitText = new UnityEvent();

        /// <summary>
        /// Event which triggers when commit action is invoked on the keyboard. (Usually the return key.)
        /// </summary>
        public UnityEvent OnCommitText
        {
            get => onCommitText;
            set => onCommitText = value;
        }

        [SerializeField, Tooltip("Event which triggers when the keyboard is hidden.")]
        private UnityEvent onHideKeyboard = new UnityEvent();

        /// <summary>
        /// Event which triggers when the keyboard is hidden.
        /// </summary>
        public UnityEvent OnHideKeyboard
        {
            get => onHideKeyboard;
            set => onHideKeyboard = value;
        }

        #endregion properties

        #region Private enums

        // Keyboard visibility state machine.
        // NOTE(review): KeyboardState.Hiding is declared but never assigned anywhere
        // in this class — confirm whether a derived class uses it or it is vestigial.
        private enum KeyboardState
        {
            Hiding,
            Hidden,
            Showing,
        }

        #endregion Private enums

        #region Private fields

        private KeyboardState state = KeyboardState.Hidden;

        // Whether the return key commits (false) or inserts a newline (true); set per ShowKeyboard call.
        private bool multiLine = false;

        private MixedRealityInputModule inputModule = null;

#if WINDOWS_UWP
        // UWP system input pane (the OS keyboard surface).
        private InputPane inputPane = null;

        // Unity's handle to the platform touch keyboard.
        private TouchScreenKeyboard keyboard = null;

        // Coroutine polling keyboard text while the keyboard is showing.
        private Coroutine stateUpdate;

        // Last observed input-method language tag; used to detect language switches.
        private string keyboardLanguage = string.Empty;
#endif

        #endregion Private fields

        #region MonoBehaviour Implementation

#if WINDOWS_UWP
        protected virtual void Awake()
        {
            inputModule = CameraCache.Main.GetComponent<MixedRealityInputModule>();
        }

        /// <summary>
        /// Initializes the UWP input pane.
        /// </summary>
        protected virtual void Start()
        {
            // InputPane must be accessed from the UI thread.
            UnityEngine.WSA.Application.InvokeOnUIThread(() =>
            {
                inputPane = InputPane.GetForCurrentView();
                inputPane.Hiding += OnInputPaneHiding;
                inputPane.Showing += OnInputPaneShowing;
            }, false);
        }

        private void OnInputPaneHiding(InputPane inputPane, InputPaneVisibilityEventArgs args)
        {
            OnKeyboardHiding();

            // Resume normal input processing when the system keyboard goes away.
            if (DisableUIInteractionWhenTyping && inputModule != null)
            {
                inputModule.ProcessPaused = false;
            }
        }

        private void OnInputPaneShowing(InputPane inputPane, InputPaneVisibilityEventArgs args)
        {
            OnKeyboardShowing();

            // Pause other UI interaction while the system keyboard is up.
            if (DisableUIInteractionWhenTyping && inputModule != null)
            {
                inputModule.ProcessPaused = true;
            }
        }

        void OnDestroy()
        {
            // Re-fetches the pane on the UI thread before unsubscribing (the cached
            // field may not be safe to touch from this callback).
            UnityEngine.WSA.Application.InvokeOnUIThread(() =>
            {
                inputPane = InputPane.GetForCurrentView();
                inputPane.Hiding -= OnInputPaneHiding;
                inputPane.Showing -= OnInputPaneShowing;
            }, false);
        }

        // Per-frame poll: while the keyboard is showing, pull text/caret updates from it.
        private IEnumerator UpdateState()
        {
            while (true)
            {
                switch (state)
                {
                    case KeyboardState.Showing:
                        {
                            UpdateText();
                        }
                        break;
                }

                yield return null;
            }
        }
#endif // WINDOWS_UWP

        private void OnDisable()
        {
            HideKeyboard();
        }

        #endregion MonoBehaviour Implementation

        // The text being edited; storage is supplied by the derived class.
        public abstract string Text { get; protected set; }

        /// <summary>
        /// Closes the keyboard for user interaction.
        /// </summary>
        public void HideKeyboard()
        {
            // Transitions straight to Hidden (the Hiding state is not used here).
            if (state != KeyboardState.Hidden)
            {
                state = KeyboardState.Hidden;
            }

#if WINDOWS_UWP
            UnityEngine.WSA.Application.InvokeOnUIThread(() => inputPane?.TryHide(), false);

            // Stop polling for text updates.
            if (stateUpdate != null)
            {
                StopCoroutine(stateUpdate);
                stateUpdate = null;
            }
#endif
        }

        /// <summary>
        /// Opens the keyboard for user interaction.
        /// </summary>
        /// <param name="text">Initial text to populate the keyboard with.</param>
        /// <param name="multiLine">True, if the return key should signal a newline rather than a commit.</param>
        public virtual void ShowKeyboard(string text = "", bool multiLine = false)
        {
            Text = text;
            this.multiLine = multiLine;

            // 2019/08/14: We show the keyboard even when the keyboard is already visible because on HoloLens 1
            // and WMR the events OnKeyboardShowing and OnKeyboardHiding do not fire
            // if (state == KeyboardState.Showing)
            // {
            //     Debug.Log($"MixedRealityKeyboard.ShowKeyboard called but keyboard already visible.");
            //     return;
            // }

            state = KeyboardState.Showing;

#if WINDOWS_UWP
            if (keyboard != null)
            {
                // Reuse the existing keyboard instance; just push the text and re-show the pane.
                keyboard.text = Text;
                UnityEngine.WSA.Application.InvokeOnUIThread(() => inputPane?.TryShow(), false);
            }
            else
            {
                keyboard = TouchScreenKeyboard.Open(Text, TouchScreenKeyboardType.Default, false, this.multiLine, false, false);
            }

            onShowKeyboard?.Invoke();

#if UNITY_2019_3_OR_NEWER
            // Place the system caret at the end of the initial text (zero-length selection).
            keyboard.selection = new RangeInt(Text.Length, 0);
#endif

            MovePreviewCaretToEnd();

            if (stateUpdate == null)
            {
                stateUpdate = StartCoroutine(UpdateState());
            }
#endif
        }

        /// <summary>
        /// Removes the current text from the keyboard.
        /// </summary>
        public virtual void ClearKeyboardText()
        {
            Text = string.Empty;
            CaretIndex = 0;

#if WINDOWS_UWP
            if (keyboard != null)
            {
                keyboard.text = string.Empty;
            }
#endif
        }

#if WINDOWS_UWP
        // Synchronizes Text/CaretIndex with the platform keyboard. On Unity 2019.3+
        // this is a direct read; on older Unity versions the text/caret must be
        // reconstructed from key events and language-change heuristics.
        private void UpdateText()
        {
            if (keyboard != null)
            {
#if UNITY_2019_3_OR_NEWER
                Text = keyboard.text;
                CaretIndex = keyboard.selection.end;
#else
                // Check the current language of the keyboard
                string newKeyboardLanguage = Language.CurrentInputMethodLanguageTag;
                if (newKeyboardLanguage != keyboardLanguage)
                {
                    keyboard.text = Text;

                    // For the languages requiring IME (Chinese, Japanese and Korean) move the caret to the end
                    // As we do not support editing in the middle of a string
                    if (IsIMERequired(newKeyboardLanguage))
                    {
                        MovePreviewCaretToEnd();
                    }
                }
                keyboardLanguage = newKeyboardLanguage;

                // How many characters the platform keyboard gained (or lost, if negative).
                var characterDelta = keyboard.text.Length - Text.Length;

                // Handle character deletion.
                if (UnityEngine.Input.GetKey(KeyCode.Backspace) || UnityEngine.Input.GetKeyDown(KeyCode.Backspace))
                {
                    // Handle languages requiring IME
                    if (Text.Length > keyboard.text.Length && IsIMERequired(keyboardLanguage))
                    {
                        Text = keyboard.text;
                        CaretIndex = Mathf.Clamp(CaretIndex + characterDelta, 0, Text.Length);
                    }
                    else if (CaretIndex > 0)
                    {
                        // Delete the character before the preview caret and mirror into the keyboard.
                        Text = Text.Remove(CaretIndex - 1, 1);
                        keyboard.text = Text;
                        --CaretIndex;
                    }
                }
                // Handle other character changes for languages requiring IME
                else if (IsIMERequired(keyboardLanguage))
                {
                    Text = keyboard.text;
                    MovePreviewCaretToEnd();
                }
                else
                {
                    // Add the new characters.
                    var caretWasAtEnd = IsPreviewCaretAtEnd();

                    if (characterDelta > 0)
                    {
                        // New characters are assumed to be appended at the end of the
                        // keyboard text; insert them at the preview caret position.
                        var newCharacters = keyboard.text.Substring(Text.Length, characterDelta);
                        Text = Text.Insert(CaretIndex, newCharacters);

                        if (keyboard.text != Text)
                        {
                            keyboard.text = Text;
                        }

                        if (caretWasAtEnd)
                        {
                            MovePreviewCaretToEnd();
                        }
                        else
                        {
                            CaretIndex += newCharacters.Length;
                        }
                    }

                    // Handle the arrow keys.
                    if (UnityEngine.Input.GetKeyDown(KeyCode.LeftArrow) || UnityEngine.Input.GetKey(KeyCode.LeftArrow))
                    {
                        CaretIndex = Mathf.Clamp(CaretIndex - 1, 0, Text.Length);
                    }

                    if (UnityEngine.Input.GetKeyDown(KeyCode.RightArrow) || UnityEngine.Input.GetKey(KeyCode.RightArrow))
                    {
                        CaretIndex = Mathf.Clamp(CaretIndex + 1, 0, Text.Length);
                    }
                }
#endif

                // Handle commit via the return key.
                if (!multiLine)
                {
                    if (UnityEngine.Input.GetKeyDown(KeyCode.Return))
                    {
                        onCommitText?.Invoke();
                        HideKeyboard();
                    }
                }

                SyncCaret();
            }
        }

        private bool IsPreviewCaretAtEnd() => CaretIndex == Text.Length;

        private void MovePreviewCaretToEnd() => CaretIndex = Text.Length;

        private void OnKeyboardHiding()
        {
            // The hide event arrives on the UI thread; dispatch the UnityEvent back to the app thread.
            UnityEngine.WSA.Application.InvokeOnAppThread(() => onHideKeyboard?.Invoke(), false);
            state = KeyboardState.Hidden;
        }

        private void OnKeyboardShowing() { }

        // True for input-method languages (Chinese/Japanese/Korean) that need IME composition.
        private bool IsIMERequired(string language)
        {
            return language.StartsWith("zh") || language.StartsWith("ja") || language.StartsWith("ko");
        }
#endif

        // Hook for derived classes to reposition their visual caret after a text update.
        protected virtual void SyncCaret() { }
    }
}
// // Copyright (c) 2004-2018 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. 
//
using System.Collections.Generic;

namespace NLog.UnitTests.LayoutRenderers
{
    using System.Threading;
    using System.Diagnostics;
    using System;
    using System.Xml;
    using System.Reflection;
    using System.IO;
    using Xunit;
    using NLog.Internal;

    /// <summary>
    /// Tests for the ${log4jxmlevent} layout renderer: verifies the emitted log4j XML
    /// fragment round-trips through an XmlReader with the expected elements/attributes,
    /// and that invalid XML characters are stripped by WriteElementSafeString.
    /// </summary>
    public class Log4JXmlTests : NLogTestBase
    {
        [Fact]
        public void Log4JXmlTest()
        {
            // Configure a Debug target whose layout is a single log4jxmlevent with
            // every include* option enabled and a custom NDC item separator ("::").
            LogManager.Configuration = CreateConfigurationFromString(@" <nlog throwExceptions='true'> <targets> <target name='debug' type='Debug' layout='${log4jxmlevent:includeCallSite=true:includeSourceInfo=true:includeNdlc=true:includeMdc=true:IncludeNdc=true:includeMdlc=true:IncludeAllProperties=true:ndcItemSeparator=\:\::includenlogdata=true:loggerName=${logger}}' /> </targets> <rules> <logger name='*' minlevel='Debug' writeTo='debug' /> </rules> </nlog>");

            // Populate every diagnostics context flavor so the renderer has data to emit.
            MappedDiagnosticsContext.Clear();
            NestedDiagnosticsContext.Clear();
            MappedDiagnosticsContext.Set("foo1", "bar1");
            MappedDiagnosticsContext.Set("foo2", "bar2");

            MappedDiagnosticsLogicalContext.Clear();
            MappedDiagnosticsLogicalContext.Set("foo3", "bar3");

            NestedDiagnosticsLogicalContext.Push("boo1");
            NestedDiagnosticsLogicalContext.Push("boo2");

            NestedDiagnosticsContext.Push("baz1");
            NestedDiagnosticsContext.Push("baz2");
            NestedDiagnosticsContext.Push("baz3");

            ILogger logger = LogManager.GetLogger("A");

            // Log one event carrying a nested exception and an event-level property.
            var logEventInfo = LogEventInfo.Create(LogLevel.Debug, "A", new Exception("Hello Exception", new Exception("Goodbye Exception")), null, "some message");
            logEventInfo.Properties["nlogPropertyKey"] = "nlogPropertyValue";
            logger.Log(logEventInfo);
            string result = GetDebugLastMessage("debug");

            // The renderer emits a namespace-less fragment; wrap it in a dummy root
            // that declares the log4j/nlog prefixes so XmlReader can parse it.
            string wrappedResult = "<log4j:dummyRoot xmlns:log4j='http://log4j' xmlns:nlog='http://nlog'>" + result + "</log4j:dummyRoot>";

            Assert.NotEqual("", result);

            // make sure the XML can be read back and verify some fields
            StringReader stringReader = new StringReader(wrappedResult);

            // Count every prefix.localName seen, so required elements can be checked at the end.
            var foundsChilds = new Dictionary<string, int>();
            var requiredChilds = new List<string>
            {
                "log4j.event",
                "log4j.message",
                "log4j.NDC",
                "log4j.locationInfo",
                "nlog.locationInfo",
                "log4j.properties",
                "nlog.properties",
                "log4j.throwable",
                "log4j.data",
                "nlog.data",
            };

            using (XmlReader reader = XmlReader.Create(stringReader))
            {
                while (reader.Read())
                {
                    var key = reader.LocalName;
                    var fullKey = reader.Prefix + "." + key;

                    if (!foundsChilds.ContainsKey(fullKey))
                    {
                        foundsChilds[fullKey] = 0;
                    }
                    foundsChilds[fullKey]++;

                    // Verify the content of each log4j:* element.
                    if (reader.NodeType == XmlNodeType.Element && reader.Prefix == "log4j")
                    {
                        switch (reader.LocalName)
                        {
                            case "dummyRoot":
                                break;

                            case "event":
                                Assert.Equal("DEBUG", reader.GetAttribute("level"));
                                Assert.Equal("A", reader.GetAttribute("logger"));

                                // timestamp is milliseconds since the Unix epoch; allow 3s of skew.
                                var epochStart = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);

                                long timestamp = Convert.ToInt64(reader.GetAttribute("timestamp"));
                                var time = epochStart.AddMilliseconds(timestamp);
                                var now = DateTime.UtcNow;
                                Assert.True(now.Ticks - time.Ticks < TimeSpan.FromSeconds(3).Ticks);

                                Assert.Equal(Thread.CurrentThread.ManagedThreadId.ToString(), reader.GetAttribute("thread"));
                                break;

                            case "message":
                                reader.Read();
                                Assert.Equal("some message", reader.Value);
                                break;

                            case "NDC":
                                reader.Read();
                                // NDC items joined newest-first by the custom "::" separator,
                                // followed by the NDLC items.
                                Assert.Equal("baz3::baz2::baz1::boo2 boo1", reader.Value);
                                break;

                            case "locationInfo":
                                Assert.Equal(MethodBase.GetCurrentMethod().DeclaringType.FullName, reader.GetAttribute("class"));
                                Assert.Equal(MethodBase.GetCurrentMethod().ToString(), reader.GetAttribute("method"));
                                break;

                            case "properties":
                                break;

                            case "throwable":
                                reader.Read();
                                Assert.Contains("Hello Exception", reader.Value);
                                Assert.Contains("Goodbye Exception", reader.Value);
                                break;

                            case "data":
                                // MDC entries plus the renderer's built-in app/machine entries.
                                string name = reader.GetAttribute("name");
                                string value = reader.GetAttribute("value");
                                switch (name)
                                {
                                    case "log4japp":
                                        Assert.Equal(AppDomain.CurrentDomain.FriendlyName + "(" + Process.GetCurrentProcess().Id + ")", value);
                                        break;
                                    case "log4jmachinename":
                                        Assert.Equal(Environment.MachineName, value);
                                        break;
                                    case "foo1":
                                        Assert.Equal("bar1", value);
                                        break;
                                    case "foo2":
                                        Assert.Equal("bar2", value);
                                        break;
                                    case "foo3":
                                        Assert.Equal("bar3", value);
                                        break;
                                    case "nlogPropertyKey":
                                        Assert.Equal("nlogPropertyValue", value);
                                        break;
                                    default:
                                        Assert.True(false, "Unknown <log4j:data>: " + name);
                                        break;
                                }
                                break;

                            default:
                                throw new NotSupportedException("Unknown element: " + key);
                        }
                        continue;
                    }

                    // Verify the content of each nlog:* element.
                    if (reader.NodeType == XmlNodeType.Element && reader.Prefix == "nlog")
                    {
                        switch (key)
                        {
                            case "eventSequenceNumber":
                                break;

                            case "locationInfo":
                                Assert.Equal(GetType().Assembly.FullName, reader.GetAttribute("assembly"));
                                break;

                            case "properties":
                                break;

                            case "data":
                                var name = reader.GetAttribute("name");
                                var value = reader.GetAttribute("value");
                                Assert.Equal("nlogPropertyKey", name);
                                Assert.Equal("nlogPropertyValue", value);
                                break;

                            default:
                                throw new NotSupportedException("Unknown element: " + key);
                        }
                    }
                }
            }

            // Every required element must have appeared at least once.
            foreach (var required in requiredChilds)
            {
                Assert.True(foundsChilds.ContainsKey(required), $"{required} not found!");
            }
        }

        // NOTE(review): this [Fact] is non-public; xUnit only discovers public test
        // methods, so this test may silently never run — confirm.
        [Fact]
        void BadXmlValueTest()
        {
            // Build a string containing every char that IS valid in XML:
            // skip surrogates, the Unicode non-characters U+FDD0..U+FDEF region
            // bounds used here (64976..65007), and U+FFFE/U+FFFF.
            var sb = new System.Text.StringBuilder();
            var forbidden = new HashSet<int>();
            int start = 64976;
            int end = 65007;
            for (int i = start; i <= end; i++)
            {
                forbidden.Add(i);
            }
            forbidden.Add(0xFFFE);
            forbidden.Add(0xFFFF);
            for (int i = char.MinValue; i <= char.MaxValue; i++)
            {
                char c = Convert.ToChar(i);
                if (char.IsSurrogate(c))
                {
                    continue; // skip surrogates
                }
                if (forbidden.Contains(c))
                {
                    continue;
                }
                sb.Append(c);
            }
            var badString = sb.ToString();

            var settings = new XmlWriterSettings
            {
                Indent = true,
                ConformanceLevel = ConformanceLevel.Fragment,
                IndentChars = " ",
            };

            // Write the string through WriteElementSafeString, which is expected
            // to strip characters that XML cannot represent.
            sb.Length = 0;
            using (XmlWriter xtw = XmlWriter.Create(sb, settings))
            {
                xtw.WriteStartElement("log4j", "event", "http:://hello/");
                xtw.WriteElementSafeString("log4j", "message", "http:://hello/", badString);
                xtw.WriteEndElement();
                xtw.Flush();
            }

            // Read the result back; it must parse, contain the printable marker
            // characters ("abc"), and be shorter than the input (chars were stripped).
            string goodString = null;
            using (XmlReader reader = XmlReader.Create(new StringReader(sb.ToString())))
            {
                while (reader.Read())
                {
                    if (reader.NodeType == XmlNodeType.Text)
                    {
                        if (reader.Value.Contains("abc"))
                            goodString = reader.Value;
                    }
                }
            }

            Assert.NotNull(goodString);
            Assert.NotEqual(badString.Length, goodString.Length);
            Assert.Contains("abc", badString);
            Assert.Contains("abc", goodString);
        }
    }
}
// -----------------------------------------------------------------------------------------
// <copyright file="CloudStorageAccountTests.cs" company="Microsoft">
//    Copyright 2013 Microsoft Corporation
//
//    Licensed under the Apache License, Version 2.0 (the "License");
//    you may not use this file except in compliance with the License.
//    You may obtain a copy of the License at
//      http://www.apache.org/licenses/LICENSE-2.0
//
//    Unless required by applicable law or agreed to in writing, software
//    distributed under the License is distributed on an "AS IS" BASIS,
//    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//    See the License for the specific language governing permissions and
//    limitations under the License.
// </copyright>
// -----------------------------------------------------------------------------------------

using Microsoft.VisualStudio.TestTools.UnitTesting;
using Microsoft.Azure.Storage.Auth;
using Microsoft.Azure.Storage.Blob;
using Microsoft.Azure.Storage.File;
using Microsoft.Azure.Storage.Queue;
using Microsoft.Azure.Storage.Shared.Protocol;
using System;
using System.Globalization;

namespace Microsoft.Azure.Storage.Core.Util
{
    /// <summary>
    /// Unit tests for <see cref="StorageCredentials"/> (anonymous / shared-key / SAS /
    /// token variants) and for <see cref="CloudStorageAccount"/> endpoint construction
    /// and connection-string round-tripping.
    /// </summary>
    [TestClass]
    public class CloudStorageAccountTests : TestBase
    {
        // Minimal well-formed SAS token used by the SAS credential tests.
        private string token = "?sp=abcde&sig=1";
        // Same token with an api-version parameter, which StorageCredentials must reject.
        private string tokenWithApiVersion = "?sp=abcde&sig=1&api-version=2015-04-05";

        [TestMethod]
        [Description("Anonymous credentials")]
        [TestCategory(ComponentCategory.Auth)]
        [TestCategory(TestTypeCategory.UnitTest)]
        [TestCategory(SmokeTestCategory.NonSmoke)]
        [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
        public void StorageCredentialsAnonymous()
        {
            StorageCredentials cred = new StorageCredentials();

            Assert.IsNull(cred.AccountName);
            Assert.IsTrue(cred.IsAnonymous);
            Assert.IsFalse(cred.IsSAS);
            Assert.IsFalse(cred.IsSharedKey);

            // Anonymous credentials must leave URIs untouched.
            Uri testUri = new Uri("http://test/abc?querya=1");
            Assert.AreEqual(testUri, cred.TransformUri(testUri));

            // And must reject key updates.
            byte[] dummyKey = { 0, 1, 2 };
            string base64EncodedDummyKey = Convert.ToBase64String(dummyKey);
            TestHelper.ExpectedException<InvalidOperationException>(
                () => cred.UpdateKey(base64EncodedDummyKey),
                "Updating shared key on an anonymous credentials instance should fail.");
        }

        [TestMethod]
        [Description("Shared key credentials")]
        [TestCategory(ComponentCategory.Auth)]
        [TestCategory(TestTypeCategory.UnitTest)]
        [TestCategory(SmokeTestCategory.NonSmoke)]
        [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
        public void StorageCredentialsSharedKey()
        {
            StorageCredentials cred = new StorageCredentials(TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey);

            Assert.AreEqual(TestBase.TargetTenantConfig.AccountName, cred.AccountName, false);
            Assert.IsFalse(cred.IsAnonymous);
            Assert.IsFalse(cred.IsSAS);
            Assert.IsTrue(cred.IsSharedKey);

            // Shared-key credentials also leave URIs untouched (signing happens in headers).
            Uri testUri = new Uri("http://test/abc?querya=1");
            Assert.AreEqual(testUri, cred.TransformUri(testUri));

            Assert.AreEqual(TestBase.TargetTenantConfig.AccountKey, cred.ExportBase64EncodedKey());

            // Key can be rotated via the string overload...
            byte[] dummyKey = { 0, 1, 2 };
            string base64EncodedDummyKey = Convert.ToBase64String(dummyKey);
            cred.UpdateKey(base64EncodedDummyKey);
            Assert.AreEqual(base64EncodedDummyKey, cred.ExportBase64EncodedKey());

#if !(WINDOWS_RT || NETCORE)
            // ...and via the byte[] overload on platforms that expose it.
            dummyKey[0] = 3;
            base64EncodedDummyKey = Convert.ToBase64String(dummyKey);
            cred.UpdateKey(dummyKey);
            Assert.AreEqual(base64EncodedDummyKey, cred.ExportBase64EncodedKey());
#endif
        }

        [TestMethod]
        [Description("SAS token credentials")]
        [TestCategory(ComponentCategory.Auth)]
        [TestCategory(TestTypeCategory.UnitTest)]
        [TestCategory(SmokeTestCategory.NonSmoke)]
        [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
        public void StorageCredentialsSAS()
        {
            StorageCredentials cred = new StorageCredentials(token);

            Assert.IsNull(cred.AccountName);
            Assert.IsFalse(cred.IsAnonymous);
            Assert.IsTrue(cred.IsSAS);
            Assert.IsFalse(cred.IsSharedKey);

            // TransformUri appends the SAS token plus the target api-version.
            Uri testUri = new Uri("http://test/abc");
            Assert.AreEqual(
                testUri.AbsoluteUri + token + "&" + Constants.QueryConstants.ApiVersion + "=" + Constants.HeaderConstants.TargetStorageVersion,
                cred.TransformUri(testUri).AbsoluteUri,
                true);

            // When the URI already has a query string, the token is appended with '&', not '?'.
            testUri = new Uri("http://test/abc?query=a&query2=b");
            string expectedUri = testUri.AbsoluteUri + "&" + token.Substring(1) + "&" + Constants.QueryConstants.ApiVersion + "=" + Constants.HeaderConstants.TargetStorageVersion;
            Assert.AreEqual(expectedUri, cred.TransformUri(testUri).AbsoluteUri, true);

            // SAS credentials reject key updates.
            byte[] dummyKey = { 0, 1, 2 };
            string base64EncodedDummyKey = Convert.ToBase64String(dummyKey);
            TestHelper.ExpectedException<InvalidOperationException>(
                () => cred.UpdateKey(base64EncodedDummyKey),
                "Updating shared key on a SAS credentials instance should fail.");

            // SAS tokens carrying an explicit api-version are rejected at construction.
            TestHelper.ExpectedException<ArgumentException>(
                () => new StorageCredentials(tokenWithApiVersion),
                "Unexpected 'api-version' parameter included in the SAS token.");
        }

        [TestMethod]
        [Description("CloudStorageAccount object with an empty key value.")]
        [TestCategory(ComponentCategory.Auth)]
        [TestCategory(TestTypeCategory.UnitTest)]
        [TestCategory(SmokeTestCategory.NonSmoke)]
        [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
        public void StorageCredentialsEmptyKeyValue()
        {
            string accountName = TestBase.TargetTenantConfig.AccountName;
            string keyValue = TestBase.TargetTenantConfig.AccountKey;
            string emptyKeyValueAsString = string.Empty;
            string emptyKeyConnectionString = string.Format(CultureInfo.InvariantCulture, "DefaultEndpointsProtocol=https;AccountName={0};AccountKey=", accountName);

            // An empty key is still treated as shared-key credentials.
            StorageCredentials credentials1 = new StorageCredentials(accountName, emptyKeyValueAsString);
            Assert.AreEqual(accountName, credentials1.AccountName);
            Assert.IsFalse(credentials1.IsAnonymous);
            Assert.IsFalse(credentials1.IsSAS);
            Assert.IsTrue(credentials1.IsSharedKey);
            Assert.AreEqual(emptyKeyValueAsString, Convert.ToBase64String(credentials1.ExportKey()));

            // Constructing an account from them serializes to the empty-key connection string.
            CloudStorageAccount account1 = new CloudStorageAccount(credentials1, true);
            Assert.AreEqual(emptyKeyConnectionString, account1.ToString(true));
            Assert.IsNotNull(account1.Credentials);
            Assert.AreEqual(accountName, account1.Credentials.AccountName);
            Assert.IsFalse(account1.Credentials.IsAnonymous);
            Assert.IsFalse(account1.Credentials.IsSAS);
            Assert.IsTrue(account1.Credentials.IsSharedKey);
            Assert.AreEqual(emptyKeyValueAsString, Convert.ToBase64String(account1.Credentials.ExportKey()));

            // Parse round-trips the empty-key connection string.
            CloudStorageAccount account2 = CloudStorageAccount.Parse(emptyKeyConnectionString);
            Assert.AreEqual(emptyKeyConnectionString, account2.ToString(true));
            Assert.IsNotNull(account2.Credentials);
            Assert.AreEqual(accountName, account2.Credentials.AccountName);
            Assert.IsFalse(account2.Credentials.IsAnonymous);
            Assert.IsFalse(account2.Credentials.IsSAS);
            Assert.IsTrue(account2.Credentials.IsSharedKey);
            Assert.AreEqual(emptyKeyValueAsString, Convert.ToBase64String(account2.Credentials.ExportKey()));

            // TryParse accepts it as well.
            CloudStorageAccount account3;
            bool isValidAccount3 = CloudStorageAccount.TryParse(emptyKeyConnectionString, out account3);
            Assert.IsTrue(isValidAccount3);
            Assert.IsNotNull(account3);
            Assert.AreEqual(emptyKeyConnectionString, account3.ToString(true));
            Assert.IsNotNull(account3.Credentials);
            Assert.AreEqual(accountName, account3.Credentials.AccountName);
            Assert.IsFalse(account3.Credentials.IsAnonymous);
            Assert.IsFalse(account3.Credentials.IsSAS);
            Assert.IsTrue(account3.Credentials.IsSharedKey);
            Assert.AreEqual(emptyKeyValueAsString, Convert.ToBase64String(account3.Credentials.ExportKey()));

            // An existing key can be replaced with an empty string key...
            StorageCredentials credentials2 = new StorageCredentials(accountName, keyValue);
            Assert.AreEqual(accountName, credentials2.AccountName);
            Assert.IsFalse(credentials2.IsAnonymous);
            Assert.IsFalse(credentials2.IsSAS);
            Assert.IsTrue(credentials2.IsSharedKey);
            Assert.AreEqual(keyValue, Convert.ToBase64String(credentials2.ExportKey()));
            credentials2.UpdateKey(emptyKeyValueAsString, null);
            Assert.AreEqual(emptyKeyValueAsString, Convert.ToBase64String(credentials2.ExportKey()));

#if !(WINDOWS_RT || NETCORE)
            // ...and with an empty byte[] key on platforms exposing that overload.
            byte[] emptyKeyValueAsByteArray = new byte[0];

            StorageCredentials credentials3 = new StorageCredentials(accountName, keyValue);
            Assert.AreEqual(accountName, credentials3.AccountName);
            Assert.IsFalse(credentials3.IsAnonymous);
            Assert.IsFalse(credentials3.IsSAS);
            Assert.IsTrue(credentials3.IsSharedKey);
            Assert.AreEqual(keyValue, Convert.ToBase64String(credentials3.ExportKey()));
            credentials3.UpdateKey(emptyKeyValueAsByteArray, null);
            Assert.AreEqual(Convert.ToBase64String(emptyKeyValueAsByteArray), Convert.ToBase64String(credentials3.ExportKey()));
#endif
        }

        [TestMethod]
        [Description("CloudStorageAccount object with a null key value.")]
        [TestCategory(ComponentCategory.Auth)]
        [TestCategory(TestTypeCategory.UnitTest)]
        [TestCategory(SmokeTestCategory.NonSmoke)]
        [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
        public void StorageCredentialsNullKeyValue()
        {
            string accountName = TestBase.TargetTenantConfig.AccountName;
            string keyValue = TestBase.TargetTenantConfig.AccountKey;
            string nullKeyValueAsString = null;

            // Null keys are rejected at construction...
            TestHelper.ExpectedException<ArgumentNullException>(() =>
            {
                StorageCredentials credentials1 = new StorageCredentials(accountName, nullKeyValueAsString);
            }, "Cannot create key with a null value.");

            // ...and on update (string overload).
            StorageCredentials credentials2 = new StorageCredentials(accountName, keyValue);
            Assert.AreEqual(accountName, credentials2.AccountName);
            Assert.IsFalse(credentials2.IsAnonymous);
            Assert.IsFalse(credentials2.IsSAS);
            Assert.IsTrue(credentials2.IsSharedKey);
            Assert.AreEqual(keyValue, Convert.ToBase64String(credentials2.ExportKey()));
            TestHelper.ExpectedException<ArgumentNullException>(() =>
            {
                credentials2.UpdateKey(nullKeyValueAsString, null);
            }, "Cannot update key with a null string value.");

#if !(WINDOWS_RT || NETCORE)
            // ...and on update (byte[] overload) where available.
            byte[] nullKeyValueAsByteArray = null;

            StorageCredentials credentials3 = new StorageCredentials(accountName, keyValue);
            Assert.AreEqual(accountName, credentials3.AccountName);
            Assert.IsFalse(credentials3.IsAnonymous);
            Assert.IsFalse(credentials3.IsSAS);
            Assert.IsTrue(credentials3.IsSharedKey);
            Assert.AreEqual(keyValue, Convert.ToBase64String(credentials3.ExportKey()));
            TestHelper.ExpectedException<ArgumentNullException>(() =>
            {
                credentials3.UpdateKey(nullKeyValueAsByteArray, null);
            }, "Cannot update key with a null byte array value.");
#endif
        }

        [TestMethod]
        [Description("Compare credentials for equality")]
        [TestCategory(ComponentCategory.Auth)]
        [TestCategory(TestTypeCategory.UnitTest)]
        [TestCategory(SmokeTestCategory.NonSmoke)]
        [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
        public void StorageCredentialsEquality()
        {
            // Same-kind credentials with identical inputs are equal; any difference
            // in account name, key, token, or kind makes them unequal.
            StorageCredentials credSharedKey1 = new StorageCredentials(TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey);
            StorageCredentials credSharedKey2 = new StorageCredentials(TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey);
            StorageCredentials credSharedKey3 = new StorageCredentials(TestBase.TargetTenantConfig.AccountName + "1", TestBase.TargetTenantConfig.AccountKey);
            StorageCredentials credSharedKey4 = new StorageCredentials(TestBase.TargetTenantConfig.AccountName, Convert.ToBase64String(new byte[] { 0, 1, 2 }));
            StorageCredentials credSAS1 = new StorageCredentials(token);
            StorageCredentials credSAS2 = new StorageCredentials(token);
            StorageCredentials credSAS3 = new StorageCredentials(token + "1");
            StorageCredentials credAnonymous1 = new StorageCredentials();
            StorageCredentials credAnonymous2 = new StorageCredentials();
            StorageCredentials tokenCredential1 = new StorageCredentials(new TokenCredential("0"));
            StorageCredentials tokenCredential2 = new StorageCredentials(new TokenCredential("1"));
            StorageCredentials tokenCredential3 = new StorageCredentials(new TokenCredential("0"));

            Assert.IsTrue(credSharedKey1.Equals(credSharedKey2));
            Assert.IsFalse(credSharedKey1.Equals(credSharedKey3));
            Assert.IsFalse(credSharedKey1.Equals(credSharedKey4));
            Assert.IsTrue(credSAS1.Equals(credSAS2));
            Assert.IsFalse(credSAS1.Equals(credSAS3));
            Assert.IsTrue(credAnonymous1.Equals(credAnonymous2));
            Assert.IsFalse(credSharedKey1.Equals(credSAS1));
            Assert.IsFalse(credSharedKey1.Equals(credAnonymous1));
            Assert.IsFalse(credSAS1.Equals(credAnonymous1));
            Assert.IsFalse(tokenCredential1.Equals(tokenCredential2));
            Assert.IsTrue(tokenCredential1.Equals(tokenCredential3));
        }

        // Helper: asserts two accounts are equivalent in endpoints, storage URIs,
        // serialized form, and credential kind; fails if only one has credentials.
        private void AccountsAreEqual(CloudStorageAccount a, CloudStorageAccount b)
        {
            // endpoints are the same
            Assert.AreEqual(a.BlobEndpoint, b.BlobEndpoint);
            Assert.AreEqual(a.QueueEndpoint, b.QueueEndpoint);
            Assert.AreEqual(a.TableEndpoint, b.TableEndpoint);
            Assert.AreEqual(a.FileEndpoint, b.FileEndpoint);

            // storage uris are the same
            Assert.AreEqual(a.BlobStorageUri, b.BlobStorageUri);
            Assert.AreEqual(a.QueueStorageUri, b.QueueStorageUri);
            Assert.AreEqual(a.TableStorageUri, b.TableStorageUri);
            Assert.AreEqual(a.FileStorageUri, b.FileStorageUri);

            // serialized representations are the same
            string aToStringNoSecrets = a.ToString();
            string aToStringWithSecrets = a.ToString(true);
            string bToStringNoSecrets = b.ToString(false);
            string bToStringWithSecrets = b.ToString(true);
            Assert.AreEqual(aToStringNoSecrets, bToStringNoSecrets, false);
            Assert.AreEqual(aToStringWithSecrets, bToStringWithSecrets, false);

            // credentials are the same
            if (a.Credentials != null && b.Credentials != null)
            {
                Assert.AreEqual(a.Credentials.IsAnonymous, b.Credentials.IsAnonymous);
                Assert.AreEqual(a.Credentials.IsSAS, b.Credentials.IsSAS);
                Assert.AreEqual(a.Credentials.IsSharedKey, b.Credentials.IsSharedKey);

                // make sure the with-secrets form actually differs from the redacted form
                // (i.e. secrets are not leaked into the no-secrets serialization)
                if (!a.Credentials.IsAnonymous &&
                    a.Credentials != CloudStorageAccount.DevelopmentStorageAccount.Credentials &&
                    b.Credentials != CloudStorageAccount.DevelopmentStorageAccount.Credentials)
                {
                    Assert.AreNotEqual(aToStringWithSecrets, bToStringNoSecrets, true);
                }
            }
            else if (a.Credentials == null && b.Credentials == null)
            {
                return;
            }
            else
            {
                Assert.Fail("credentials mismatch");
            }
        }

        [TestMethod]
        [Description("DevStore account")]
        [TestCategory(ComponentCategory.Core)]
        [TestCategory(TestTypeCategory.UnitTest)]
        [TestCategory(SmokeTestCategory.NonSmoke)]
        [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
        public void CloudStorageAccountDevelopmentStorageAccount()
        {
            // The well-known storage-emulator account: fixed localhost endpoints,
            // "-secondary" suffixed secondary URIs, no file endpoint.
            CloudStorageAccount devstoreAccount = CloudStorageAccount.DevelopmentStorageAccount;
            Assert.AreEqual(devstoreAccount.BlobEndpoint, new Uri("http://127.0.0.1:10000/devstoreaccount1"));
            Assert.AreEqual(devstoreAccount.QueueEndpoint, new Uri("http://127.0.0.1:10001/devstoreaccount1"));
            Assert.AreEqual(devstoreAccount.TableEndpoint, new Uri("http://127.0.0.1:10002/devstoreaccount1"));
            Assert.AreEqual(devstoreAccount.BlobStorageUri.SecondaryUri, new Uri("http://127.0.0.1:10000/devstoreaccount1-secondary"));
            Assert.AreEqual(devstoreAccount.QueueStorageUri.SecondaryUri, new Uri("http://127.0.0.1:10001/devstoreaccount1-secondary"));
            Assert.AreEqual(devstoreAccount.TableStorageUri.SecondaryUri, new Uri("http://127.0.0.1:10002/devstoreaccount1-secondary"));
            Assert.IsNull(devstoreAccount.FileStorageUri);

            // Serialization with secrets must round-trip through Parse and TryParse.
            string devstoreAccountToStringWithSecrets = devstoreAccount.ToString(true);
            CloudStorageAccount testAccount = CloudStorageAccount.Parse(devstoreAccountToStringWithSecrets);

            AccountsAreEqual(testAccount, devstoreAccount);
            CloudStorageAccount acct;
            if (!CloudStorageAccount.TryParse(devstoreAccountToStringWithSecrets, out acct))
            {
                Assert.Fail("Expected TryParse success.");
            }
        }

        [TestMethod]
        [Description("Regular account with HTTP")]
        [TestCategory(ComponentCategory.Core)]
        [TestCategory(TestTypeCategory.UnitTest)]
        [TestCategory(SmokeTestCategory.NonSmoke)]
        [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
        public void CloudStorageAccountDefaultStorageAccountWithHttp()
        {
            // useHttps: false must produce http endpoints for all four services,
            // with "-secondary" host prefixes for the secondary URIs.
            StorageCredentials cred = new StorageCredentials(TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey);
            CloudStorageAccount cloudStorageAccount = new CloudStorageAccount(cred, false);
            Assert.AreEqual(cloudStorageAccount.BlobEndpoint, new Uri(String.Format("http://{0}.blob.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
            Assert.AreEqual(cloudStorageAccount.QueueEndpoint, new Uri(String.Format("http://{0}.queue.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
            Assert.AreEqual(cloudStorageAccount.TableEndpoint, new Uri(String.Format("http://{0}.table.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
            Assert.AreEqual(cloudStorageAccount.FileEndpoint, new Uri(String.Format("http://{0}.file.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
            Assert.AreEqual(cloudStorageAccount.BlobStorageUri.SecondaryUri, new Uri(String.Format("http://{0}-secondary.blob.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
            Assert.AreEqual(cloudStorageAccount.QueueStorageUri.SecondaryUri, new Uri(String.Format("http://{0}-secondary.queue.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
            Assert.AreEqual(cloudStorageAccount.TableStorageUri.SecondaryUri, new Uri(String.Format("http://{0}-secondary.table.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
            Assert.AreEqual(cloudStorageAccount.FileStorageUri.SecondaryUri, new Uri(String.Format("http://{0}-secondary.file.core.windows.net", TestBase.TargetTenantConfig.AccountName)));

            // Serialized form must round-trip through Parse.
            string cloudStorageAccountToStringNoSecrets = cloudStorageAccount.ToString();
            string cloudStorageAccountToStringWithSecrets = cloudStorageAccount.ToString(true);
            CloudStorageAccount testAccount = CloudStorageAccount.Parse(cloudStorageAccountToStringWithSecrets);

            AccountsAreEqual(testAccount, cloudStorageAccount);
        }

        [TestMethod]
        [Description("Regular account with HTTPS")]
        [TestCategory(ComponentCategory.Core)]
        [TestCategory(TestTypeCategory.UnitTest)]
        [TestCategory(SmokeTestCategory.NonSmoke)]
        [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
        public void CloudStorageAccountDefaultStorageAccountWithHttps()
        {
            // useHttps: true must produce https endpoints for all four services.
            StorageCredentials cred = new StorageCredentials(TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey);
            CloudStorageAccount cloudStorageAccount = new CloudStorageAccount(cred, true);
            Assert.AreEqual(cloudStorageAccount.BlobEndpoint, new Uri(String.Format("https://{0}.blob.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
            Assert.AreEqual(cloudStorageAccount.QueueEndpoint, new Uri(String.Format("https://{0}.queue.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
            Assert.AreEqual(cloudStorageAccount.TableEndpoint, new Uri(String.Format("https://{0}.table.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
            Assert.AreEqual(cloudStorageAccount.FileEndpoint, new Uri(String.Format("https://{0}.file.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
            // (method continues beyond this excerpt)
Assert.AreEqual(cloudStorageAccount.BlobStorageUri.SecondaryUri, new Uri(String.Format("https://{0}-secondary.blob.core.windows.net", TestBase.TargetTenantConfig.AccountName))); Assert.AreEqual(cloudStorageAccount.QueueStorageUri.SecondaryUri, new Uri(String.Format("https://{0}-secondary.queue.core.windows.net", TestBase.TargetTenantConfig.AccountName))); Assert.AreEqual(cloudStorageAccount.TableStorageUri.SecondaryUri, new Uri(String.Format("https://{0}-secondary.table.core.windows.net", TestBase.TargetTenantConfig.AccountName))); Assert.AreEqual(cloudStorageAccount.FileStorageUri.SecondaryUri, new Uri(String.Format("https://{0}-secondary.file.core.windows.net", TestBase.TargetTenantConfig.AccountName))); string cloudStorageAccountToStringWithSecrets = cloudStorageAccount.ToString(true); CloudStorageAccount testAccount = CloudStorageAccount.Parse(cloudStorageAccountToStringWithSecrets); AccountsAreEqual(testAccount, cloudStorageAccount); } [TestMethod] [Description("Regular account with HTTP")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountEndpointSuffixWithHttp() { const string TestEndpointSuffix = "fake.endpoint.suffix"; CloudStorageAccount cloudStorageAccount = CloudStorageAccount.Parse( string.Format( "DefaultEndpointsProtocol=http;AccountName={0};AccountKey={1};EndpointSuffix={2};", TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey, TestEndpointSuffix)); Assert.AreEqual(cloudStorageAccount.BlobEndpoint, new Uri(String.Format("http://{0}.blob.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); Assert.AreEqual(cloudStorageAccount.QueueEndpoint, new Uri(String.Format("http://{0}.queue.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); 
Assert.AreEqual(cloudStorageAccount.TableEndpoint, new Uri(String.Format("http://{0}.table.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); Assert.AreEqual(cloudStorageAccount.FileEndpoint, new Uri(String.Format("http://{0}.file.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); Assert.AreEqual(cloudStorageAccount.BlobStorageUri.SecondaryUri, new Uri(String.Format("http://{0}-secondary.blob.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); Assert.AreEqual(cloudStorageAccount.QueueStorageUri.SecondaryUri, new Uri(String.Format("http://{0}-secondary.queue.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); Assert.AreEqual(cloudStorageAccount.TableStorageUri.SecondaryUri, new Uri(String.Format("http://{0}-secondary.table.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); Assert.AreEqual(cloudStorageAccount.FileStorageUri.SecondaryUri, new Uri(String.Format("http://{0}-secondary.file.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); string cloudStorageAccountToStringWithSecrets = cloudStorageAccount.ToString(true); CloudStorageAccount testAccount = CloudStorageAccount.Parse(cloudStorageAccountToStringWithSecrets); AccountsAreEqual(testAccount, cloudStorageAccount); } [TestMethod] [Description("Regular account with HTTPS")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountEndpointSuffixWithHttps() { const string TestEndpointSuffix = "fake.endpoint.suffix"; CloudStorageAccount cloudStorageAccount = CloudStorageAccount.Parse( string.Format( "DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1};EndpointSuffix={2};", TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey, TestEndpointSuffix)); 
Assert.AreEqual(cloudStorageAccount.BlobEndpoint, new Uri(String.Format("https://{0}.blob.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); Assert.AreEqual(cloudStorageAccount.QueueEndpoint, new Uri(String.Format("https://{0}.queue.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); Assert.AreEqual(cloudStorageAccount.TableEndpoint, new Uri(String.Format("https://{0}.table.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); Assert.AreEqual(cloudStorageAccount.FileEndpoint, new Uri(String.Format("https://{0}.file.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); Assert.AreEqual(cloudStorageAccount.BlobStorageUri.SecondaryUri, new Uri(String.Format("https://{0}-secondary.blob.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); Assert.AreEqual(cloudStorageAccount.QueueStorageUri.SecondaryUri, new Uri(String.Format("https://{0}-secondary.queue.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); Assert.AreEqual(cloudStorageAccount.TableStorageUri.SecondaryUri, new Uri(String.Format("https://{0}-secondary.table.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); Assert.AreEqual(cloudStorageAccount.FileStorageUri.SecondaryUri, new Uri(String.Format("https://{0}-secondary.file.{1}", TestBase.TargetTenantConfig.AccountName, TestEndpointSuffix))); string cloudStorageAccountToStringWithSecrets = cloudStorageAccount.ToString(true); CloudStorageAccount testAccount = CloudStorageAccount.Parse(cloudStorageAccountToStringWithSecrets); AccountsAreEqual(testAccount, cloudStorageAccount); } [TestMethod] [Description("Regular account with HTTP")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountEndpointSuffixWithBlob() { const 
string TestEndpointSuffix = "fake.endpoint.suffix"; const string AlternateBlobEndpoint = "http://blob.other.endpoint/"; CloudStorageAccount testAccount = CloudStorageAccount.Parse( string.Format( "DefaultEndpointsProtocol=http;AccountName={0};AccountKey={1};EndpointSuffix={2};BlobEndpoint={3}", TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey, TestEndpointSuffix, AlternateBlobEndpoint)); CloudStorageAccount cloudStorageAccount = CloudStorageAccount.Parse(testAccount.ToString(true)); // make sure it round trips this.AccountsAreEqual(testAccount, cloudStorageAccount); } [TestMethod] [Description("Regular account with HTTP")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountConnectionStringRoundtrip() { string[] accountKeyParams = new[] { TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey, "fake.endpoint.suffix", "https://primary.endpoint/", "https://secondary.endpoint/" }; string[] accountSasParams = new[] { TestBase.TargetTenantConfig.AccountName, "sasTest", "fake.endpoint.suffix", "https://primary.endpoint/", "https://secondary.endpoint/" }; // account key string accountString1 = string.Format( "DefaultEndpointsProtocol=http;AccountName={0};AccountKey={1};EndpointSuffix={2};", accountKeyParams); string accountString2 = string.Format( "DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1};", accountKeyParams); string accountString3 = string.Format( "DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1};QueueEndpoint={3}", accountKeyParams); string accountString4 = string.Format( "DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1};EndpointSuffix={2};QueueEndpoint={3}", accountKeyParams); connectionStringRoundtripHelper(accountString1); 
connectionStringRoundtripHelper(accountString2); connectionStringRoundtripHelper(accountString3); connectionStringRoundtripHelper(accountString4); string accountString5 = string.Format( "AccountName={0};AccountKey={1};EndpointSuffix={2};", accountKeyParams); string accountString6 = string.Format( "AccountName={0};AccountKey={1};", accountKeyParams); string accountString7 = string.Format( "AccountName={0};AccountKey={1};QueueEndpoint={3}", accountKeyParams); string accountString8 = string.Format( "AccountName={0};AccountKey={1};EndpointSuffix={2};QueueEndpoint={3}", accountKeyParams); connectionStringRoundtripHelper(accountString5); connectionStringRoundtripHelper(accountString6); connectionStringRoundtripHelper(accountString7); connectionStringRoundtripHelper(accountString8); // shared access string accountString9 = string.Format( "DefaultEndpointsProtocol=http;AccountName={0};SharedAccessSignature={1};EndpointSuffix={2};", accountSasParams); string accountString10 = string.Format( "DefaultEndpointsProtocol=https;AccountName={0};SharedAccessSignature={1};", accountSasParams); string accountString11 = string.Format( "DefaultEndpointsProtocol=https;AccountName={0};SharedAccessSignature={1};QueueEndpoint={3}", accountSasParams); string accountString12 = string.Format( "DefaultEndpointsProtocol=https;AccountName={0};SharedAccessSignature={1};EndpointSuffix={2};QueueEndpoint={3}", accountSasParams); connectionStringRoundtripHelper(accountString9); connectionStringRoundtripHelper(accountString10); connectionStringRoundtripHelper(accountString11); connectionStringRoundtripHelper(accountString12); string accountString13 = string.Format( "AccountName={0};SharedAccessSignature={1};EndpointSuffix={2};", accountSasParams); string accountString14 = string.Format( "AccountName={0};SharedAccessSignature={1};", accountSasParams); string accountString15 = string.Format( "AccountName={0};SharedAccessSignature={1};QueueEndpoint={3}", accountSasParams); string accountString16 = 
string.Format( "AccountName={0};SharedAccessSignature={1};EndpointSuffix={2};QueueEndpoint={3}", accountSasParams); connectionStringRoundtripHelper(accountString13); connectionStringRoundtripHelper(accountString14); connectionStringRoundtripHelper(accountString15); connectionStringRoundtripHelper(accountString16); // shared access no account name string accountString17 = string.Format( "SharedAccessSignature={1};QueueEndpoint={3}", accountSasParams); connectionStringRoundtripHelper(accountString17); } [TestMethod] [Description("Regular account with HTTP")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountConnectionStringExpectedExceptions() { string[][] endpointCombinations = new[] { new[] { "BlobEndpoint={3}", "BlobSecondaryEndpoint={4}", "BlobEndpoint={3};BlobSecondaryEndpoint={4}" }, new[] { "QueueEndpoint={3}", "QueueSecondaryEndpoint={4}", "QueueEndpoint={3};QueueSecondaryEndpoint={4}" }, new[] { "TableEndpoint={3}", "TableSecondaryEndpoint={4}", "TableEndpoint={3};TableSecondaryEndpoint={4}" }, new[] { "FileEndpoint={3}", "FileSecondaryEndpoint={4}", "FileEndpoint={3};FileSecondaryEndpoint={4}" } }; string[] accountKeyParams = new[] { TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey, "fake.endpoint.suffix", "https://primary.endpoint/", "https://secondary.endpoint/" }; string[] accountSasParams = new[] { TestBase.TargetTenantConfig.AccountName, "sasTest", "fake.endpoint.suffix", "https://primary.endpoint/", "https://secondary.endpoint/" }; foreach (string[] endpointCombination in endpointCombinations) { // account key string accountStringKeyPrimary = string.Format( "DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1};EndpointSuffix={2};" + endpointCombination[0], accountKeyParams ); string 
accountStringKeySecondary = string.Format( "DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1};EndpointSuffix={2};" + endpointCombination[1], accountKeyParams ); string accountStringKeyPrimarySecondary = string.Format( "DefaultEndpointsProtocol=https;AccountName={0};AccountKey={1};EndpointSuffix={2};" + endpointCombination[2], accountKeyParams ); CloudStorageAccount.Parse(accountStringKeyPrimary); // no exception expected TestHelper.ExpectedException<FormatException>(() => CloudStorageAccount.Parse(accountStringKeySecondary), "connection string parse", "No valid combination of account information found."); CloudStorageAccount.Parse(accountStringKeyPrimarySecondary); // no exception expected // account key, no default protocol string accountStringKeyNoDefaultProtocolPrimary = string.Format( "AccountName={0};AccountKey={1};EndpointSuffix={2};" + endpointCombination[0], accountKeyParams ); string accountStringKeyNoDefaultProtocolSecondary = string.Format( "AccountName={0};AccountKey={1};EndpointSuffix={2};" + endpointCombination[1], accountKeyParams ); string accountStringKeyNoDefaultProtocolPrimarySecondary = string.Format( "AccountName={0};AccountKey={1};EndpointSuffix={2};" + endpointCombination[2], accountKeyParams ); CloudStorageAccount.Parse(accountStringKeyNoDefaultProtocolPrimary); // no exception expected TestHelper.ExpectedException<FormatException>(() => CloudStorageAccount.Parse(accountStringKeyNoDefaultProtocolSecondary), "connection string parse", "No valid combination of account information found."); CloudStorageAccount.Parse(accountStringKeyNoDefaultProtocolPrimarySecondary); // no exception expected // SAS string accountStringSasPrimary = string.Format( "DefaultEndpointsProtocol=https;AccountName={0};SharedAccessSignature={1};EndpointSuffix={2};" + endpointCombination[0], accountSasParams ); string accountStringSasSecondary = string.Format( "DefaultEndpointsProtocol=https;AccountName={0};SharedAccessSignature={1};EndpointSuffix={2};" + 
endpointCombination[1], accountSasParams ); string accountStringSasPrimarySecondary = string.Format( "DefaultEndpointsProtocol=https;AccountName={0};SharedAccessSignature={1};EndpointSuffix={2};" + endpointCombination[2], accountSasParams ); CloudStorageAccount.Parse(accountStringSasPrimary); // no exception expected TestHelper.ExpectedException<FormatException>(() => CloudStorageAccount.Parse(accountStringSasSecondary), "connection string parse", "No valid combination of account information found."); CloudStorageAccount.Parse(accountStringSasPrimarySecondary); // no exception expected // SAS, no default protocol string accountStringSasNoDefaultProtocolPrimary = string.Format( "AccountName={0};SharedAccessSignature={1};EndpointSuffix={2};" + endpointCombination[0], accountSasParams ); string accountStringSasNoDefaultProtocolSecondary = string.Format( "AccountName={0};SharedAccessSignature={1};EndpointSuffix={2};" + endpointCombination[1], accountSasParams ); string accountStringSasNoDefaultProtocolPrimarySecondary = string.Format( "AccountName={0};SharedAccessSignature={1};EndpointSuffix={2};" + endpointCombination[2], accountSasParams ); CloudStorageAccount.Parse(accountStringSasNoDefaultProtocolPrimary); // no exception expected TestHelper.ExpectedException<FormatException>(() => CloudStorageAccount.Parse(accountStringSasNoDefaultProtocolSecondary), "connection string parse", "No valid combination of account information found."); CloudStorageAccount.Parse(accountStringSasNoDefaultProtocolPrimarySecondary); // no exception expected // SAS without AccountName string accountStringSasNoNameNoEndpoint = string.Format( "SharedAccessSignature={1}", accountSasParams ); string accountStringSasNoNamePrimary = string.Format( "SharedAccessSignature={1};" + endpointCombination[0], accountSasParams ); string accountStringSasNoNameSecondary = string.Format( "SharedAccessSignature={1};" + endpointCombination[1], accountSasParams ); string accountStringSasNoNamePrimarySecondary = 
string.Format( "SharedAccessSignature={1};" + endpointCombination[2], accountSasParams ); TestHelper.ExpectedException<FormatException>(() => CloudStorageAccount.Parse(accountStringSasNoNameNoEndpoint), "connection string parse", "No valid combination of account information found."); CloudStorageAccount.Parse(accountStringSasNoNamePrimary); // no exception expected TestHelper.ExpectedException<FormatException>(() => CloudStorageAccount.Parse(accountStringSasNoNameSecondary), "connection string parse", "No valid combination of account information found."); CloudStorageAccount.Parse(accountStringSasNoNamePrimarySecondary); // no exception expected } } private void connectionStringRoundtripHelper(string accountString) { CloudStorageAccount originalAccount = CloudStorageAccount.Parse(accountString); string copiedAccountString = originalAccount.ToString(true); CloudStorageAccount copiedAccount = CloudStorageAccount.Parse(copiedAccountString); // make sure it round trips this.AccountsAreEqual(originalAccount, copiedAccount); } [TestMethod] [Description("Service client creation methods")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountClientMethods() { CloudStorageAccount account = new CloudStorageAccount(TestBase.StorageCredentials, false); CloudBlobClient blob = account.CreateCloudBlobClient(); CloudQueueClient queue = account.CreateCloudQueueClient(); CloudFileClient file = account.CreateCloudFileClient(); // check endpoints Assert.AreEqual(account.BlobEndpoint, blob.BaseUri, "Blob endpoint doesn't match account"); Assert.AreEqual(account.QueueEndpoint, queue.BaseUri, "Queue endpoint doesn't match account"); Assert.AreEqual(account.FileEndpoint, file.BaseUri, "File endpoint doesn't match account"); // check storage uris 
Assert.AreEqual(account.BlobStorageUri, blob.StorageUri, "Blob endpoint doesn't match account"); Assert.AreEqual(account.QueueStorageUri, queue.StorageUri, "Queue endpoint doesn't match account"); Assert.AreEqual(account.FileStorageUri, file.StorageUri, "File endpoint doesn't match account"); // check creds Assert.AreEqual(account.Credentials, blob.Credentials, "Blob creds don't match account"); Assert.AreEqual(account.Credentials, queue.Credentials, "Queue creds don't match account"); Assert.AreEqual(account.Credentials, file.Credentials, "File creds don't match account"); } [TestMethod] [Description("Service client creation methods")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountClientUriVerify() { string myAccountName = TestBase.TargetTenantConfig.AccountName; string myAccountKey = TestBase.TargetTenantConfig.AccountKey; #region sample_CloudStorageAccount_Constructor // Create a CloudStorageAccount object using account name and key. // The account name should be just the name of a Storage Account, not a URI, and // not including the suffix. The key should be a base-64 encoded string that you // can acquire from the portal, or from the management plane. // This will have full permissions to all operations on the account. StorageCredentials storageCredentials = new StorageCredentials(myAccountName, myAccountKey); CloudStorageAccount cloudStorageAccount = new CloudStorageAccount(storageCredentials, useHttps: true); // Create a CloudBlobClient object from the storage account. // This object is the root object for all operations on the // blob service for this particular account. CloudBlobClient blobClient = cloudStorageAccount.CreateCloudBlobClient(); // Get a reference to a CloudBlobContainer object in this account. 
// This object can be used to create the container on the service, // list blobs, delete the container, etc. This operation does not make a // call to the Azure Storage service. It neither creates the container // on the service, nor validates its existence. CloudBlobContainer container = blobClient.GetContainerReference("container1"); // Create a CloudQueueClient object from the storage account. // This object is the root object for all operations on the // queue service for this particular account. CloudQueueClient queueClient = cloudStorageAccount.CreateCloudQueueClient(); // Get a reference to a CloudQueue object in this account. // This object can be used to create the queue on the service, // delete the queue, add messages, etc. This operation does not // make a call to the Azure Storage service. It neither creates // the queue on the service, nor validates its existence. CloudQueue queue = queueClient.GetQueueReference("queue1"); // Create a CloudFileClient object from the storage account. // This object is the root object for all operations on the // file service for this particular account. CloudFileClient fileClient = cloudStorageAccount.CreateCloudFileClient(); // Get a reference to a CloudFileShare object in this account. // This object can be used to create the share on the service, // delete the share, list files and directories, etc. This operation // does not make a call to the Azure Storage service. It neither // creates the share on the service, nor validates its existence. 
CloudFileShare share = fileClient.GetShareReference("share1"); #endregion Assert.AreEqual(cloudStorageAccount.BlobEndpoint.ToString() + "container1", container.Uri.ToString()); Assert.AreEqual(cloudStorageAccount.QueueEndpoint.ToString() + "queue1", queue.Uri.ToString()); Assert.AreEqual(cloudStorageAccount.FileEndpoint.ToString() + "share1", share.Uri.ToString()); } [TestMethod] [Description("TryParse should return false for invalid connection strings")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountTryParseNullEmpty() { CloudStorageAccount account; // TryParse should not throw exception when passing in null or empty string Assert.IsFalse(CloudStorageAccount.TryParse(null, out account)); Assert.IsFalse(CloudStorageAccount.TryParse(string.Empty, out account)); } [TestMethod] [Description("UseDevelopmentStorage=false should fail")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountDevStoreNonTrueFails() { CloudStorageAccount account; Assert.IsFalse(CloudStorageAccount.TryParse("UseDevelopmentStorage=false", out account)); } [TestMethod] [Description("UseDevelopmentStorage should fail when used with an account name")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountDevStorePlusAccountFails() { CloudStorageAccount account; 
Assert.IsFalse(CloudStorageAccount.TryParse("UseDevelopmentStorage=false;AccountName=devstoreaccount1", out account)); } [TestMethod] [Description("UseDevelopmentStorage should fail when used with a custom endpoint")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountDevStorePlusEndpointFails() { CloudStorageAccount account; Assert.IsFalse(CloudStorageAccount.TryParse("UseDevelopmentStorage=false;BlobEndpoint=http://127.0.0.1:1000/devstoreaccount1", out account)); } [TestMethod] [Description("Custom endpoints")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountDefaultEndpointOverride() { CloudStorageAccount account; Assert.IsTrue(CloudStorageAccount.TryParse("DefaultEndpointsProtocol=http;BlobEndpoint=http://customdomain.com/;AccountName=asdf;AccountKey=123=", out account)); Assert.AreEqual(new Uri("http://customdomain.com/"), account.BlobEndpoint); Assert.IsNull(account.BlobStorageUri.SecondaryUri); } [TestMethod] [Description("Use DevStore with a proxy")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountDevStoreProxyUri() { CloudStorageAccount account; Assert.IsTrue(CloudStorageAccount.TryParse("UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://ipv4.fiddler", out account)); Assert.AreEqual(new Uri("http://ipv4.fiddler:10000/devstoreaccount1"), 
account.BlobEndpoint); Assert.AreEqual(new Uri("http://ipv4.fiddler:10001/devstoreaccount1"), account.QueueEndpoint); Assert.AreEqual(new Uri("http://ipv4.fiddler:10002/devstoreaccount1"), account.TableEndpoint); Assert.AreEqual(new Uri("http://ipv4.fiddler:10000/devstoreaccount1-secondary"), account.BlobStorageUri.SecondaryUri); Assert.AreEqual(new Uri("http://ipv4.fiddler:10001/devstoreaccount1-secondary"), account.QueueStorageUri.SecondaryUri); Assert.AreEqual(new Uri("http://ipv4.fiddler:10002/devstoreaccount1-secondary"), account.TableStorageUri.SecondaryUri); Assert.IsNull(account.FileStorageUri); } [TestMethod] [Description("ToString method for DevStore account should not return endpoint info")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountDevStoreRoundtrip() { string accountString = "UseDevelopmentStorage=true"; Assert.AreEqual(accountString, CloudStorageAccount.Parse(accountString).ToString(true)); } [TestMethod] [Description("ToString method for DevStore account with a proxy should not return endpoint info")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountDevStoreProxyRoundtrip() { string accountString = "UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://ipv4.fiddler/"; Assert.AreEqual(accountString, CloudStorageAccount.Parse(accountString).ToString(true)); } [TestMethod] [Description("ToString method for regular account should return the same connection string")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] 
[TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountDefaultCloudRoundtrip() { string accountString = "DefaultEndpointsProtocol=http;AccountName=test;AccountKey=abc="; Assert.AreEqual(accountString, CloudStorageAccount.Parse(accountString).ToString(true)); } [TestMethod] [Description("ToString method for anonymous credentials should return the same connection string")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountAnonymousRoundtrip() { string accountString = "BlobEndpoint=http://blobs/"; Assert.AreEqual(accountString, CloudStorageAccount.Parse(accountString).ToString(true)); CloudStorageAccount account = new CloudStorageAccount(null, new Uri("http://blobs/"), null, null, null); AccountsAreEqual(account, CloudStorageAccount.Parse(account.ToString(true))); } [TestMethod] [Description("Exporting account key should be possible both as a byte array and a Base64 encoded string")] [TestCategory(ComponentCategory.Core)] [TestCategory(TestTypeCategory.UnitTest)] [TestCategory(SmokeTestCategory.NonSmoke)] [TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)] public void CloudStorageAccountExportKey() { string accountKeyString = "abc2564="; string accountString = "BlobEndpoint=http://blobs/;AccountName=test;AccountKey=" + accountKeyString; CloudStorageAccount account = CloudStorageAccount.Parse(accountString); StorageCredentials accountAndKey = (StorageCredentials)account.Credentials; string key = accountAndKey.ExportBase64EncodedKey(); Assert.AreEqual(accountKeyString, key); byte[] keyBytes = accountAndKey.ExportKey(); 
byte[] expectedKeyBytes = Convert.FromBase64String(accountKeyString); for (int i = 0; i < expectedKeyBytes.Length; i++) { Assert.AreEqual(expectedKeyBytes[i], keyBytes[i]); } Assert.AreEqual(expectedKeyBytes.Length, keyBytes.Length); } } }
#region License // Copyright (c) 2010-2019, Mark Final // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright notice, this // list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of BuildAMation nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, // OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #endregion // License namespace Bam.Core { /// <summary> /// Static utility class for configuring and querying OS specific details. 
/// </summary>
public static class OSUtilities
{
    // Cache of executable key -> discovered install locations, so repeated
    // queries for the same executable do not spawn external processes again.
    // All access is serialized by locking the dictionary in GetInstallLocation.
    private static readonly System.Collections.Generic.Dictionary<string, StringArray> InstallLocationCache = new System.Collections.Generic.Dictionary<string, StringArray>();

    static OSUtilities()
    {
        var is64Bit = System.Environment.Is64BitOperatingSystem;
        if (System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(System.Runtime.InteropServices.OSPlatform.Windows))
        {
            CurrentOS = EPlatform.Windows;
            CurrentPlatform = is64Bit ? EPlatform.Win64 : EPlatform.Win32;
        }
        else if (System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(System.Runtime.InteropServices.OSPlatform.Linux))
        {
            CurrentOS = EPlatform.Linux;
            CurrentPlatform = is64Bit ? EPlatform.Linux64 : EPlatform.Linux32;
        }
        else if (System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(System.Runtime.InteropServices.OSPlatform.OSX))
        {
            CurrentOS = EPlatform.OSX;
            CurrentPlatform = is64Bit ? EPlatform.OSX64 : EPlatform.OSX32;
        }
        else
        {
            throw new Exception("Unrecognized platform");
        }
        IsLittleEndian = System.BitConverter.IsLittleEndian;
    }

    /// <summary>
    /// Determine whether the specified platform is a Windows platform.
    /// </summary>
    /// <returns><c>true</c> if the platform is 32-bit or 64-bit Windows; otherwise, <c>false</c>.</returns>
    /// <param name="platform">Platform to query.</param>
    public static bool IsWindows(
        EPlatform platform) => (EPlatform.Win32 == platform || EPlatform.Win64 == platform);

    /// <summary>
    /// Determine whether Windows is the current platform.
    /// </summary>
    /// <value><c>true</c> if Windows is hosting; otherwise, <c>false</c>.</value>
    public static bool IsWindowsHosting => IsWindows(CurrentPlatform);

    /// <summary>
    /// Determine whether the specified platform is a Linux platform.
    /// </summary>
    /// <returns><c>true</c> if the platform is 32-bit or 64-bit Linux; otherwise, <c>false</c>.</returns>
    /// <param name="platform">Platform to query.</param>
    public static bool IsLinux(
        EPlatform platform) => (EPlatform.Linux32 == platform || EPlatform.Linux64 == platform);

    /// <summary>
    /// Determine whether Linux is the current platform.
    /// </summary>
    /// <value><c>true</c> if Linux is hosting; otherwise, <c>false</c>.</value>
    public static bool IsLinuxHosting => IsLinux(CurrentPlatform);

    /// <summary>
    /// Determine whether the specified platform is an OSX platform.
    /// </summary>
    /// <returns><c>true</c> if the platform is 32-bit or 64-bit OSX; otherwise, <c>false</c>.</returns>
    /// <param name="platform">Platform to query.</param>
    public static bool IsOSX(
        EPlatform platform) => (EPlatform.OSX32 == platform || EPlatform.OSX64 == platform);

    /// <summary>
    /// Determine whether OSX is the current platform.
    /// </summary>
    /// <value><c>true</c> if OSX is hosting; otherwise, <c>false</c>.</value>
    public static bool IsOSXHosting => IsOSX(CurrentPlatform);

    /// <summary>
    /// Determine whether the specified platform is 64-bit.
    /// </summary>
    /// <returns><c>true</c> if the platform is 64-bit; otherwise, <c>false</c>.</returns>
    /// <param name="platform">Platform to query.</param>
    public static bool Is64Bit(
        EPlatform platform) => (EPlatform.Win64 == platform || EPlatform.Linux64 == platform || EPlatform.OSX64 == platform);

    /// <summary>
    /// Determine whether the current OS is 64-bit.
    /// </summary>
    /// <value><c>true</c> if the host is 64-bit; otherwise, <c>false</c>.</value>
    public static bool Is64BitHosting => Is64Bit(CurrentPlatform);

    /// <summary>
    /// Determine whether the specified set of platforms includes the current platform.
    /// </summary>
    /// <returns><c>true</c> if the current platform is in <paramref name="supportedPlatforms"/>; otherwise, <c>false</c>.</returns>
    /// <param name="supportedPlatforms">Bitwise combination of supported platforms.</param>
    public static bool IsCurrentPlatformSupported(
        EPlatform supportedPlatforms) => (CurrentPlatform == (supportedPlatforms & CurrentPlatform));

    /// <summary>
    /// Get the current OS family (Windows, Linux or OSX, without bitness)
    /// in terms of the EPlatform enumeration.
    /// </summary>
    /// <value>The current OS.</value>
    public static EPlatform CurrentOS { get; private set; }

    /// <summary>
    /// Determine if the current platform is little endian.
    /// </summary>
    /// <value><c>true</c> if little endian; otherwise, <c>false</c>.</value>
    public static bool IsLittleEndian { get; private set; }

    /// <summary>
    /// Retrieve the current platform (OS family plus bitness).
    /// </summary>
    /// <value>The current platform.</value>
    public static EPlatform CurrentPlatform { get; private set; }

    /// <summary>
    /// Retrieve the path to 'Program Files'. This is the path where native architecture programs are installed.
    /// The same path is returned on both 32-bit and 64-bit editions of Windows.
    /// </summary>
    /// <exception cref="Exception">Thrown when not running on Windows.</exception>
    public static TokenizedString WindowsProgramFilesPath
    {
        get
        {
            if (!IsWindowsHosting)
            {
                throw new Exception("Only available on Windows");
            }
            return TokenizedString.CreateVerbatim(
                System.Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFiles)
            );
        }
    }

    /// <summary>
    /// Retrieve the path to 'Program Files (x86)'. This is the path where 32-bit architecture programs are installed.
    /// On 32-bit Windows, this is the same as WindowsProgramFilesPath.
    /// On 64-bit Windows, it is a different path.
    /// </summary>
    /// <exception cref="Exception">Thrown when not running on Windows.</exception>
    public static TokenizedString WindowsProgramFilesx86Path
    {
        get
        {
            if (!IsWindowsHosting)
            {
                throw new Exception("Only available on Windows");
            }
            // On 32-bit Windows there is no separate x86 folder, so fall back
            // to the plain Program Files folder.
            var envVar = Is64BitHosting ?
                System.Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFilesX86) :
                System.Environment.GetFolderPath(System.Environment.SpecialFolder.ProgramFiles);
            return TokenizedString.CreateVerbatim(envVar);
        }
    }

    /// <summary>
    /// Run an executable with a specified set of arguments.
    /// Will return a result, containing standard output, error and exit code.
    /// Will throw an exception if the exit code for the executable is not zero.
    /// </summary>
    /// <param name="executable">Executable path to run.</param>
    /// <param name="arguments">Arguments to pass to the executable.</param>
    /// <returns>Result of running the executable.</returns>
    /// <exception cref="RunExecutableException">
    /// Thrown when the executable exits with a non-zero code, or cannot be started at all.
    /// </exception>
    public static RunExecutableResult
    RunExecutable(
        string executable,
        string arguments)
    {
        var processStartInfo = new System.Diagnostics.ProcessStartInfo();
        processStartInfo.FileName = executable;
        processStartInfo.Arguments = arguments;
        processStartInfo.RedirectStandardOutput = true;
        processStartInfo.RedirectStandardError = true;
        processStartInfo.RedirectStandardInput = true;
        processStartInfo.UseShellExecute = false; // to redirect IO streams
        try
        {
            System.Diagnostics.Process process = System.Diagnostics.Process.Start(processStartInfo);
            // Nothing is written to the child's stdin; close it so the child
            // does not block waiting for input.
            process.StandardInput.Close();
            // Drain stdout and stderr on separate threads: reading both
            // sequentially on one thread can deadlock if the child fills
            // the other redirected pipe's buffer.
            string outputBuffer = null;
            var outputThread = new System.Threading.Thread(() =>
            {
                outputBuffer = process.StandardOutput.ReadToEnd();
            });
            outputThread.Start();
            string errorBuffer = null;
            var errorThread = new System.Threading.Thread(() =>
            {
                errorBuffer = process.StandardError.ReadToEnd();
            });
            errorThread.Start();
            process.WaitForExit();
            errorThread.Join();
            outputThread.Join();
            var result = new RunExecutableResult(
                outputBuffer?.TrimEnd(System.Environment.NewLine.ToCharArray()),
                errorBuffer?.TrimEnd(System.Environment.NewLine.ToCharArray()),
                process.ExitCode
            );
            if (0 != process.ExitCode)
            {
                throw new RunExecutableException(
                    result,
                    $"Failed while running '{executable} {arguments}'"
                );
            }
            return result;
        }
        catch (System.ComponentModel.Win32Exception exception)
        {
            // The process could not be started at all (e.g. executable not
            // found); report an empty result with a sentinel exit code.
            var result = new RunExecutableResult(
                "",
                "",
                -1
            );
            throw new RunExecutableException(
                result,
                $"Failed while trying to run '{executable} {arguments}' because {exception.Message}"
            );
        }
    }

    /// <summary>
    /// Gets the install location of an executable.
    /// The PATH is searched initially.
    /// If not found, on Windows, the x64 (if applicable) and x86 program files directories are recursively
    /// searched, in that order for the executable. This may be slow.
    /// If the searchDirectory argument is non-null, this is used instead of the Windows program file
    /// directories.
    /// An exception is thrown if it cannot be located in the system.
    /// Executable locations are cached (thread safe), so that multiple queries for the same
    /// executable does not need to invoke any external processes. If uniqueName is non-null, then
    /// this is used as the key in the cache, otherwise the executable name is used. The unique name
    /// may be useful to save different flavours of the same executable name.
    /// </summary>
    /// <returns>The installed locations of the executable (may be more than one). Or null if throwOnFailure is false when no match is found.</returns>
    /// <param name="executable">Filename of the executable to locate.</param>
    /// <param name="searchDirectory">Optional directory to search (Windows only).</param>
    /// <param name="uniqueName">Optional unique name to save as the key in the cache.</param>
    /// <param name="throwOnFailure">Optional Boolean, defaults to true, to indicate whether an exception is thrown when the executable is not found.</param>
    public static StringArray
    GetInstallLocation(
        string executable,
        string searchDirectory = null,
        string uniqueName = null,
        bool throwOnFailure = true)
    {
        lock (InstallLocationCache)
        {
            var key = uniqueName ?? executable;
            // Single dictionary lookup instead of ContainsKey + indexer.
            StringArray cached;
            if (InstallLocationCache.TryGetValue(key, out cached))
            {
                return cached;
            }
            string location;
            try
            {
                if (OSUtilities.IsWindowsHosting)
                {
                    if (null != searchDirectory)
                    {
                        var args = new System.Text.StringBuilder();
                        args.Append($"/R \"{searchDirectory}\" {executable}");
                        location = RunExecutable("where", args.ToString()).StandardOutput;
                    }
                    else
                    {
                        try
                        {
                            // First try the PATH.
                            location = RunExecutable("where", executable).StandardOutput;
                        }
                        catch (RunExecutableException)
                        {
                            // Fall back to a recursive search of Program Files,
                            // then Program Files (x86).
                            var args = new System.Text.StringBuilder();
                            args.Append($"/R \"{WindowsProgramFilesPath.ToString()}\" {executable}");
                            try
                            {
                                location = RunExecutable("where", args.ToString()).StandardOutput;
                            }
                            catch (RunExecutableException)
                            {
                                args.Length = 0;
                                args.Capacity = 0;
                                args.Append($"/R \"{WindowsProgramFilesx86Path.ToString()}\" {executable}");
                                location = RunExecutable("where", args.ToString()).StandardOutput;
                            }
                        }
                    }
                }
                else
                {
                    if (null != searchDirectory)
                    {
                        Log.DebugMessage($"Search path '{searchDirectory}' is ignored on non-Windows platforms");
                    }
                    location = RunExecutable("which", executable).StandardOutput;
                }
            }
            catch (RunExecutableException exception)
            {
                if (throwOnFailure)
                {
                    throw new Exception(exception, $"Unable to locate '{executable}' in the system.");
                }
                else
                {
                    return null;
                }
            }
            // 'where' can report multiple matches, one per line.
            var results = new StringArray(
                location.Split(
                    new[] { System.Environment.NewLine },
                    System.StringSplitOptions.RemoveEmptyEntries
                )
            );
            InstallLocationCache.Add(key, results);
            return results;
        }
    }
}
}
// ZlibStream.cs // ------------------------------------------------------------------ // // Copyright (c) 2009 Dino Chiesa and Microsoft Corporation. // All rights reserved. // // This code module is part of DotNetZip, a zipfile class library. // // ------------------------------------------------------------------ // // This code is licensed under the Microsoft Public License. // See the file License.txt for the license details. // More info on: http://dotnetzip.codeplex.com // // ------------------------------------------------------------------ // // last saved (in emacs): // Time-stamp: <2011-July-31 14:53:33> // // ------------------------------------------------------------------ // // This module defines the ZlibStream class, which is similar in idea to // the System.IO.Compression.DeflateStream and // System.IO.Compression.GZipStream classes in the .NET BCL. // // ------------------------------------------------------------------ using System; using System.IO; namespace Ionic.Zlib { /// <summary> /// Represents a Zlib stream for compression or decompression. /// </summary> /// <remarks> /// /// <para> /// The ZlibStream is a <see /// href="http://en.wikipedia.org/wiki/Decorator_pattern">Decorator</see> on a <see /// cref="System.IO.Stream"/>. It adds ZLIB compression or decompression to any /// stream. /// </para> /// /// <para> Using this stream, applications can compress or decompress data via /// stream <c>Read()</c> and <c>Write()</c> operations. Either compresssion or /// decompression can occur through either reading or writing. The compression /// format used is ZLIB, which is documented in <see /// href="http://www.ietf.org/rfc/rfc1950.txt">IETF RFC 1950</see>, "ZLIB Compressed /// Data Format Specification version 3.3". This implementation of ZLIB always uses /// DEFLATE as the compression method. 
(see <see
/// href="http://www.ietf.org/rfc/rfc1951.txt">IETF RFC 1951</see>, "DEFLATE
/// Compressed Data Format Specification version 1.3.") </para>
///
/// <para>
/// The ZLIB format allows for varying compression methods, window sizes, and
/// dictionaries. This implementation always uses the DEFLATE compression method
/// and 15 window bits by default.
/// </para>
///
/// <para>
/// This class is similar to <see cref="DeflateStream"/>, except that it adds
/// the RFC1950 header and trailer bytes to a compressed stream when
/// compressing, or expects the RFC1950 header and trailer bytes when
/// decompressing. It is also similar to the <see cref="GZipStream"/>.
/// </para>
/// </remarks>
/// <seealso cref="DeflateStream" />
/// <seealso cref="GZipStream" />
public class ZlibStream : System.IO.Stream
{
    // All real work is delegated to this wrapped stream, which implements the
    // shared ZLIB/GZIP/DEFLATE plumbing.
    internal ZlibBaseStream _baseStream;
    bool _disposed;

    /// <summary>
    /// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c>.
    /// </summary>
    /// <remarks>
    /// When mode is <c>CompressionMode.Compress</c>, the default compression
    /// level is used. The captive stream is closed when this stream is closed.
    /// </remarks>
    /// <param name="stream">The stream which will be read or written.</param>
    /// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
    public ZlibStream(System.IO.Stream stream, CompressionMode mode)
        : this(stream, mode, CompressionLevel.Default, false)
    {
    }

    /// <summary>
    /// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c>
    /// and <c>CompressionLevel</c>.
    /// </summary>
    /// <remarks>
    /// When mode is <c>CompressionMode.Decompress</c>, the level parameter is
    /// ignored. The captive stream is closed when this stream is closed.
    /// </remarks>
    /// <param name="stream">The stream to be read or written while deflating or inflating.</param>
    /// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
    /// <param name="level">A tuning knob to trade speed for effectiveness.</param>
    public ZlibStream(System.IO.Stream stream, CompressionMode mode, CompressionLevel level)
        : this(stream, mode, level, false)
    {
    }

    /// <summary>
    /// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c>,
    /// explicitly specifying whether the captive stream should be left open
    /// after Deflation or Inflation.
    /// </summary>
    /// <remarks>
    /// When mode is <c>CompressionMode.Compress</c>, the default compression
    /// level is used. Pass true for <paramref name="leaveOpen"/> to keep the
    /// captive stream open after this stream is closed; useful for example
    /// when the captive stream is a <see cref="System.IO.MemoryStream"/> that
    /// will be re-read afterwards.
    /// </remarks>
    /// <param name="stream">The stream which will be read or written. This is called the
    /// "captive" stream in other places in this documentation.</param>
    /// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
    /// <param name="leaveOpen">true if the application would like the stream to remain
    /// open after inflation/deflation.</param>
    public ZlibStream(System.IO.Stream stream, CompressionMode mode, bool leaveOpen)
        : this(stream, mode, CompressionLevel.Default, leaveOpen)
    {
    }

    /// <summary>
    /// Create a <c>ZlibStream</c> using the specified <c>CompressionMode</c>
    /// and <c>CompressionLevel</c>, explicitly specifying whether the captive
    /// stream should be left open after Deflation or Inflation.
    /// </summary>
    /// <remarks>
    /// When mode is <c>CompressionMode.Decompress</c>, the level parameter is
    /// ignored. Pass true for <paramref name="leaveOpen"/> to keep the captive
    /// stream open after this stream is closed.
    /// </remarks>
    /// <param name="stream">The stream which will be read or written.</param>
    /// <param name="mode">Indicates whether the ZlibStream will compress or decompress.</param>
    /// <param name="level">
    /// A tuning knob to trade speed for effectiveness. This parameter is
    /// effective only when mode is <c>CompressionMode.Compress</c>.
    /// </param>
    /// <param name="leaveOpen">
    /// true if the application would like the stream to remain open after
    /// inflation/deflation.
    /// </param>
    public ZlibStream(System.IO.Stream stream, CompressionMode mode, CompressionLevel level, bool leaveOpen)
    {
        this._baseStream = new ZlibBaseStream(stream, mode, level, ZlibStreamFlavor.ZLIB, leaveOpen);
    }

    // Shared guard: public operations on a disposed stream must throw.
    private void EnsureNotDisposed()
    {
        if (_disposed)
            throw new ObjectDisposedException("ZlibStream");
    }

    #region Zlib properties

    /// <summary>
    /// This property sets the flush behavior on the stream.
    /// </summary>
    virtual public FlushType FlushMode
    {
        get { return _baseStream._flushMode; }
        set
        {
            EnsureNotDisposed();
            _baseStream._flushMode = value;
        }
    }

    /// <summary>
    /// The size of the working buffer for the compression codec.
    /// </summary>
    /// <remarks>
    /// <para>
    /// The working buffer is used for all stream operations. The default size
    /// is 1024 bytes; the minimum size is 128 bytes. A larger buffer may (or
    /// may not) give better performance.
    /// </para>
    /// <para>
    /// Set this before the first call to <c>Read()</c> or <c>Write()</c> on
    /// the stream; setting it afterwards throws.
    /// </para>
    /// </remarks>
    public int BufferSize
    {
        get { return _baseStream._bufferSize; }
        set
        {
            EnsureNotDisposed();
            if (_baseStream._workingBuffer != null)
                throw new ZlibException("The working buffer is already set.");
            if (value < ZlibConstants.WorkingBufferSizeMin)
                throw new ZlibException(String.Format("Don't be silly. {0} bytes?? Use a bigger buffer, at least {1}.",
                                                      value, ZlibConstants.WorkingBufferSizeMin));
            _baseStream._bufferSize = value;
        }
    }

    /// <summary> Returns the total number of bytes input so far.</summary>
    virtual public long TotalIn
    {
        get { return _baseStream._z.TotalBytesIn; }
    }

    /// <summary> Returns the total number of bytes output so far.</summary>
    virtual public long TotalOut
    {
        get { return _baseStream._z.TotalBytesOut; }
    }

    #endregion

    #region System.IO.Stream methods

    /// <summary>
    /// Dispose the stream.
    /// </summary>
    /// <remarks>
    /// This may or may not result in a <c>Close()</c> call on the captive
    /// stream; see the constructors that accept a <c>leaveOpen</c> parameter.
    /// When disposing is true, the call originates from user code and managed
    /// resources may be released; when false, it originates from the finalizer
    /// and only unmanaged resources may be touched.
    /// </remarks>
    /// <param name="disposing">
    /// indicates whether the Dispose method was invoked by user code.
    /// </param>
    protected override void Dispose(bool disposing)
    {
        try
        {
            if (!_disposed)
            {
                if (disposing && _baseStream != null)
                    _baseStream.Close();
                _disposed = true;
            }
        }
        finally
        {
            base.Dispose(disposing);
        }
    }

    /// <summary>
    /// Indicates whether the stream can be read.
    /// </summary>
    /// <remarks>
    /// The return value depends on whether the captive stream supports reading.
    /// </remarks>
    public override bool CanRead
    {
        get
        {
            EnsureNotDisposed();
            return _baseStream._stream.CanRead;
        }
    }

    /// <summary>
    /// Indicates whether the stream supports Seek operations.
    /// </summary>
    /// <remarks>
    /// Always returns false.
    /// </remarks>
    public override bool CanSeek
    {
        get { return false; }
    }

    /// <summary>
    /// Indicates whether the stream can be written.
    /// </summary>
    /// <remarks>
    /// The return value depends on whether the captive stream supports writing.
    /// </remarks>
    public override bool CanWrite
    {
        get
        {
            EnsureNotDisposed();
            return _baseStream._stream.CanWrite;
        }
    }

    /// <summary>
    /// Flush the stream.
    /// </summary>
    public override void Flush()
    {
        EnsureNotDisposed();
        _baseStream.Flush();
    }

    /// <summary>
    /// Reading this property always throws a <see cref="NotSupportedException"/>.
    /// </summary>
    public override long Length
    {
        get { throw new NotSupportedException(); }
    }

    /// <summary>
    /// The position of the stream pointer.
    /// </summary>
    /// <remarks>
    /// Setting this property always throws a <see cref="NotSupportedException"/>.
    /// Reading returns the total bytes written out if used in writing, or the
    /// total bytes read in if used in reading; the count may refer to
    /// compressed or uncompressed bytes depending on how the stream is used.
    /// </remarks>
    public override long Position
    {
        get
        {
            switch (_baseStream._streamMode)
            {
                case ZlibBaseStream.StreamMode.Writer:
                    return _baseStream._z.TotalBytesOut;
                case ZlibBaseStream.StreamMode.Reader:
                    return _baseStream._z.TotalBytesIn;
                default:
                    return 0;
            }
        }
        set { throw new NotSupportedException(); }
    }

    /// <summary>
    /// Read data from the stream.
    /// </summary>
    /// <remarks>
    /// <para>
    /// With <c>CompressionMode.Compress</c> and an uncompressed source, the
    /// data read is compressed; with <c>CompressionMode.Decompress</c> and a
    /// compressed source, the data read is decompressed.
    /// </para>
    /// <para>
    /// A <c>ZlibStream</c> can be used for <c>Read()</c> or <c>Write()</c>,
    /// but not both.
    /// </para>
    /// </remarks>
    /// <param name="buffer">
    /// The buffer into which the read data should be placed.</param>
    /// <param name="offset">
    /// the offset within that data array to put the first byte read.</param>
    /// <param name="count">the number of bytes to read.</param>
    /// <returns>the number of bytes read</returns>
    public override int Read(byte[] buffer, int offset, int count)
    {
        EnsureNotDisposed();
        return _baseStream.Read(buffer, offset, count);
    }

    /// <summary>
    /// Calling this method always throws a <see cref="NotSupportedException"/>.
    /// </summary>
    /// <param name="offset">The offset to seek to (ignored; seeking is unsupported).</param>
    /// <param name="origin">The reference specifying how to apply the offset (ignored).</param>
    /// <returns>nothing. This method always throws.</returns>
    public override long Seek(long offset, System.IO.SeekOrigin origin)
    {
        throw new NotSupportedException();
    }

    /// <summary>
    /// Calling this method always throws a <see cref="NotSupportedException"/>.
    /// </summary>
    /// <param name="value">The new value for the stream length (ignored; unsupported).</param>
    public override void SetLength(long value)
    {
        throw new NotSupportedException();
    }

    /// <summary>
    /// Write data to the stream.
    /// </summary>
    /// <remarks>
    /// <para>
    /// With <c>CompressionMode.Compress</c>, uncompressed data written to this
    /// stream is emitted in compressed form to the captive stream; with
    /// <c>CompressionMode.Decompress</c>, previously compressed data written
    /// here is emitted in decompressed form.
    /// </para>
    /// <para>
    /// A <c>ZlibStream</c> can be used for <c>Read()</c> or <c>Write()</c>, but not both.
    /// </para>
    /// </remarks>
    /// <param name="buffer">The buffer holding data to write to the stream.</param>
    /// <param name="offset">the offset within that data array to find the first byte to write.</param>
    /// <param name="count">the number of bytes to write.</param>
    public override void Write(byte[] buffer, int offset, int count)
    {
        EnsureNotDisposed();
        _baseStream.Write(buffer, offset, count);
    }

    #endregion

    /// <summary>
    /// Compress a string into a byte array using ZLIB.
    /// </summary>
    /// <remarks>
    /// The string is first encoded with UTF-8, then compressed. Uncompress the
    /// result with <see cref="ZlibStream.UncompressString(byte[])"/>.
    /// </remarks>
    /// <seealso cref="ZlibStream.UncompressString(byte[])"/>
    /// <seealso cref="ZlibStream.CompressBuffer(byte[])"/>
    /// <seealso cref="GZipStream.CompressString(string)"/>
    /// <param name="s">A string to compress.</param>
    /// <returns>The string in compressed form</returns>
    public static byte[] CompressString(String s)
    {
        using (var output = new MemoryStream())
        {
            Stream zlib = new ZlibStream(output, CompressionMode.Compress, CompressionLevel.BestCompression);
            ZlibBaseStream.CompressString(s, zlib);
            return output.ToArray();
        }
    }

    /// <summary>
    /// Compress a byte array into a new byte array using ZLIB.
    /// </summary>
    /// <remarks>
    /// Uncompress it with <see cref="ZlibStream.UncompressBuffer(byte[])"/>.
    /// </remarks>
    /// <seealso cref="ZlibStream.CompressString(string)"/>
    /// <seealso cref="ZlibStream.UncompressBuffer(byte[])"/>
    /// <param name="b">A buffer to compress.</param>
    /// <returns>The data in compressed form</returns>
    public static byte[] CompressBuffer(byte[] b)
    {
        using (var output = new MemoryStream())
        {
            Stream zlib = new ZlibStream(output,
                                         CompressionMode.Compress,
                                         CompressionLevel.BestCompression);
            ZlibBaseStream.CompressBuffer(b, zlib);
            return output.ToArray();
        }
    }

    /// <summary>
    /// Uncompress a ZLIB-compressed byte array into a single string.
    /// </summary>
    /// <seealso cref="ZlibStream.CompressString(String)"/>
    /// <seealso cref="ZlibStream.UncompressBuffer(byte[])"/>
    /// <param name="compressed">A buffer containing ZLIB-compressed data.</param>
    /// <returns>The uncompressed string</returns>
    public static String UncompressString(byte[] compressed)
    {
        using (var input = new MemoryStream(compressed))
        {
            Stream zlib = new ZlibStream(input, CompressionMode.Decompress);
            return ZlibBaseStream.UncompressString(compressed, zlib);
        }
    }

    /// <summary>
    /// Uncompress a ZLIB-compressed byte array into a byte array.
    /// </summary>
    /// <seealso cref="ZlibStream.CompressBuffer(byte[])"/>
    /// <seealso cref="ZlibStream.UncompressString(byte[])"/>
    /// <param name="compressed">A buffer containing ZLIB-compressed data.</param>
    /// <returns>The data in uncompressed form</returns>
    public static byte[] UncompressBuffer(byte[] compressed)
    {
        using (var input = new MemoryStream(compressed))
        {
            Stream zlib = new ZlibStream(input,
                                         CompressionMode.Decompress);
            return ZlibBaseStream.UncompressBuffer(compressed, zlib);
        }
    }
}
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using WebServiceMarvel.Areas.HelpPage.ModelDescriptions;
using WebServiceMarvel.Areas.HelpPage.Models;

namespace WebServiceMarvel.Areas.HelpPage
{
    public static class HelpPageConfigurationExtensions
    {
        // Prefix for the per-API model keys stored in HttpConfiguration.Properties.
        private const string ApiModelPrefix = "MS_HelpPageApiModel_";

        /// <summary>
        /// Sets the documentation provider for help page.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="documentationProvider">The documentation provider.</param>
        public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
        {
            config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
        }

        /// <summary>
        /// Sets the objects that will be used by the formatters to produce sample requests/responses.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleObjects">The sample objects.</param>
        public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
        {
            HelpPageSampleGenerator generator = config.GetHelpPageSampleGenerator();
            generator.SampleObjects = sampleObjects;
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            // "*" matches any parameter set for this action.
            var key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action with parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            var key = new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            var key = new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action with specific parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            var key = new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActionSamples.Add(key, sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        public static void SetSampleForMediaType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified type and media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="type">The parameter type or return type of an action.</param>
        public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            var key = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            var key = new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            var key = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" });
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            var key = new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames);
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(key, type);
        }

        /// <summary>
        /// Gets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <returns>The help page sample generator.</returns>
        public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
        {
            // Created on first use and cached in the configuration's property bag.
            object generator = config.Properties.GetOrAdd(
                typeof(HelpPageSampleGenerator),
                _ => new HelpPageSampleGenerator());
            return (HelpPageSampleGenerator)generator;
        }

        /// <summary>
        /// Sets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleGenerator">The help page sample generator.</param>
        public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
        {
            config.Properties.AddOrUpdate(
                typeof(HelpPageSampleGenerator),
                addValueFactory: _ => sampleGenerator,
                updateValueFactory: (_, existing) => sampleGenerator);
        }

        /// <summary>
        /// Gets the model description generator.
        /// </summary>
        /// <param name="config">The configuration.</param>
        /// <returns>The <see cref="ModelDescriptionGenerator"/></returns>
        public static ModelDescriptionGenerator GetModelDescriptionGenerator(this HttpConfiguration config)
        {
            // Created lazily on first use and cached in the configuration's property bag.
            return (ModelDescriptionGenerator)config.Properties.GetOrAdd(
                typeof(ModelDescriptionGenerator),
                k => InitializeModelDescriptionGenerator(config));
        }

        /// <summary>
        /// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param>
        /// <returns>
        /// An <see cref="HelpPageApiModel"/>
        /// </returns>
        public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId)
        {
            object model;
            string modelId = ApiModelPrefix + apiDescriptionId;
            if (!config.Properties.TryGetValue(modelId, out model))
            {
                Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions;
                // Friendly IDs are compared case-insensitively.
                ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase));
                if (apiDescription != null)
                {
                    model = GenerateApiModel(apiDescription, config);
                    config.Properties.TryAdd(modelId, model);
                }
            }
            // NOTE(review): if no ApiDescription matches, model stays null and null is returned.
            return (HelpPageApiModel)model;
        }

        // Builds the full help-page model for one API: URI parameters, request/response
        // descriptions, and request/response samples.
        private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HttpConfiguration config)
        {
            HelpPageApiModel apiModel = new HelpPageApiModel()
            {
                ApiDescription = apiDescription,
            };
            ModelDescriptionGenerator modelGenerator = config.GetModelDescriptionGenerator();
            HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
            GenerateUriParameters(apiModel, modelGenerator);
            GenerateRequestModelDescription(apiModel, modelGenerator, sampleGenerator);
            GenerateResourceDescription(apiModel, modelGenerator);
            GenerateSamples(apiModel, sampleGenerator);
            return apiModel;
        }

        // Populates apiModel.UriParameters from the action's FromUri parameters.
        // Complex types without a string TypeConverter are expanded into their properties.
        private static void GenerateUriParameters(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
        {
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromUri)
                {
                    HttpParameterDescriptor parameterDescriptor = apiParameter.ParameterDescriptor;
                    Type parameterType = null;
                    ModelDescription typeDescription = null;
                    ComplexTypeModelDescription complexTypeDescription = null;
                    if (parameterDescriptor != null)
                    {
                        parameterType = parameterDescriptor.ParameterType;
                        typeDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                        complexTypeDescription = typeDescription as ComplexTypeModelDescription;
                    }

                    // Example:
                    // [TypeConverter(typeof(PointConverter))]
                    // public class Point
                    // {
                    //     public Point(int x, int y)
                    //     {
                    //         X = x;
                    //         Y = y;
                    //     }
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Class Point is bindable with a TypeConverter, so Point will be added to UriParameters collection.
                    //
                    // public class Point
                    // {
                    //     public int X { get; set; }
                    //     public int Y { get; set; }
                    // }
                    // Regular complex class Point will have properties X and Y added to UriParameters collection.
                    if (complexTypeDescription != null
                        && !IsBindableWithTypeConverter(parameterType))
                    {
                        foreach (ParameterDescription uriParameter in complexTypeDescription.Properties)
                        {
                            apiModel.UriParameters.Add(uriParameter);
                        }
                    }
                    else if (parameterDescriptor != null)
                    {
                        ParameterDescription uriParameter =
                            AddParameterDescription(apiModel, apiParameter, typeDescription);

                        if (!parameterDescriptor.IsOptional)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Required" });
                        }

                        object defaultValue = parameterDescriptor.DefaultValue;
                        if (defaultValue != null)
                        {
                            uriParameter.Annotations.Add(new ParameterAnnotation() { Documentation = "Default value is " + Convert.ToString(defaultValue, CultureInfo.InvariantCulture) });
                        }
                    }
                    else
                    {
                        Debug.Assert(parameterDescriptor == null);

                        // If parameterDescriptor is null, this is an undeclared route parameter which only occurs
                        // when source is FromUri. Ignored in request model and among resource parameters but listed
                        // as a simple string here.
                        ModelDescription modelDescription = modelGenerator.GetOrCreateModelDescription(typeof(string));
                        AddParameterDescription(apiModel, apiParameter, modelDescription);
                    }
                }
            }
        }

        // True when the type can be bound from a route/query string via a TypeConverter.
        private static bool IsBindableWithTypeConverter(Type parameterType)
        {
            if (parameterType == null)
            {
                return false;
            }

            return TypeDescriptor.GetConverter(parameterType).CanConvertFrom(typeof(string));
        }

        // Appends one URI parameter entry to the model and returns it so callers can annotate it.
        private static ParameterDescription AddParameterDescription(HelpPageApiModel apiModel,
            ApiParameterDescription apiParameter, ModelDescription typeDescription)
        {
            ParameterDescription parameterDescription = new ParameterDescription
            {
                Name = apiParameter.Name,
                Documentation = apiParameter.Documentation,
                TypeDescription = typeDescription,
            };

            apiModel.UriParameters.Add(parameterDescription);
            return parameterDescription;
        }

        // Determines the request body model: either the FromBody parameter's type, or, for
        // actions taking an HttpRequestMessage, the type resolved by the sample generator.
        private static void GenerateRequestModelDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator, HelpPageSampleGenerator sampleGenerator)
        {
            ApiDescription apiDescription = apiModel.ApiDescription;
            foreach (ApiParameterDescription apiParameter in apiDescription.ParameterDescriptions)
            {
                if (apiParameter.Source == ApiParameterSource.FromBody)
                {
                    Type parameterType = apiParameter.ParameterDescriptor.ParameterType;
                    apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    apiModel.RequestDocumentation = apiParameter.Documentation;
                }
                else if (apiParameter.ParameterDescriptor != null &&
                    apiParameter.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage))
                {
                    Type parameterType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);

                    if (parameterType != null)
                    {
                        apiModel.RequestModelDescription = modelGenerator.GetOrCreateModelDescription(parameterType);
                    }
                }
            }
        }

        // Describes the response resource; prefers the documented ResponseType over the declared type.
        private static void GenerateResourceDescription(HelpPageApiModel apiModel, ModelDescriptionGenerator modelGenerator)
        {
            ResponseDescription response = apiModel.ApiDescription.ResponseDescription;
            Type responseType = response.ResponseType ?? response.DeclaredType;
            if (responseType != null && responseType != typeof(void))
            {
                apiModel.ResourceDescription = modelGenerator.GetOrCreateModelDescription(responseType);
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")]
        private static void GenerateSamples(HelpPageApiModel apiModel, HelpPageSampleGenerator sampleGenerator)
        {
            try
            {
                foreach (var item in sampleGenerator.GetSampleRequests(apiModel.ApiDescription))
                {
                    apiModel.SampleRequests.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }

                foreach (var item in sampleGenerator.GetSampleResponses(apiModel.ApiDescription))
                {
                    apiModel.SampleResponses.Add(item.Key, item.Value);
                    LogInvalidSampleAsError(apiModel, item.Value);
                }
            }
            catch (Exception e)
            {
                // Sample generation failures are surfaced on the help page, not thrown.
                apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture, "An exception has occurred while generating the sample. Exception message: {0}", HelpPageSampleGenerator.UnwrapException(e).Message));
            }
        }

        // Finds the parameter that carries the request body (FromBody or HttpRequestMessage)
        // and resolves its resource type; returns false when there is none.
        private static bool TryGetResourceParameter(ApiDescription apiDescription, HttpConfiguration config, out ApiParameterDescription parameterDescription, out Type resourceType)
        {
            parameterDescription = apiDescription.ParameterDescriptions.FirstOrDefault(
                p => p.Source == ApiParameterSource.FromBody ||
                    (p.ParameterDescriptor != null && p.ParameterDescriptor.ParameterType == typeof(HttpRequestMessage)));

            if (parameterDescription == null)
            {
                resourceType = null;
                return false;
            }

            resourceType = parameterDescription.ParameterDescriptor.ParameterType;

            if (resourceType == typeof(HttpRequestMessage))
            {
                HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator();
                resourceType = sampleGenerator.ResolveHttpRequestMessageType(apiDescription);
            }

            if (resourceType == null)
            {
                parameterDescription = null;
                return false;
            }

            return true;
        }

        // Pre-warms the generator with a model description for every API's resource type.
        private static ModelDescriptionGenerator InitializeModelDescriptionGenerator(HttpConfiguration config)
        {
            ModelDescriptionGenerator modelGenerator = new ModelDescriptionGenerator(config);
            Collection<ApiDescription> apis = config.Services.GetApiExplorer().ApiDescriptions;
            foreach (ApiDescription api in apis)
            {
                ApiParameterDescription parameterDescription;
                Type parameterType;
                if (TryGetResourceParameter(api, config, out parameterDescription, out parameterType))
                {
                    modelGenerator.GetOrCreateModelDescription(parameterType);
                }
            }
            return modelGenerator;
        }

        // Records an InvalidSample's message in the model's error list; ignores valid samples.
        private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample)
        {
            InvalidSample invalidSample = sample as InvalidSample;
            if (invalidSample != null)
            {
                apiModel.ErrorMessages.Add(invalidSample.ErrorMessage);
            }
        }
    }
}
namespace Tests.SharpArch.Web.Mvc.ModelBinder
{
    using System;
    using System.Collections.Generic;
    using System.Collections.Specialized;
    using System.Web.Mvc;

    using Castle.MicroKernel.Registration;
    using Castle.Windsor;

    using CommonServiceLocator.WindsorAdapter;

    using Moq;

    using global::SharpArch.Domain.DomainModel;
    using global::SharpArch.Domain.PersistenceSupport;
    using global::SharpArch.Web.Mvc.ModelBinder;

    using Microsoft.Practices.ServiceLocation;

    using NUnit.Framework;

    // Tests for SharpModelBinder: binding simple, nested, and collection-valued
    // entity properties from form-style NameValueCollection input.
    [TestFixture]
    public class SharpModelBinderTests
    {
        [Test]
        public void CanBindModelWithCollection()
        {
            var id = 2;
            var employeeName = "Michael";

            // Arrange: two values under the same "Employee.Reports" key — the
            // binder is expected to resolve each ID into a collection element.
            var formCollection = new NameValueCollection
                {
                    { "Employee.Id", id.ToString() },
                    { "Employee.Name", employeeName },
                    { "Employee.Reports", "3" },
                    { "Employee.Reports", "4" },
                    { "Employee.Manager", "12" }
                };

            var valueProvider = new NameValueCollectionValueProvider(formCollection, null);
            var modelMetadata = ModelMetadataProviders.Current.GetMetadataForType(null, typeof(Employee));

            var bindingContext = new ModelBindingContext
                {
                    ModelName = "Employee",
                    ValueProvider = valueProvider,
                    ModelMetadata = modelMetadata
                };

            DefaultModelBinder target = new SharpModelBinder();

            var controllerContext = new ControllerContext();

            // Act
            var result = (Employee)target.BindModel(controllerContext, bindingContext);

            // Assert
            Assert.AreEqual(id, result.Id);
            Assert.AreEqual(employeeName, result.Name);
            Assert.AreEqual(2, result.Reports.Count);
        }

        [Test]
        public void CanBindModelWithEntityCollection()
        {
            var id = 2;
            var employeeName = "Michael";

            // Arrange: indexed syntax ("Reports[0].Name") builds new child entities by property.
            var formCollection = new NameValueCollection
                {
                    { "Employee.Id", id.ToString() },
                    { "Employee.Name", employeeName },
                    { "Employee.Reports[0].Name", "Michael" },
                    { "Employee.Reports[1].Name", "Alec" },
                    { "Employee.Manager", "12" }
                };

            var valueProvider = new NameValueCollectionValueProvider(formCollection, null);
            var modelMetadata = ModelMetadataProviders.Current.GetMetadataForType(null, typeof(Employee));

            var bindingContext = new ModelBindingContext
                {
                    ModelName = "Employee",
                    ValueProvider = valueProvider,
                    ModelMetadata = modelMetadata
                };

            DefaultModelBinder target = new SharpModelBinder();

            var controllerContext = new ControllerContext();

            // Act
            var result = (Employee)target.BindModel(controllerContext, bindingContext);

            // Assert
            Assert.AreEqual(id, result.Id);
            Assert.AreEqual(employeeName, result.Name);
            Assert.GreaterOrEqual(result.Reports.Count, 2);
        }

        [Test]
        public void CanBindModelWithNestedEntities()
        {
            var id = 2;
            var employeeName = "Michael";
            var managerName = "Tobias";
            var managerManagerName = "Scott";

            // Arrange: dotted paths two levels deep (Manager.Manager.Name).
            var formCollection = new NameValueCollection
                {
                    { "Employee.Id", id.ToString() },
                    { "Employee.Name", employeeName },
                    { "Employee.Manager.Name", managerName },
                    { "Employee.Manager.Manager.Name", managerManagerName }
                };

            var valueProvider = new NameValueCollectionValueProvider(formCollection, null);
            var modelMetadata = ModelMetadataProviders.Current.GetMetadataForType(null, typeof(Employee));

            var bindingContext = new ModelBindingContext
                {
                    ModelName = "Employee",
                    ValueProvider = valueProvider,
                    ModelMetadata = modelMetadata
                };

            DefaultModelBinder target = new SharpModelBinder();

            var controllerContext = new ControllerContext();

            // Act
            var result = (Employee)target.BindModel(controllerContext, bindingContext);

            // Assert
            Assert.AreEqual(id, result.Id);
            Assert.AreEqual(employeeName, result.Name);
            Assert.AreEqual(managerName, result.Manager.Name);
            Assert.AreEqual(managerManagerName, result.Manager.Manager.Name);
        }

        [Test]
        public void CanBindSimpleModel()
        {
            var id = 2;
            var employeeName = "Michael";

            // Arrange
            var formCollection = new NameValueCollection
                {
                    { "Employee.Id", id.ToString() },
                    { "Employee.Name", employeeName },
                };

            var valueProvider = new NameValueCollectionValueProvider(formCollection, null);
            var modelMetadata = ModelMetadataProviders.Current.GetMetadataForType(null, typeof(Employee));

            var bindingContext = new ModelBindingContext
                {
                    ModelName = "Employee",
                    ValueProvider = valueProvider,
                    ModelMetadata = modelMetadata
                };

            DefaultModelBinder target = new SharpModelBinder();

            var controllerContext = new ControllerContext();

            // Act
            var result = (Employee)target.BindModel(controllerContext, bindingContext);

            // Assert
            Assert.AreEqual(id, result.Id);
            Assert.AreEqual(employeeName, result.Name);
        }

        [Test]
        public void CanBindSimpleModelWithGuidId()
        {
            // NOTE(review): new Guid() is Guid.Empty, not a random value — presumably
            // Guid.NewGuid() was intended; confirm whether binding an empty GUID is the goal.
            var id = new Guid();
            var territoryName = "Someplace, USA";

            // Arrange
            var formCollection = new NameValueCollection
                {
                    { "Territory.Id", id.ToString() },
                    { "Territory.Name", territoryName },
                };

            var valueProvider = new NameValueCollectionValueProvider(formCollection, null);
            var modelMetadata = ModelMetadataProviders.Current.GetMetadataForType(null, typeof(Territory));

            var bindingContext = new ModelBindingContext
                {
                    ModelName = "Territory",
                    ValueProvider = valueProvider,
                    ModelMetadata = modelMetadata
                };

            DefaultModelBinder target = new SharpModelBinder();

            var controllerContext = new ControllerContext();

            // Act
            var result = (Territory)target.BindModel(controllerContext, bindingContext);

            // Assert
            Assert.AreEqual(id, result.Id);
            Assert.AreEqual(territoryName, result.Name);
        }

        [Test]
        public void CanBindSimpleModelWithGuidIdAndNullValue()
        {
            var territoryName = "Someplace, USA";

            // Arrange: empty Id string — binding must not throw, only Name is asserted.
            var formCollection = new NameValueCollection
                {
                    { "Territory.Id", string.Empty },
                    { "Territory.Name", territoryName },
                };

            var valueProvider = new NameValueCollectionValueProvider(formCollection, null);
            var modelMetadata = ModelMetadataProviders.Current.GetMetadataForType(null, typeof(Territory));

            var bindingContext = new ModelBindingContext
                {
                    ModelName = "Territory",
                    ValueProvider = valueProvider,
                    ModelMetadata = modelMetadata
                };

            DefaultModelBinder target = new SharpModelBinder();

            var controllerContext = new ControllerContext();

            // Act
            var result = (Territory)target.BindModel(controllerContext, bindingContext);

            // Assert
            Assert.AreEqual(territoryName, result.Name);
        }

        // One-time fixture setup: registers a mocked repository in Windsor and points the
        // ServiceLocator at it so the binder can resolve entities by ID.
        // NOTE(review): [TestFixtureSetUp] is NUnit 2.x; NUnit 3 renamed it to [OneTimeSetUp].
        [TestFixtureSetUp]
        public void SetUp()
        {
            var mockRepository = new Mock<IRepositoryWithTypedId<Employee, int>>();
            var windsorContainer = new WindsorContainer();
            // Any requested ID yields a fresh Employee carrying that ID.
            mockRepository.Setup(r => r.Get(It.IsAny<int>())).Returns((int newId) => new Employee(newId));

            windsorContainer.Register(
                Component
                    .For<IRepositoryWithTypedId<Employee, int>>()
                    .Instance(mockRepository.Object));

            ServiceLocator.SetLocatorProvider(() => new WindsorServiceLocator(windsorContainer));
        }

        // Test entity with an int ID, a self-referencing Manager, and a Reports collection.
        public class Employee : Entity
        {
            public Employee()
            {
                this.Reports = new List<Employee>();
            }

            public Employee(int id)
            {
                this.Id = id;
            }

            public Employee Manager { get; set; }

            public string Name { get; set; }

            public IList<Employee> Reports { get; protected set; }
        }

        // Test entity keyed by a Guid rather than an int.
        public class Territory : EntityWithTypedId<Guid>
        {
            public string Name { get; set; }
        }
    }
}
// // //This file contains an implementation of the MD4 hash algorithm. //The implementation was derived from the example source code that //is provided in RFC 1320 - "The MD4 Message-Digest Algorithm". // //The code has been ported from C to C#. //The original copyright is preserved here. // // // Copyright (C) 1991-2, RSA Data Security, Inc. Created 1991. All // rights reserved. // // License to copy and use this software is granted provided that it // is identified as the "RSA Data Security, Inc. MD4 Message-Digest // Algorithm" in all material mentioning or referencing this software // or this function. // // License is also granted to make and use derivative works provided // that such works are identified as "derived from the RSA Data // Security, Inc. MD4 Message-Digest Algorithm" in all material // mentioning or referencing the derived work. // // RSA Data Security, Inc. makes no representations concerning either // the merchantability of this software or the suitability of this // software for any particular purpose. It is provided "as is" // without express or implied warranty of any kind. // // These notices must be retained in any copies of any part of this // documentation and/or software. // // using System; using System.Diagnostics; using System.Text; namespace System.Security.Cryptography { /// <summary> /// This class implements the MD4 message digest algorithm. /// The implementation is a C# port of the original RSA C implementation that is /// provided in RFC 1320. /// </summary> public class MD4Context { public MD4Context() { Reset(); } public const int DigestLength = 16; /// <summary> /// This field is "true" if GetDigest has been called. /// It cannot be called again, unless Reset is called. /// </summary> bool _done; public void Clear() { _state0 = 0; _state1 = 0; _state2 = 0; _state3 = 0; _count = 0; for (int i = 0; i < 64; i++) _buffer[i] = 0; } /// <summary> /// This method can be used to reset the state of the MD4 context. 
After returning, the /// state of the MD4Context is equivalent to its state immediately after the constructor /// finished. /// </summary> public void Reset() { // Load magic initialization constants. _count = 0; _state0 = 0x67452301; _state1 = 0xefcdab89; _state2 = 0x98badcfe; _state3 = 0x10325476; _done = false; } // //MD4 basic transformation. Transforms state based on block. // void Transform(byte[]/*!*/ block, int offset) { if (offset < 0) throw new ArgumentException("offset cannot be negative."); Decode(_block, 0, block, offset, 64); Transform(_block); // Zeroize sensitive information. // MD4_memset ((POINTER)x, 0, sizeof (x)); } #region Constants for MD4Transform routine. const int S11 = 3; const int S12 = 7; const int S13 = 11; const int S14 = 19; const int S21 = 3; const int S22 = 5; const int S23 = 9; const int S24 = 13; const int S31 = 3; const int S32 = 9; const int S33 = 11; const int S34 = 15; #endregion /// <summary> /// This routine transforms the current MD4 context, using a block of input. /// The input is 16 words, where each word is 32 unsigned bits. This method /// is the core of the MD4 algorithm. 
/// </summary> /// <param name="x">The message data to use to transform the context.</param> void Transform(uint[] x) { uint a = _state0; uint b = _state1; uint c = _state2; uint d = _state3; // Round 1 a = FF(a, b, c, d, x[00], S11); // 1 d = FF(d, a, b, c, x[01], S12); // 2 c = FF(c, d, a, b, x[02], S13); // 3 b = FF(b, c, d, a, x[03], S14); // 4 a = FF(a, b, c, d, x[04], S11); // 5 d = FF(d, a, b, c, x[05], S12); // 6 c = FF(c, d, a, b, x[06], S13); // 7 b = FF(b, c, d, a, x[07], S14); // 8 a = FF(a, b, c, d, x[08], S11); // 9 d = FF(d, a, b, c, x[09], S12); // 10 c = FF(c, d, a, b, x[10], S13); // 11 b = FF(b, c, d, a, x[11], S14); // 12 a = FF(a, b, c, d, x[12], S11); // 13 d = FF(d, a, b, c, x[13], S12); // 14 c = FF(c, d, a, b, x[14], S13); // 15 b = FF(b, c, d, a, x[15], S14); // 16 // Round 2 a = GG(a, b, c, d, x[00], S21); // 17 d = GG(d, a, b, c, x[04], S22); // 18 c = GG(c, d, a, b, x[08], S23); // 19 b = GG(b, c, d, a, x[12], S24); // 20 a = GG(a, b, c, d, x[01], S21); // 21 d = GG(d, a, b, c, x[05], S22); // 22 c = GG(c, d, a, b, x[09], S23); // 23 b = GG(b, c, d, a, x[13], S24); // 24 a = GG(a, b, c, d, x[02], S21); // 25 d = GG(d, a, b, c, x[06], S22); // 26 c = GG(c, d, a, b, x[10], S23); // 27 b = GG(b, c, d, a, x[14], S24); // 28 a = GG(a, b, c, d, x[03], S21); // 29 d = GG(d, a, b, c, x[07], S22); // 30 c = GG(c, d, a, b, x[11], S23); // 31 b = GG(b, c, d, a, x[15], S24); // 32 // Round 3 a = HH(a, b, c, d, x[00], S31); // 33 d = HH(d, a, b, c, x[08], S32); // 34 c = HH(c, d, a, b, x[04], S33); // 35 b = HH(b, c, d, a, x[12], S34); // 36 a = HH(a, b, c, d, x[02], S31); // 37 d = HH(d, a, b, c, x[10], S32); // 38 c = HH(c, d, a, b, x[06], S33); // 39 b = HH(b, c, d, a, x[14], S34); // 40 a = HH(a, b, c, d, x[01], S31); // 41 d = HH(d, a, b, c, x[09], S32); // 42 c = HH(c, d, a, b, x[05], S33); // 43 b = HH(b, c, d, a, x[13], S34); // 44 a = HH(a, b, c, d, x[03], S31); // 45 d = HH(d, a, b, c, x[11], S32); // 46 c = HH(c, d, a, b, x[07], S33); // 
47 b = HH(b, c, d, a, x[15], S34); // 48 _state0 = unchecked(_state0 + a); _state1 = unchecked(_state1 + b); _state2 = unchecked(_state2 + c); _state3 = unchecked(_state3 + d); } public const int BytesPerTransform = 64; public const int WordsPerTransform = 16; uint _state0; // state A uint _state1; // state B uint _state2; // state C uint _state3; // state D Int64 _count; // number of bits, modulo 2^64 /// <summary> /// This buffer holds data that was submitted using the Update method, but which /// was too short to complete a full transform block (64 bytes). /// </summary> readonly byte[]/*!*/ _buffer = new byte[BytesPerTransform]; /// <summary> /// This buffer contains the current block (of message data) that is being transformed. /// </summary> readonly uint[]/*!*/ _block = new uint[WordsPerTransform]; static MD4Context() { PADDING = new byte[BytesPerTransform]; Array.Clear(PADDING, 0, 64); PADDING[0] = 0x80; } static readonly byte[]/*!*/ PADDING; // //{ // 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 //}; // // F, G and H are basic MD4 functions. // static uint F(uint x, uint y, uint z) { return (x & y) | ((~x) & z); } static uint G(uint x, uint y, uint z) { return (x & y) | (x & z) | (y & z); } static uint H(uint x, uint y, uint z) { return x ^ y ^ z; } // ROTATE_LEFT rotates x left n bits. // static uint RotateLeft(uint x, int n) { return (x << n) | (x >> (32 - n)); } // FF, GG and HH are transformations for rounds 1, 2 and 3 // Rotation is separate from addition to prevent recomputation // (Or at least it used to be, when these were C macros. 
static uint FF(uint a, uint b, uint c, uint d, uint x, int s)
{
    // Round 1 step: a = (a + F(b,c,d) + x) <<< s (no additive constant in round 1).
    a = unchecked(a + F(b, c, d) + x);
    return RotateLeft(a, s);
}

static uint GG(uint a, uint b, uint c, uint d, uint x, int s)
{
    // Round 2 step, with MD4's round-2 additive constant 0x5a827999.
    a = unchecked(a + G(b, c, d) + x + (uint)0x5a827999);
    return RotateLeft(a, s);
}

static uint HH(uint a, uint b, uint c, uint d, uint x, int s)
{
    // Round 3 step, with MD4's round-3 additive constant 0x6ed9eba1.
    a = unchecked(a + H(b, c, d) + x + (uint)0x6ed9eba1);
    return RotateLeft(a, s);
}

// Decodes input (unsigned char) into output (uint). Assumes len is
// a multiple of 4.
//
/// <summary>
/// Decodes a little-endian byte sequence into 32-bit words.
/// </summary>
/// <param name="output">Destination word array.</param>
/// <param name="outputoffset">Index in the destination at which writing starts.</param>
/// <param name="input">Source byte array.</param>
/// <param name="inputoffset">Index in the source at which reading starts.</param>
/// <param name="length">number of BYTES to transfer</param>
static void Decode(uint[]/*!*/ output, int outputoffset, byte[]/*!*/ input, int inputoffset, int length)
{
    Debug.Assert(length % 4 == 0);
    int outpos = outputoffset;
    for (int j = 0; j < length; j += 4)
    {
        // Assemble one little-endian word from four consecutive bytes.
        uint value = ((uint)input[inputoffset + j])
            | (((uint)input[inputoffset + j + 1]) << 8)
            | (((uint)input[inputoffset + j + 2]) << 16)
            | (((uint)input[inputoffset + j + 3]) << 24);
        output[outpos] = value;
        outpos++;
    }
}

/// <summary>
/// Encodes a 32-bit value into 4 bytes, least-significant byte first.
/// </summary>
static void EncodeLe(uint value, byte[]/*!*/ output, int pos)
{
    output[pos] = (byte)(value & 0xff);
    output[pos + 1] = (byte)((value >> 8) & 0xff);
    output[pos + 2] = (byte)((value >> 16) & 0xff);
    output[pos + 3] = (byte)((value >> 24) & 0xff);
}

/// <summary>
/// MD4 block update operation. Continues an MD4 message-digest operation,
/// processing another message block, and updating the context.
/// </summary>
/// <param name="input">The buffer containing data to process.</param>
/// <param name="offset">The offset within the buffer where the data begins.</param>
/// <param name="length">The length of the data.</param>
public void Update(byte[]/*!*/ input, int offset, int length)
{
    if (_done)
        throw new InvalidOperationException("The hash context has been closed (GetDigest has been called. It cannot be reused until Reset is called.");
    if (length == 0)
        return;

    // Compute number of bytes mod 64 (how much partial data is already buffered).
    int index = (int)((_count >> 3) & 0x3F);

    // BUGFIX: widen before shifting. "length << 3" is a 32-bit shift and
    // overflowed for a single call with length >= 2^28 bytes (256 MB),
    // corrupting the bit count used for padding/length encoding.
    _count += (long)length << 3;

    int partLen = 64 - index;

    // Transform as many complete 64-byte blocks as possible.
    int i;
    if (length >= partLen)
    {
        // Complete the partially-filled buffer first, then consume full
        // blocks directly from the caller's array.
        // MD4_memcpy((POINTER)&buffer_pinned[index], (POINTER)&input[offset], partLen);
        Array.Copy(input, offset, _buffer, index, partLen);
        Transform(_buffer, 0);

        for (i = partLen; i + 63 < length; i += 64)
            Transform(input, offset + i);

        index = 0;
    }
    else
        i = 0;

    // Buffer remaining input (less than a full block).
    // MD4_memcpy((POINTER)&_buffer[index], (POINTER)&input[i], inputLen-i);
    // MD4_memcpy((POINTER)&buffer_pinned[index], (POINTER)&input[offset + i], inputLen-i);
    Array.Copy(input, offset + i, _buffer, index, length - i);
}

/// <summary>
/// Convenience helper: hashes an entire buffer in one shot.
/// </summary>
public static MD4Digest GetDigest(byte[]/*!*/ buffer)
{
    return GetDigest(buffer, 0, buffer.Length);
}

/// <summary>
/// Convenience helper: hashes a buffer slice in one shot using a fresh context.
/// </summary>
public static MD4Digest GetDigest(byte[]/*!*/ buffer, int offset, int length)
{
    MD4Context context = new MD4Context();
    context.Update(buffer, offset, length);
    return context.GetDigest();
}

// MD4 finalization. Ends an MD4 message-digest operation, writing the
// the message digest and zeroizing the context.
//
public MD4Digest GetDigest()
{
    if (_done)
        throw new InvalidOperationException("GetDigest cannot be called twice for a single hash sequence. Call Reset() to reset the context.");

    byte[]/*!*/ bits = new byte[8];

    // Save number of bits (little-endian 64-bit value, low word first).
    EncodeLe((uint)(_count & 0xffffffffu), bits, 0);
    EncodeLe((uint)(_count >> 32), bits, 4);

    // Pad out to 56 mod 64, leaving 8 bytes for the length field.
    int index = (int)((_count >> 3) & 0x3f);
    int padLen = (index < 56) ? (56 - index) : (120 - index);
    Update(PADDING, 0, padLen);

    // Append length (before padding)
    Update(bits, 0, 8);

    // Store state in digest
    MD4Digest digest;
    digest.state0 = _state0;
    digest.state1 = _state1;
    digest.state2 = _state2;
    digest.state3 = _state3;

    // Zeroize the context and mark it closed until Reset is called.
    Clear();
    _done = true;
    return digest;
}
}

/// <summary>
/// The 128-bit result of an MD4 hash, stored as four 32-bit state words.
/// </summary>
public struct MD4Digest
{
    public uint state0;
    public uint state1;
    public uint state2;
    public uint state3;

    // Writes a 32-bit value into the destination, least-significant byte first.
    static void PackLe(byte[]/*!*/ dest, int offset, uint value)
    {
        dest[offset + 0] = (byte)((value) & 0xff);
        dest[offset + 1] = (byte)((value >> 8) & 0xff);
        dest[offset + 2] = (byte)((value >> 16) & 0xff);
        dest[offset + 3] = (byte)((value >> 24) & 0xff);
    }

    public const int DigestLength = 16;

    /// <summary>
    /// Returns the digest as a freshly-allocated 16-byte array (little-endian words).
    /// </summary>
    public byte[]/*!*/ ToArray()
    {
        byte[] result = new byte[DigestLength];
        ToArray(result);
        return result;
    }

    /// <summary>
    /// Writes the digest into the first 16 bytes of <paramref name="result"/>.
    /// </summary>
    /// <exception cref="ArgumentException">The destination is shorter than 16 bytes.</exception>
    public void ToArray(byte[]/*!*/ result)
    {
        // Consistency: use the named constant instead of the magic literal 0x10.
        if (result.Length < DigestLength)
            throw new ArgumentException("Input array is too short.");
        PackLe(result, 0, state0);
        PackLe(result, 4, state1);
        PackLe(result, 8, state2);
        PackLe(result, 12, state3);
    }

    public static implicit operator byte[]/*!*/(MD4Digest digest)
    {
        return digest.ToArray();
    }

    /// <summary>
    /// Returns the digest as a 32-character lowercase hexadecimal string.
    /// </summary>
    override public string/*!*/ ToString()
    {
        string hex = "0123456789abcdef";
        byte[] arr = ToArray();
        StringBuilder buffer = new StringBuilder(DigestLength * 2);
        for (int i = 0; i < DigestLength; i++)
        {
            byte b = arr[i];
            buffer.Append(hex[b >> 4]);
            buffer.Append(hex[b & 0xf]);
        }
        return buffer.ToString();
    }
}
}
/*
   Copyright 2019 Esri
   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at
       http://www.apache.org/licenses/LICENSE-2.0
   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using ArcGIS.Desktop.Framework;
using ArcGIS.Desktop.Framework.Contracts;
using ArcGIS.Desktop.Mapping.Controls;
using ArcGIS.Desktop.Mapping;
using ArcGIS.Core.CIM;
using ArcGIS.Desktop.Mapping.Events;
using ArcGIS.Desktop.Framework.Threading.Tasks;
using System.Windows.Input;

namespace QueryBuilderControl
{
    /// <summary>
    /// View model for the definition-query dock pane. Tracks the currently
    /// selected map member, hosts the query builder control, and saves the
    /// edited definition query back to the layer/table on request.
    /// </summary>
    internal class DefinitionQueryDockPaneViewModel : DockPane
    {
        private const string _dockPaneID = "QueryBuilderControl_DefinitionQueryDockPane";

        // The expression the control was initialized with; used to detect edits.
        private string _origExpression;

        protected DefinitionQueryDockPaneViewModel() { }

        /// <summary>
        /// Show the DockPane.
        /// </summary>
        internal static void Show()
        {
            DockPane pane = FrameworkApplication.DockPaneManager.Find(_dockPaneID);
            if (pane == null)
                return;

            pane.Activate();

            // Initialize the pane from the active map view's TOC selection.
            var vm = pane as DefinitionQueryDockPaneViewModel;
            if (vm != null && MapView.Active != null)
            {
                vm.ClearControlProperties();
                vm.BuildControlProperties(MapView.Active);
            }
        }

        private bool _subscribed = false;

        /// <summary>
        /// When visibility of dockpane changes, subscribe or unsubscribe from events.
        /// </summary>
        /// <param name="isVisible">is the dockpane visible?</param>
        protected override void OnShow(bool isVisible)
        {
            if (isVisible)
            {
                if (!_subscribed)
                {
                    _subscribed = true;
                    // connect to events
                    ArcGIS.Desktop.Mapping.Events.TOCSelectionChangedEvent.Subscribe(OnSelectedLayersChanged);
                    ArcGIS.Desktop.Core.Events.ProjectClosingEvent.Subscribe(OnProjectClosing);
                }
            }
            else
            {
                if (_subscribed)
                {
                    _subscribed = false;
                    // unsubscribe from events
                    ArcGIS.Desktop.Mapping.Events.TOCSelectionChangedEvent.Unsubscribe(OnSelectedLayersChanged);
                    ArcGIS.Desktop.Core.Events.ProjectClosingEvent.Unsubscribe(OnProjectClosing);
                }
            }
            base.OnShow(isVisible);
        }

        #region Properties

        /// <summary>
        /// Gets and sets the QueryBuilderControlProperties to bind to the QueryBuilderControl.
        /// </summary>
        private QueryBuilderControlProperties _props = null;
        public QueryBuilderControlProperties ControlProperties
        {
            get { return _props; }
            set { SetProperty(ref _props, value); }
        }

        /// <summary>
        /// Gets and sets the query expression in the QueryBuilderControl.
        /// </summary>
        private string _expression = string.Empty;
        public string Expression
        {
            get { return _expression; }
            set { _expression = value; }  // doesn't bind in xaml so no need to worry about NotifyPropertyChanged
        }

        /// <summary>
        /// Gets and sets the name of currently selected mapMember.
        /// </summary>
        private string _mapMemberName;
        public string MapMemberName
        {
            get { return _mapMemberName; }
            set { SetProperty(ref _mapMemberName, value); }
        }

        /// <summary>
        /// Gets the Apply command to write query definition to mapMember.
        /// </summary>
        private RelayCommand _applyCommand;
        public ICommand ApplyCommand
        {
            get
            {
                if (_applyCommand == null)
                    _applyCommand = new RelayCommand(() => SaveChanges(), CanSaveChanges);
                return _applyCommand;
            }
        }

        #endregion

        #region Events

        /// <summary>
        /// Event handler for ProjectClosing event.
        /// </summary>
        /// <param name="args">The ProjectClosing arguments.</param>
        /// <returns></returns>
        private Task OnProjectClosing(ArcGIS.Desktop.Core.Events.ProjectClosingEventArgs args)
        {
            // if already Canceled, ignore
            if (args.Cancel)
                return Task.CompletedTask;

            // save current changes
            SaveChanges();

            // reset the control
            ClearControlProperties();
            return Task.CompletedTask;
        }

        /// <summary>
        /// Event handler for TOCSelectionChangedEvent event
        /// </summary>
        /// <param name="args">The event arguments.</param>
        private void OnSelectedLayersChanged(MapViewEventArgs args)
        {
            // save current changes
            SaveChanges();

            // set up for the next selected mapMember
            BuildControlProperties(args.MapView);
        }

        #endregion

        /// <summary>
        /// Build a QueryBuilderControlProperties for the specified mapView. Finds the first BasicFeatureLayer or StandAloneTable highlighted in the TOC.
        /// </summary>
        /// <param name="mapView">a mapView.</param>
        private void BuildControlProperties(MapView mapView)
        {
            MapMember mapMember = null;
            if (mapView != null)
            {
                // only interested in basicFeatureLayers ... they are the ones with definition queries
                var selectedTOCLayers = mapView.GetSelectedLayers().OfType<BasicFeatureLayer>();
                var selectedTOCTables = mapView.GetSelectedStandaloneTables();

                // take layers over tables... but only take the first
                if (selectedTOCLayers.Count() > 0)
                    mapMember = selectedTOCLayers.First();
                else if (selectedTOCTables.Count() > 0)
                    mapMember = selectedTOCTables.First();
            }

            // build the control properties
            BuildControlProperties(mapMember);
        }

        /// <summary>
        /// Initialize a QueryBuilderControlProperties with the specified mapMember. Use the current definition query of that mapMember (if it exists) to extend the
        /// initialization.
        /// </summary>
        /// <param name="mapMember">MapMember to initialize the QueryBuilderControlProperties.</param>
        private void BuildControlProperties(MapMember mapMember)
        {
            // find the current definition query for the mapMember
            string expression = "";
            BasicFeatureLayer fLayer = mapMember as BasicFeatureLayer;
            StandaloneTable table = mapMember as StandaloneTable;
            if (fLayer != null)
                expression = fLayer.DefinitionQuery;
            else if (table != null)
                expression = table.DefinitionQuery;

            // create it
            var props = new QueryBuilderControlProperties()
            {
                MapMember = mapMember,
                Expression = expression,
            };

            // set the binding properties
            this.ControlProperties = props;
            MapMemberName = mapMember?.Name ?? "";

            // keep track of the original expression
            _origExpression = expression;
        }

        /// <summary>
        /// Use a null mapMember to reset the QueryBuilderControlProperties.
        /// </summary>
        private void ClearControlProperties()
        {
            // reset the control
            MapMember mapMember = null;
            BuildControlProperties(mapMember);
        }

        /// <summary>
        /// Has the current expression been altered?
        /// </summary>
        /// <returns>true if the current expression has been altered. False otherwise.</returns>
        private bool CanSaveChanges()
        {
            string newExpression = Expression ?? "";
            return (string.Compare(_origExpression, newExpression) != 0);
        }

        /// <summary>
        /// Saves the current expression to the appropriate mapMember according to user response.
        /// </summary>
        private void SaveChanges()
        {
            // get the new expression
            string newExpression = Expression ?? "";

            // is it different?
            if (string.Compare(_origExpression, newExpression) != 0)
            {
                if (ArcGIS.Desktop.Framework.Dialogs.MessageBox.Show("Expression has changed. Do you wish to save it?", "Definition Query",
                        System.Windows.MessageBoxButton.YesNo, System.Windows.MessageBoxImage.Question) == System.Windows.MessageBoxResult.Yes)
                {
                    // BUGFIX: assign the field rather than declaring a shadowing
                    // local ("var _origExpression = ..."), which left the field
                    // stale so CanSaveChanges kept reporting unsaved changes and
                    // the save prompt reappeared on every selection change.
                    _origExpression = newExpression;

                    var fLayer = ControlProperties.MapMember as BasicFeatureLayer;
                    var table = ControlProperties.MapMember as StandaloneTable;

                    // update mapMember definition query
                    QueuedTask.Run(() =>
                    {
                        if (fLayer != null)
                            fLayer.SetDefinitionQuery(newExpression);
                        else if (table != null)
                            table.SetDefinitionQuery(newExpression);
                    });
                }
            }
        }
    }

    /// <summary>
    /// Button implementation to show the DockPane.
    /// </summary>
    internal class DefinitionQueryDockPane_ShowButton : Button
    {
        protected override void OnClick()
        {
            DefinitionQueryDockPaneViewModel.Show();
        }
    }
}
// Copyright (c) Umbraco.
// See LICENSE for more details.

using System.Collections.Generic;
using System.Linq;
using Moq;
using NUnit.Framework;
using Umbraco.Cms.Core;
using Umbraco.Cms.Core.Cache;
using Umbraco.Cms.Core.Editors;
using Umbraco.Cms.Core.Models;
using Umbraco.Cms.Core.Models.Entities;
using Umbraco.Cms.Core.Models.Membership;
using Umbraco.Cms.Core.Services;
using Umbraco.Cms.Tests.Common.Builders;
using Umbraco.Cms.Tests.Common.Builders.Extensions;
using Constants = Umbraco.Cms.Core.Constants;

namespace Umbraco.Cms.Tests.UnitTests.Umbraco.Infrastructure.Editors
{
    /// <summary>
    /// Unit tests for <see cref="UserEditorAuthorizationHelper"/>: verifies the rules
    /// for who may save users, grant group membership, and add/remove content and
    /// media start nodes. (Note: a previously-declared IUserService mock was unused
    /// in every test — the helper only takes content, media and entity services —
    /// and has been removed.)
    /// </summary>
    [TestFixture]
    public class UserEditorAuthorizationHelperTests
    {
        [Test]
        public void Admin_Is_Authorized()
        {
            IUser currentUser = CreateAdminUser();
            IUser savingUser = CreateUser();
            var contentService = new Mock<IContentService>();
            var mediaService = new Mock<IMediaService>();
            var entityService = new Mock<IEntityService>();

            var authHelper = new UserEditorAuthorizationHelper(
                contentService.Object,
                mediaService.Object,
                entityService.Object,
                AppCaches.Disabled);

            Attempt<string> result = authHelper.IsAuthorized(currentUser, savingUser, new int[0], new int[0], new string[0]);

            Assert.IsTrue(result.Success);
        }

        [Test]
        public void Non_Admin_Cannot_Save_Admin()
        {
            IUser currentUser = CreateUser();
            IUser savingUser = CreateAdminUser();
            var contentService = new Mock<IContentService>();
            var mediaService = new Mock<IMediaService>();
            var entityService = new Mock<IEntityService>();

            var authHelper = new UserEditorAuthorizationHelper(
                contentService.Object,
                mediaService.Object,
                entityService.Object,
                AppCaches.Disabled);

            Attempt<string> result = authHelper.IsAuthorized(currentUser, savingUser, new int[0], new int[0], new string[0]);

            Assert.IsFalse(result.Success);
        }

        [Test]
        public void Cannot_Grant_Group_Membership_Without_Being_A_Member()
        {
            IUser currentUser = CreateUser(withGroup: true);
            IUser savingUser = CreateUser();
            var contentService = new Mock<IContentService>();
            var mediaService = new Mock<IMediaService>();
            var entityService = new Mock<IEntityService>();

            var authHelper = new UserEditorAuthorizationHelper(
                contentService.Object,
                mediaService.Object,
                entityService.Object,
                AppCaches.Disabled);

            // currentUser belongs to "test", not "FunGroup"
            Attempt<string> result = authHelper.IsAuthorized(currentUser, savingUser, new int[0], new int[0], new[] { "FunGroup" });

            Assert.IsFalse(result.Success);
        }

        [Test]
        public void Can_Grant_Group_Membership_With_Being_A_Member()
        {
            IUser currentUser = CreateUser(withGroup: true);
            IUser savingUser = CreateUser();
            var contentService = new Mock<IContentService>();
            var mediaService = new Mock<IMediaService>();
            var entityService = new Mock<IEntityService>();

            var authHelper = new UserEditorAuthorizationHelper(
                contentService.Object,
                mediaService.Object,
                entityService.Object,
                AppCaches.Disabled);

            // "test" is the group currentUser was created with
            Attempt<string> result = authHelper.IsAuthorized(currentUser, savingUser, new int[0], new int[0], new[] { "test" });

            Assert.IsTrue(result.Success);
        }

        [Test]
        [TestCase(Constants.Security.AdminGroupAlias, Constants.Security.AdminGroupAlias, ExpectedResult = true)]
        [TestCase(Constants.Security.AdminGroupAlias, "SomethingElse", ExpectedResult = true)]
        [TestCase(Constants.Security.EditorGroupAlias, Constants.Security.AdminGroupAlias, ExpectedResult = false)]
        [TestCase(Constants.Security.EditorGroupAlias, "SomethingElse", ExpectedResult = false)]
        [TestCase(Constants.Security.EditorGroupAlias, Constants.Security.EditorGroupAlias, ExpectedResult = true)]
        public bool Can_only_add_user_groups_you_are_part_of_yourself_unless_you_are_admin(string groupAlias, string groupToAdd)
        {
            var currentUser = Mock.Of<IUser>(user => user.Groups == new[]
            {
                new ReadOnlyUserGroup(1, "CurrentUser", "icon-user", null, null, groupAlias, new string[0], new string[0])
            });
            IUser savingUser = null; // This means it is a new created user
            var contentService = new Mock<IContentService>();
            var mediaService = new Mock<IMediaService>();
            var entityService = new Mock<IEntityService>();

            var authHelper = new UserEditorAuthorizationHelper(
                contentService.Object,
                mediaService.Object,
                entityService.Object,
                AppCaches.Disabled);

            var result = authHelper.IsAuthorized(currentUser, savingUser, new int[0], new int[0], new[] { groupToAdd });

            return result.Success;
        }

        [Test]
        public void Can_Add_Another_Content_Start_Node_On_User_With_Access()
        {
            var nodePaths = new Dictionary<int, string>
            {
                { 1234, "-1,1234" },
                { 9876, "-1,9876" },
                { 5555, "-1,9876,5555" },
                { 4567, "-1,4567" },
            };

            IUser currentUser = CreateUser(startContentIds: new[] { 9876 });
            IUser savingUser = CreateUser(startContentIds: new[] { 1234 });
            var contentService = new Mock<IContentService>();
            contentService.Setup(x => x.GetById(It.IsAny<int>()))
                .Returns((int id) => Mock.Of<IContent>(content => content.Path == nodePaths[id]));
            var mediaService = new Mock<IMediaService>();
            var entityService = new Mock<IEntityService>();
            entityService.Setup(service => service.GetAllPaths(It.IsAny<UmbracoObjectTypes>(), It.IsAny<int[]>()))
                .Returns((UmbracoObjectTypes objType, int[] ids) => ids.Select(x => new TreeEntityPath { Path = nodePaths[x], Id = x }));

            var authHelper = new UserEditorAuthorizationHelper(
                contentService.Object,
                mediaService.Object,
                entityService.Object,
                AppCaches.Disabled);

            // adding 5555 which currentUser has access to since it's a child of 9876 ... adding is still ok even though currentUser doesn't have access to 1234
            Attempt<string> result = authHelper.IsAuthorized(currentUser, savingUser, new[] { 1234, 5555 }, new int[0], new string[0]);

            Assert.IsTrue(result.Success);
        }

        [Test]
        public void Can_Remove_Content_Start_Node_On_User_Without_Access()
        {
            var nodePaths = new Dictionary<int, string>
            {
                { 1234, "-1,1234" },
                { 9876, "-1,9876" },
                { 5555, "-1,9876,5555" },
                { 4567, "-1,4567" },
            };

            IUser currentUser = CreateUser(startContentIds: new[] { 9876 });
            IUser savingUser = CreateUser(startContentIds: new[] { 1234, 4567 });
            var contentService = new Mock<IContentService>();
            contentService.Setup(x => x.GetById(It.IsAny<int>()))
                .Returns((int id) => Mock.Of<IContent>(content => content.Path == nodePaths[id]));
            var mediaService = new Mock<IMediaService>();
            var entityService = new Mock<IEntityService>();
            entityService.Setup(service => service.GetAllPaths(It.IsAny<UmbracoObjectTypes>(), It.IsAny<int[]>()))
                .Returns((UmbracoObjectTypes objType, int[] ids) => ids.Select(x => new TreeEntityPath { Path = nodePaths[x], Id = x }));

            var authHelper = new UserEditorAuthorizationHelper(
                contentService.Object,
                mediaService.Object,
                entityService.Object,
                AppCaches.Disabled);

            // removing 4567 start node even though currentUser doesn't have acces to it ... removing is ok
            Attempt<string> result = authHelper.IsAuthorized(currentUser, savingUser, new[] { 1234 }, new int[0], new string[0]);

            Assert.IsTrue(result.Success);
        }

        [Test]
        public void Cannot_Add_Content_Start_Node_On_User_Without_Access()
        {
            var nodePaths = new Dictionary<int, string>
            {
                { 1234, "-1,1234" },
                { 9876, "-1,9876" },
                { 5555, "-1,9876,5555" },
                { 4567, "-1,4567" },
            };

            IUser currentUser = CreateUser(startContentIds: new[] { 9876 });
            IUser savingUser = CreateUser();
            var contentService = new Mock<IContentService>();
            contentService.Setup(x => x.GetById(It.IsAny<int>()))
                .Returns((int id) => Mock.Of<IContent>(content => content.Path == nodePaths[id]));
            var mediaService = new Mock<IMediaService>();
            var entityService = new Mock<IEntityService>();
            entityService.Setup(service => service.GetAllPaths(It.IsAny<UmbracoObjectTypes>(), It.IsAny<int[]>()))
                .Returns((UmbracoObjectTypes objType, int[] ids) => ids.Select(x => new TreeEntityPath { Path = nodePaths[x], Id = x }));

            var authHelper = new UserEditorAuthorizationHelper(
                contentService.Object,
                mediaService.Object,
                entityService.Object,
                AppCaches.Disabled);

            // adding 1234 but currentUser doesn't have access to it ... nope
            Attempt<string> result = authHelper.IsAuthorized(currentUser, savingUser, new[] { 1234 }, new int[0], new string[0]);

            Assert.IsFalse(result.Success);
        }

        [Test]
        public void Can_Add_Content_Start_Node_On_User_With_Access()
        {
            var nodePaths = new Dictionary<int, string>
            {
                { 1234, "-1,1234" },
                { 9876, "-1,9876" },
                { 5555, "-1,9876,5555" },
                { 4567, "-1,4567" },
            };

            IUser currentUser = CreateUser(startContentIds: new[] { 9876 });
            IUser savingUser = CreateUser();
            var contentService = new Mock<IContentService>();
            contentService.Setup(x => x.GetById(It.IsAny<int>()))
                .Returns((int id) => Mock.Of<IContent>(content => content.Path == nodePaths[id]));
            var mediaService = new Mock<IMediaService>();
            var entityService = new Mock<IEntityService>();
            entityService.Setup(service => service.GetAllPaths(It.IsAny<UmbracoObjectTypes>(), It.IsAny<int[]>()))
                .Returns((UmbracoObjectTypes objType, int[] ids) => ids.Select(x => new TreeEntityPath { Path = nodePaths[x], Id = x }));

            var authHelper = new UserEditorAuthorizationHelper(
                contentService.Object,
                mediaService.Object,
                entityService.Object,
                AppCaches.Disabled);

            // adding 5555 which currentUser has access to since it's a child of 9876 ... ok
            Attempt<string> result = authHelper.IsAuthorized(currentUser, savingUser, new[] { 5555 }, new int[0], new string[0]);

            Assert.IsTrue(result.Success);
        }

        [Test]
        public void Cannot_Add_Media_Start_Node_On_User_Without_Access()
        {
            var nodePaths = new Dictionary<int, string>
            {
                { 1234, "-1,1234" },
                { 9876, "-1,9876" },
                { 5555, "-1,9876,5555" },
                { 4567, "-1,4567" },
            };

            IUser currentUser = CreateUser(startMediaIds: new[] { 9876 });
            IUser savingUser = CreateUser();
            var contentService = new Mock<IContentService>();
            var mediaService = new Mock<IMediaService>();
            mediaService.Setup(x => x.GetById(It.IsAny<int>()))
                .Returns((int id) => Mock.Of<IMedia>(content => content.Path == nodePaths[id]));
            var entityService = new Mock<IEntityService>();
            entityService.Setup(service => service.GetAllPaths(It.IsAny<UmbracoObjectTypes>(), It.IsAny<int[]>()))
                .Returns((UmbracoObjectTypes objType, int[] ids) => ids.Select(x => new TreeEntityPath { Path = nodePaths[x], Id = x }));

            var authHelper = new UserEditorAuthorizationHelper(
                contentService.Object,
                mediaService.Object,
                entityService.Object,
                AppCaches.Disabled);

            // adding 1234 but currentUser doesn't have access to it ... nope
            Attempt<string> result = authHelper.IsAuthorized(currentUser, savingUser, new int[0], new[] { 1234 }, new string[0]);

            Assert.IsFalse(result.Success);
        }

        [Test]
        public void Can_Add_Media_Start_Node_On_User_With_Access()
        {
            var nodePaths = new Dictionary<int, string>
            {
                { 1234, "-1,1234" },
                { 9876, "-1,9876" },
                { 5555, "-1,9876,5555" },
                { 4567, "-1,4567" },
            };

            IUser currentUser = CreateUser(startMediaIds: new[] { 9876 });
            IUser savingUser = CreateUser();
            var contentService = new Mock<IContentService>();
            var mediaService = new Mock<IMediaService>();
            mediaService.Setup(x => x.GetById(It.IsAny<int>()))
                .Returns((int id) => Mock.Of<IMedia>(content => content.Path == nodePaths[id]));
            var entityService = new Mock<IEntityService>();
            entityService.Setup(service => service.GetAllPaths(It.IsAny<UmbracoObjectTypes>(), It.IsAny<int[]>()))
                .Returns((UmbracoObjectTypes objType, int[] ids) => ids.Select(x => new TreeEntityPath { Path = nodePaths[x], Id = x }));

            var authHelper = new UserEditorAuthorizationHelper(
                contentService.Object,
                mediaService.Object,
                entityService.Object,
                AppCaches.Disabled);

            // adding 5555 which currentUser has access to since it's a child of 9876 ... ok
            Attempt<string> result = authHelper.IsAuthorized(currentUser, savingUser, new int[0], new[] { 5555 }, new string[0]);

            Assert.IsTrue(result.Success);
        }

        [Test]
        public void Can_Add_Another_Media_Start_Node_On_User_With_Access()
        {
            var nodePaths = new Dictionary<int, string>
            {
                { 1234, "-1,1234" },
                { 9876, "-1,9876" },
                { 5555, "-1,9876,5555" },
                { 4567, "-1,4567" },
            };

            IUser currentUser = CreateUser(startMediaIds: new[] { 9876 });
            IUser savingUser = CreateUser(startMediaIds: new[] { 1234 });
            var contentService = new Mock<IContentService>();
            var mediaService = new Mock<IMediaService>();
            mediaService.Setup(x => x.GetById(It.IsAny<int>()))
                .Returns((int id) => Mock.Of<IMedia>(content => content.Path == nodePaths[id]));
            var entityService = new Mock<IEntityService>();
            entityService.Setup(service => service.GetAllPaths(It.IsAny<UmbracoObjectTypes>(), It.IsAny<int[]>()))
                .Returns((UmbracoObjectTypes objType, int[] ids) => ids.Select(x => new TreeEntityPath { Path = nodePaths[x], Id = x }));

            var authHelper = new UserEditorAuthorizationHelper(
                contentService.Object,
                mediaService.Object,
                entityService.Object,
                AppCaches.Disabled);

            // adding 5555 which currentUser has access to since it's a child of 9876 ... adding is still ok even though currentUser doesn't have access to 1234
            Attempt<string> result = authHelper.IsAuthorized(currentUser, savingUser, new int[0], new[] { 1234, 5555 }, new string[0]);

            Assert.IsTrue(result.Success);
        }

        [Test]
        public void Can_Remove_Media_Start_Node_On_User_Without_Access()
        {
            var nodePaths = new Dictionary<int, string>
            {
                { 1234, "-1,1234" },
                { 9876, "-1,9876" },
                { 5555, "-1,9876,5555" },
                { 4567, "-1,4567" },
            };

            IUser currentUser = CreateUser(startMediaIds: new[] { 9876 });
            IUser savingUser = CreateUser(startMediaIds: new[] { 1234, 4567 });
            var contentService = new Mock<IContentService>();
            var mediaService = new Mock<IMediaService>();
            mediaService.Setup(x => x.GetById(It.IsAny<int>()))
                .Returns((int id) => Mock.Of<IMedia>(content => content.Path == nodePaths[id]));
            var entityService = new Mock<IEntityService>();
            entityService.Setup(service => service.GetAllPaths(It.IsAny<UmbracoObjectTypes>(), It.IsAny<int[]>()))
                .Returns((UmbracoObjectTypes objType, int[] ids) => ids.Select(x => new TreeEntityPath { Path = nodePaths[x], Id = x }));

            var authHelper = new UserEditorAuthorizationHelper(
                contentService.Object,
                mediaService.Object,
                entityService.Object,
                AppCaches.Disabled);

            // removing 4567 start node even though currentUser doesn't have acces to it ... removing is ok
            Attempt<string> result = authHelper.IsAuthorized(currentUser, savingUser, new int[0], new[] { 1234 }, new string[0]);

            Assert.IsTrue(result.Success);
        }

        // Builds a non-admin user, optionally with the "test" group and with
        // the given content/media start nodes.
        private static IUser CreateUser(bool withGroup = false, int[] startContentIds = null, int[] startMediaIds = null)
        {
            UserBuilder<object> builder = new UserBuilder()
                .WithStartContentIds(startContentIds ?? (new int[0]))
                .WithStartMediaIds(startMediaIds ?? (new int[0]));

            if (withGroup)
            {
                builder = (UserBuilder)builder
                    .AddUserGroup()
                    .WithName("Test")
                    .WithAlias("test")
                    .Done();
            }

            return builder.Build();
        }

        // Builds a user that is a member of the admin group.
        private static IUser CreateAdminUser() =>
            new UserBuilder()
                .AddUserGroup()
                .WithId(1)
                .WithName("Admin")
                .WithAlias(Constants.Security.AdminGroupAlias)
                .Done()
                .Build();
    }
}
using System.Data;
using FluentMigrator.Expressions;
using FluentMigrator.Infrastructure;
using FluentMigrator.Model;

namespace FluentMigrator.Builders
{
    /// <summary>
    /// Base class for fluent expression builders that assign a column type.
    /// Each As* method sets the <see cref="DbType"/> (and optional size,
    /// precision or collation) on the column returned by
    /// <see cref="GetColumnForType"/> and returns the next fluent stage.
    /// </summary>
    public abstract class ExpressionBuilderWithColumnTypesBase<ExpressionT, NextT> : ExpressionBuilderBase<ExpressionT>
        where ExpressionT : IMigrationExpression
        where NextT : IFluentSyntax
    {
        protected ExpressionBuilderWithColumnTypesBase(ExpressionT expression)
            : base(expression)
        {
        }

        /// <summary>
        /// Gets the column definition the type information should be applied to.
        /// </summary>
        public abstract ColumnDefinition GetColumnForType();

        private ColumnDefinition Column
        {
            get { return GetColumnForType(); }
        }

        public NextT AsAnsiString()
        {
            SetColumnAsString(DbType.AnsiString);
            return (NextT)(object)this;
        }

        public NextT AsAnsiString(string collationName)
        {
            SetColumnAsString(dbType: DbType.AnsiString, collationName: collationName);
            return (NextT)(object)this;
        }

        public NextT AsAnsiString(int size)
        {
            SetColumnAsString(DbType.AnsiString, size);
            return (NextT)(object)this;
        }

        public NextT AsAnsiString(int size, string collationName)
        {
            SetColumnAsString(DbType.AnsiString, size, collationName);
            return (NextT)(object)this;
        }

        public NextT AsBinary()
        {
            Column.Type = DbType.Binary;
            return (NextT)(object)this;
        }

        public NextT AsBinary(int size)
        {
            Column.Type = DbType.Binary;
            Column.Size = size;
            return (NextT)(object)this;
        }

        public NextT AsBoolean()
        {
            Column.Type = DbType.Boolean;
            return (NextT)(object)this;
        }

        public NextT AsByte()
        {
            Column.Type = DbType.Byte;
            return (NextT)(object)this;
        }

        public NextT AsCurrency()
        {
            Column.Type = DbType.Currency;
            return (NextT)(object)this;
        }

        public NextT AsDate()
        {
            Column.Type = DbType.Date;
            return (NextT)(object)this;
        }

        public NextT AsDateTime()
        {
            Column.Type = DbType.DateTime;
            return (NextT)(object)this;
        }

        public NextT AsDateTimeOffset()
        {
            Column.Type = DbType.DateTimeOffset;
            return (NextT)(object)this;
        }

        public NextT AsDecimal()
        {
            Column.Type = DbType.Decimal;
            return (NextT)(object)this;
        }

        public NextT AsDecimal(int size, int precision)
        {
            Column.Type = DbType.Decimal;
            Column.Size = size;
            Column.Precision = precision;
            return (NextT)(object)this;
        }

        public NextT AsDouble()
        {
            Column.Type = DbType.Double;
            return (NextT)(object)this;
        }

        public NextT AsFixedLengthString(int size)
        {
            SetColumnAsString(DbType.StringFixedLength, size);
            return (NextT)(object)this;
        }

        public NextT AsFixedLengthString(int size, string collationName)
        {
            SetColumnAsString(DbType.StringFixedLength, size, collationName);
            return (NextT)(object)this;
        }

        public NextT AsFixedLengthAnsiString(int size)
        {
            SetColumnAsString(DbType.AnsiStringFixedLength, size);
            return (NextT)(object)this;
        }

        public NextT AsFixedLengthAnsiString(int size, string collationName)
        {
            SetColumnAsString(DbType.AnsiStringFixedLength, size, collationName);
            return (NextT)(object)this;
        }

        public NextT AsFloat()
        {
            Column.Type = DbType.Single;
            return (NextT)(object)this;
        }

        public NextT AsGuid()
        {
            Column.Type = DbType.Guid;
            return (NextT)(object)this;
        }

        public NextT AsInt16()
        {
            Column.Type = DbType.Int16;
            return (NextT)(object)this;
        }

        public NextT AsInt32()
        {
            Column.Type = DbType.Int32;
            return (NextT)(object)this;
        }

        public NextT AsInt64()
        {
            Column.Type = DbType.Int64;
            return (NextT)(object)this;
        }

        public NextT AsString()
        {
            SetColumnAsString(DbType.String);
            return (NextT)(object)this;
        }

        public NextT AsString(string collationName)
        {
            SetColumnAsString(dbType: DbType.String, collationName: collationName);
            return (NextT)(object)this;
        }

        public NextT AsString(int size)
        {
            SetColumnAsString(DbType.String, size);
            return (NextT)(object)this;
        }

        public NextT AsString(int size, string collationName)
        {
            SetColumnAsString(DbType.String, size, collationName);
            return (NextT)(object)this;
        }

        public NextT AsTime()
        {
            Column.Type = DbType.Time;
            return (NextT)(object)this;
        }

        public NextT AsXml()
        {
            Column.Type = DbType.Xml;
            return (NextT)(object)this;
        }

        public NextT AsXml(int size)
        {
            Column.Type = DbType.Xml;
            Column.Size = size;
            return (NextT)(object)this;
        }

        public NextT AsCustom(string customType)
        {
            // A custom type replaces the DbType entirely.
            Column.Type = null;
            Column.CustomType = customType;
            return (NextT)(object)this;
        }

        /// <summary>
        /// Applies a string-like DbType to the column, with optional size and collation.
        /// </summary>
        /// <param name="dbType">The string DbType variant to set.</param>
        /// <param name="size">Maximum length, or null to leave the column's size unset.</param>
        /// <param name="collationName">Collation to apply; empty string leaves it unset.</param>
        private void SetColumnAsString(DbType dbType, int? size = null, string collationName = "")
        {
            Column.Type = dbType;
            // IMPROVED: the previous implementation used the magic sentinel -100
            // to mean "no size"; a nullable int expresses that intent directly
            // (callers passing an int are implicitly converted, so behavior for
            // every existing As* overload is unchanged).
            if (size.HasValue)
            {
                Column.Size = size.Value;
            }

            if (!string.IsNullOrEmpty(collationName))
            {
                Column.CollationName = collationName;
            }
        }
    }
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using System;
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
using osu.Framework.Allocation;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Shapes;
using osu.Framework.Graphics.Sprites;
using osu.Framework.Graphics.Transforms;
using osu.Framework.MathUtils;
using osu.Framework.Testing;
using osu.Framework.Timing;
using osuTK;
using osuTK.Graphics;

namespace osu.Framework.Tests.Visual.Drawables
{
    /// <summary>
    /// Verifies that transforms evaluate to the correct values when a drawable's clock is
    /// seeked both forwards and backwards (rewinding), including chained sequences,
    /// overlapping transforms of the same type, loops, and sequences started mid-way.
    /// </summary>
    public class TestCaseTransformRewinding : TestCase
    {
        // Duration of one transform step, in milliseconds.
        private const double interval = 250;
        private const int interval_count = 4;

        // Absolute time at the end of the given step index.
        private static double intervalAt(int sequence) => interval * sequence;

        // Clock driven directly by checkAtTime to seek to arbitrary times.
        private ManualClock manualClock;
        private FramedClock manualFramedClock;

        [SetUp]
        public void SetUp() => Schedule(() =>
        {
            Clear();
            manualClock = new ManualClock();
            manualFramedClock = new FramedClock(manualClock);
        });

        [Test]
        public void BasicScale()
        {
            boxTest(box =>
            {
                box.Scale = Vector2.One;
                box.ScaleTo(0, interval * 4);
            });

            // Forward over the full duration, then rewind — values must match on the way back.
            checkAtTime(250, box => Precision.AlmostEquals(box.Scale.X, 0.75f));
            checkAtTime(500, box => Precision.AlmostEquals(box.Scale.X, 0.5f));
            checkAtTime(750, box => Precision.AlmostEquals(box.Scale.X, 0.25f));
            checkAtTime(1000, box => Precision.AlmostEquals(box.Scale.X, 0f));
            checkAtTime(500, box => Precision.AlmostEquals(box.Scale.X, 0.5f));
            checkAtTime(250, box => Precision.AlmostEquals(box.Scale.X, 0.75f));
            AddAssert("check transform count", () => box.Transforms.Count == 1);
        }

        [Test]
        public void ScaleSequence()
        {
            boxTest(box =>
            {
                box.Scale = Vector2.One;
                box.ScaleTo(0.75f, interval).Then()
                   .ScaleTo(0.5f, interval).Then()
                   .ScaleTo(0.25f, interval).Then()
                   .ScaleTo(0, interval);
            });

            int i = 0;
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.Scale.X, 0.75f));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.Scale.X, 0.5f));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.Scale.X, 0.25f));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.Scale.X, 0f));
            // Rewind two steps, then one more.
            checkAtTime(interval * (i -= 2), box => Precision.AlmostEquals(box.Scale.X, 0.5f));
            checkAtTime(interval * --i, box => Precision.AlmostEquals(box.Scale.X, 0.75f));
            AddAssert("check transform count", () => box.Transforms.Count == 4);
        }

        [Test]
        public void BasicMovement()
        {
            boxTest(box =>
            {
                box.Scale = new Vector2(0.25f);
                box.Anchor = Anchor.TopLeft;
                box.Origin = Anchor.TopLeft;
                box.MoveTo(new Vector2(0.75f, 0), interval).Then()
                   .MoveTo(new Vector2(0.75f, 0.75f), interval).Then()
                   .MoveTo(new Vector2(0, 0.75f), interval).Then()
                   .MoveTo(new Vector2(0), interval);
            });

            int i = 0;
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.X, 0.75f));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.Y, 0.75f));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.X, 0f));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.Y, 0f));
            checkAtTime(interval * (i -= 2), box => Precision.AlmostEquals(box.Y, 0.75f));
            checkAtTime(interval * --i, box => Precision.AlmostEquals(box.X, 0.75f));
            AddAssert("check transform count", () => box.Transforms.Count == 4);
        }

        [Test]
        public void MoveSequence()
        {
            boxTest(box =>
            {
                box.Scale = new Vector2(0.25f);
                box.Anchor = Anchor.TopLeft;
                box.Origin = Anchor.TopLeft;
                // Mixed transform types in one chain: scale + move per step, fade at the end.
                box.ScaleTo(0.5f, interval).MoveTo(new Vector2(0.5f), interval).Then()
                   .ScaleTo(0.1f, interval).MoveTo(new Vector2(0, 0.75f), interval).Then()
                   .ScaleTo(1f, interval).MoveTo(new Vector2(0, 0), interval).Then()
                   .FadeTo(0, interval);
            });

            int i = 0;
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.X, 0.5f) && Precision.AlmostEquals(box.Scale.X, 0.5f));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.Y, 0.75f) && Precision.AlmostEquals(box.Scale.X, 0.1f));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.X, 0f));
            checkAtTime(interval * (i += 2), box => Precision.AlmostEquals(box.Alpha, 0f));
            checkAtTime(interval * (i - 2), box => Precision.AlmostEquals(box.Alpha, 1f));
            AddAssert("check transform count", () => box.Transforms.Count == 7);
        }

        [Test]
        public void MoveCancelSequence()
        {
            boxTest(box =>
            {
                box.Scale = new Vector2(0.25f);
                box.Anchor = Anchor.TopLeft;
                box.Origin = Anchor.TopLeft;
                box.ScaleTo(0.5f, interval).Then().ScaleTo(1, interval);
                // A transform applied half-way through should cancel the in-flight chain.
                Scheduler.AddDelayed(() => { box.ScaleTo(new Vector2(0.1f), interval); }, interval / 2);
            });

            int i = 0;
            checkAtTime(interval * i, box => Precision.AlmostEquals(box.Scale.X, 0.25f));
            // The original 0.5f target should no longer be reached at interval 1.
            checkAtTime(interval * ++i, box => !Precision.AlmostEquals(box.Scale.X, 0.5f));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.Scale.X, 0.1f));
            AddAssert("check transform count", () => box.Transforms.Count == 2);
        }

        [Test]
        public void SameTypeInType()
        {
            boxTest(box =>
            {
                // A shorter scale transform nested fully inside a longer one of the same type.
                box.ScaleTo(0.5f, interval * 4);
                box.Delay(interval * 2).ScaleTo(1, interval);
            });

            int i = 0;
            checkAtTime(interval * i, box => Precision.AlmostEquals(box.Scale.X, 0.25f));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.Scale.X, 0.3125f));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.Scale.X, 0.375f));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.Scale.X, 1));
            checkAtTime(interval * --i, box => Precision.AlmostEquals(box.Scale.X, 0.375f));
            checkAtTime(interval * --i, box => Precision.AlmostEquals(box.Scale.X, 0.3125f));
            checkAtTime(interval * --i, box => Precision.AlmostEquals(box.Scale.X, 0.25f));
            AddAssert("check transform count", () => box.Transforms.Count == 2);
        }

        [Test]
        public void SameTypeInPartialOverlap()
        {
            boxTest(box =>
            {
                // Two scale transforms of the same type that only partially overlap in time.
                box.ScaleTo(0.5f, interval * 2);
                box.Delay(interval).ScaleTo(1, interval * 2);
            });

            int i = 0;
            checkAtTime(interval * i, box => Precision.AlmostEquals(box.Scale.X, 0.25f));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.Scale.X, 0.375f));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.Scale.X, 0.6875f));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.Scale.X, 1));
            checkAtTime(interval * ++i, box => Precision.AlmostEquals(box.Scale.X, 1));
            checkAtTime(interval * --i, box => Precision.AlmostEquals(box.Scale.X, 1));
            checkAtTime(interval * --i, box => Precision.AlmostEquals(box.Scale.X, 0.6875f));
            checkAtTime(interval * --i, box => Precision.AlmostEquals(box.Scale.X, 0.375f));
            AddAssert("check transform count", () => box.Transforms.Count == 2);
        }

        [Test]
        public void StartInMiddleOfSequence()
        {
            // Box is added with its clock already at 750ms (mid-sequence).
            boxTest(box =>
            {
                box.Alpha = 0;
                box.Delay(interval * 2).FadeInFromZero(interval);
                box.ScaleTo(0.9f, interval * 4);
            }, 750);

            checkAtTime(interval * 3, box => Precision.AlmostEquals(box.Alpha, 1));
            checkAtTime(interval * 4, box => Precision.AlmostEquals(box.Alpha, 1) && Precision.AlmostEquals(box.Scale.X, 0.9f));
            checkAtTime(interval * 2, box => Precision.AlmostEquals(box.Alpha, 0) && Precision.AlmostEquals(box.Scale.X, 0.575f));
            AddAssert("check transform count", () => box.Transforms.Count == 3);
        }

        [Test]
        public void LoopSequence()
        {
            boxTest(box => { box.RotateTo(0).RotateTo(90, interval).Loop(); });

            const int count = 4;

            // Forward pass: just before each loop boundary the rotation approaches 90,
            // on the boundary it snaps back to 0.
            for (int i = 0; i <= count; i++)
            {
                if (i > 0)
                    checkAtTime(interval * i - 1, box => Precision.AlmostEquals(box.Rotation, 90f, 1));
                checkAtTime(interval * i, box => Precision.AlmostEquals(box.Rotation, 0));
            }

            AddAssert("check transform count", () => box.Transforms.Count == 10);

            // Rewind pass: same values must reproduce in reverse order.
            for (int i = count; i >= 0; i--)
            {
                if (i > 0)
                    checkAtTime(interval * i - 1, box => Precision.AlmostEquals(box.Rotation, 90f, 1));
                checkAtTime(interval * i, box => Precision.AlmostEquals(box.Rotation, 0));
            }
        }

        [Test]
        public void StartInMiddleOfLoopSequence()
        {
            boxTest(box => { box.RotateTo(0).RotateTo(90, interval).Loop(); }, 750);

            checkAtTime(750, box => Precision.AlmostEquals(box.Rotation, 0f));
            AddAssert("check transform count", () => box.Transforms.Count == 8);

            const int count = 4;
            for (int i = 0; i <= count; i++)
            {
                if (i > 0)
                    checkAtTime(interval * i - 1, box => Precision.AlmostEquals(box.Rotation, 90f, 1));
                checkAtTime(interval * i, box => Precision.AlmostEquals(box.Rotation, 0));
            }

            AddAssert("check transform count", () => box.Transforms.Count == 10);

            for (int i = count; i >= 0; i--)
            {
                if (i > 0)
                    checkAtTime(interval * i - 1, box => Precision.AlmostEquals(box.Rotation, 90f, 1));
                checkAtTime(interval * i, box => Precision.AlmostEquals(box.Rotation, 0));
            }
        }

        private Box box;

        // Seeks the box's clock to an absolute time, updates its subtree once, and runs the assertion.
        private void checkAtTime(double time, Func<Box, bool> assert)
        {
            AddAssert($"check at time {time}", () =>
            {
                manualClock.CurrentTime = time;
                box.Clock = manualFramedClock;
                box.UpdateSubTree();
                return assert(box);
            });
        }

        // Adds a fresh box inside an AnimationContainer (whose clock starts at startTime),
        // then applies the supplied transforms to it.
        private void boxTest(Action<Box> action, int startTime = 0)
        {
            AddStep("add box", () =>
            {
                Add(new AnimationContainer(startTime)
                {
                    Child = box = new Box
                    {
                        Anchor = Anchor.Centre,
                        Origin = Anchor.Centre,
                        RelativeSizeAxes = Axes.Both,
                        RelativePositionAxes = Axes.Both,
                        Scale = new Vector2(0.25f),
                    },
                    ExaminableDrawable = box,
                });
                action(box);
            });
        }

        /// <summary>
        /// Visual harness: hosts the examined drawable inside a time-wrapping container and
        /// displays the current/min/max clock times plus the drawable's live transform list.
        /// </summary>
        private class AnimationContainer : Container
        {
            // Transforms must be kept so rewinding can re-apply them.
            public override bool RemoveCompletedTransforms => false;

            protected override Container<Drawable> Content => content;
            private readonly Container content;

            private readonly SpriteText minTimeText;
            private readonly SpriteText currentTimeText;
            private readonly SpriteText maxTimeText;
            private readonly Tick seekingTick;
            private readonly WrappingTimeContainer wrapping;

            public Box ExaminableDrawable;

            private readonly FlowContainer<DrawableTransform> transforms;

            public AnimationContainer(int startTime = 0)
            {
                Anchor = Anchor.Centre;
                Origin = Anchor.Centre;
                RelativeSizeAxes = Axes.Both;
                InternalChild = wrapping = new WrappingTimeContainer(startTime)
                {
                    RelativeSizeAxes = Axes.Both,
                    Children = new Drawable[]
                    {
                        new Container
                        {
                            FillMode = FillMode.Fit,
                            RelativeSizeAxes = Axes.Both,
                            Anchor = Anchor.Centre,
                            Origin = Anchor.Centre,
                            Size = new Vector2(0.6f),
                            Children = new Drawable[]
                            {
                                new Box
                                {
                                    RelativeSizeAxes = Axes.Both,
                                    Colour = Color4.DarkGray,
                                },
                                content = new Container
                                {
                                    RelativeSizeAxes = Axes.Both,
                                    Masking = true,
                                },
                            }
                        },
                        transforms = new FillFlowContainer<DrawableTransform>
                        {
                            Anchor = Anchor.CentreLeft,
                            Origin = Anchor.CentreLeft,
                            Spacing = Vector2.One,
                            RelativeSizeAxes = Axes.X,
                            AutoSizeAxes = Axes.Y,
                            Width = 0.2f,
                        },
                        new Container
                        {
                            Anchor = Anchor.TopCentre,
                            Origin = Anchor.TopCentre,
                            RelativeSizeAxes = Axes.Both,
                            Size = new Vector2(0.8f, 0.1f),
                            Children = new Drawable[]
                            {
                                minTimeText = new SpriteText
                                {
                                    Anchor = Anchor.BottomLeft,
                                    Origin = Anchor.TopLeft,
                                },
                                currentTimeText = new SpriteText
                                {
                                    RelativePositionAxes = Axes.X,
                                    Anchor = Anchor.BottomLeft,
                                    Origin = Anchor.BottomCentre,
                                    Y = -10,
                                },
                                maxTimeText = new SpriteText
                                {
                                    Anchor = Anchor.BottomRight,
                                    Origin = Anchor.TopRight,
                                },
                                seekingTick = new Tick(0, false),
                                new Tick(0),
                                new Tick(1),
                                new Tick(2),
                                new Tick(3),
                                new Tick(4),
                            }
                        }
                    }
                };
            }

            // Last transform list shown; used to avoid rebuilding the display every frame.
            private List<Transform> displayedTransforms;

            protected override void Update()
            {
                base.Update();

                double time = wrapping.Time.Current;
                minTimeText.Text = wrapping.MinTime.ToString("n0");
                currentTimeText.Text = time.ToString("n0");
                seekingTick.X = currentTimeText.X = (float)(time / (wrapping.MaxTime - wrapping.MinTime));
                maxTimeText.Text = wrapping.MaxTime.ToString("n0");
                maxTimeText.Colour = time > wrapping.MaxTime ? Color4.Gray : wrapping.Time.Elapsed > 0 ? Color4.Blue : Color4.Red;
                // NOTE(review): this reads content.Time.Elapsed while the line above reads
                // wrapping.Time.Elapsed — confirm the asymmetry is intentional.
                minTimeText.Colour = time < wrapping.MinTime ? Color4.Gray : content.Time.Elapsed > 0 ? Color4.Blue : Color4.Red;

                if (displayedTransforms == null || !ExaminableDrawable.Transforms.SequenceEqual(displayedTransforms))
                {
                    // Transform list changed: rebuild the on-screen representation.
                    transforms.Clear();
                    foreach (var t in ExaminableDrawable.Transforms)
                        transforms.Add(new DrawableTransform(t));
                    displayedTransforms = new List<Transform>(ExaminableDrawable.Transforms);
                }
            }

            // One row in the transform display: two state boxes (Applied / AppliedToEnd) plus text.
            private class DrawableTransform : CompositeDrawable
            {
                private readonly Transform transform;
                private readonly Box applied;
                private readonly Box appliedToEnd;
                private readonly SpriteText text;
                private const float height = 15;

                public DrawableTransform(Transform transform)
                {
                    this.transform = transform;
                    RelativeSizeAxes = Axes.X;
                    Height = height;
                    InternalChildren = new Drawable[]
                    {
                        applied = new Box { Size = new Vector2(height) },
                        appliedToEnd = new Box { X = height + 2, Size = new Vector2(height) },
                        text = new SpriteText { X = (height + 2) * 2, Font = new FontUsage(size: height) },
                    };
                }

                protected override void Update()
                {
                    base.Update();
                    // Green when the corresponding flag is set, red otherwise.
                    applied.Colour = transform.Applied ? Color4.Green : Color4.Red;
                    appliedToEnd.Colour = transform.AppliedToEnd ? Color4.Green : Color4.Red;
                    text.Text = transform.ToString();
                }
            }

            // Vertical marker on the timeline at a given interval index.
            private class Tick : Box
            {
                private readonly int tick;
                private readonly bool colouring;

                public Tick(int tick, bool colouring = true)
                {
                    this.tick = tick;
                    this.colouring = colouring;
                    Anchor = Anchor.BottomLeft;
                    Origin = Anchor.BottomCentre;
                    Size = new Vector2(1, 10);
                    Colour = Color4.White;
                    RelativePositionAxes = Axes.X;
                    X = (float)tick / interval_count;
                }

                protected override void Update()
                {
                    base.Update();
                    // Turn yellow once the clock has passed this tick's time.
                    if (colouring)
                        Colour = Time.Current > tick * interval ? Color4.Yellow : Color4.White;
                }
            }
        }

        /// <summary>
        /// Replaces its children's clock with a <see cref="ReversibleClock"/> that ping-pongs
        /// between MinTime and MaxTime, so the hosted drawable repeatedly plays forwards and rewinds.
        /// </summary>
        private class WrappingTimeContainer : Container
        {
            // Padding, in milliseconds, at each end of maxima of the clock time
            private const double time_padding = 50;

            public double MinTime => clock.MinTime + time_padding;

            public double MaxTime => clock.MaxTime - time_padding;

            private readonly ReversibleClock clock;

            public WrappingTimeContainer(double startTime)
            {
                clock = new ReversibleClock(startTime);
            }

            [BackgroundDependencyLoader]
            private void load()
            {
                // Replace the game clock, but keep it as a reference
                clock.SetSource(Clock);
                Clock = clock;
            }

            protected override void LoadComplete()
            {
                base.LoadComplete();
                clock.MinTime = -time_padding;
                clock.MaxTime = intervalAt(interval_count) + time_padding;
            }

            // Clock that maps a monotonically-increasing source time onto a back-and-forth
            // sweep across [MinTime, MaxTime].
            private class ReversibleClock : IFrameBasedClock
            {
                private readonly double startTime;
                public double MinTime;
                public double MaxTime = 1000;
                private IFrameBasedClock trackingClock;
                private bool reversed;

                public ReversibleClock(double startTime)
                {
                    this.startTime = startTime;
                }

                public void SetSource(IFrameBasedClock trackingClock)
                {
                    // Offset so this clock starts at startTime regardless of the source's current time.
                    this.trackingClock = new FramedOffsetClock(trackingClock) { Offset = -trackingClock.CurrentTime + startTime };
                }

                public double CurrentTime { get; private set; }

                public double Rate => trackingClock.Rate;

                public bool IsRunning => trackingClock.IsRunning;

                // Elapsed time is negated while sweeping backwards.
                public double ElapsedFrameTime => (reversed ? -1 : 1) * trackingClock.ElapsedFrameTime;

                public double FramesPerSecond => trackingClock.FramesPerSecond;

                public FrameTimeInfo TimeInfo => new FrameTimeInfo { Current = CurrentTime, Elapsed = ElapsedFrameTime };

                public void ProcessFrame()
                {
                    trackingClock.ProcessFrame();

                    // There are two iterations, when iteration % 2 == 0 : not reversed
                    int iteration = (int)(trackingClock.CurrentTime / (MaxTime - MinTime));
                    reversed = iteration % 2 == 1;

                    double iterationTime = trackingClock.CurrentTime % (MaxTime - MinTime);
                    if (reversed)
                        CurrentTime = MaxTime - iterationTime;
                    else
                        CurrentTime = MinTime + iterationTime;
                }
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using System.Net;
using System.Net.Http;
using System.Web;
using System.IO;
using System.Reflection;
using System.Runtime.Serialization.Json;
using Newtonsoft.Json.Linq;

namespace AccountServer {
	/// <summary>
	/// Web Server - listens for connections, and services them.
	/// Each request is dispatched to an AppModule (or FileSender for static files)
	/// on a thread-pool thread; sessions are tracked by a "session" cookie.
	/// </summary>
	public class WebServer {
		HttpListener _listener;
		bool _running;
		// Active sessions keyed by cookie value. Mutations are guarded by lock(_sessions);
		// NOTE(review): some reads (Sessions property, TryGetValue in ProcessRequest) are
		// unsynchronized — confirm this is acceptable for the expected load.
		Dictionary<string, Session> _sessions;
		static object _lock = new object(); // serialises console/trace logging only
		Session _empty; // placeholder session used when rendering an exception page

		public WebServer() {
			_listener = new HttpListener();
			_listener.Prefixes.Add("http://+:" + AppSettings.Default.Port + "/");
			Log("Listening on port {0}", AppSettings.Default.Port);
			_sessions = new Dictionary<string, Session>();
			_empty = new Session(null);
			// Background task that sweeps the session table and removes sessions whose
			// Expires time has passed (Expires is pushed 1 hour ahead on every request —
			// see ProcessRequest).
			new Task(delegate() {
				for (; ; ) {
					// 180000 ms = 3 minutes between sweeps.
					// NOTE(review): this was previously commented "30 mins" — the value is 3 min;
					// confirm which was intended.
					Thread.Sleep(180000);
					DateTime now = Utils.Now;
					lock (_sessions) {
						// Snapshot keys so we can remove entries while iterating.
						foreach (string key in _sessions.Keys.ToList()) {
							Session s = _sessions[key];
							if (s.Expires < now)
								_sessions.Remove(key);
						}
					}
				}
			}).Start();
		}

		/// <summary>
		/// Log message to console and trace
		/// </summary>
		static public void Log(string s) {
			s = s.Trim();
			lock (_lock) {
				System.Diagnostics.Trace.WriteLine(s);
				Console.WriteLine(s);
			}
		}

		/// <summary>
		/// Log message to console and trace (string.Format style; falls back to logging
		/// the raw format string if formatting throws).
		/// </summary>
		static public void Log(string format, params object[] args) {
			try {
				Log(string.Format(format, args));
			} catch (Exception ex) {
				Log(string.Format("{0}:Error logging {1}", format, ex.Message));
			}
		}

		/// <summary>
		/// Start WebServer listening for connections.
		/// Blocks the calling thread, accepting connections until Stop() is called;
		/// each accepted request is handed to the thread pool.
		/// </summary>
		public void Start() {
			try {
				_running = true;
				_listener.Start();
				while (_running) {
					try {
						HttpListenerContext request = _listener.GetContext();
						ThreadPool.QueueUserWorkItem(ProcessRequest, request);
					} catch {
						// Ignore accept errors (e.g. listener stopped) and keep looping while _running.
					}
				}
			} catch (HttpListenerException ex) {
				Log(ex.ToString());
			} catch (ThreadAbortException) {
				// Normal on shutdown — swallow.
			} catch (Exception ex) {
				Log(ex.ToString());
			}
		}

		// Stops the accept loop and closes the listener.
		public void Stop() {
			_running = false;
			_listener.Stop();
		}

		/// <summary>
		/// All Active Sessions.
		/// NOTE(review): enumerates the live dictionary without taking the _sessions lock;
		/// the expiry sweep may mutate it concurrently — confirm callers tolerate this.
		/// </summary>
		public IEnumerable<Session> Sessions {
			get { return _sessions.Values; }
		}

		/// <summary>
		/// Process a single request: resolve the target AppModule (or FileSender),
		/// attach/create a session, invoke the module, and render any exception.
		/// </summary>
		/// <param name="listenerContext">The HttpListenerContext for the request (boxed for QueueUserWorkItem).</param>
		void ProcessRequest(object listenerContext) {
			DateTime started = DateTime.Now;	// For timing response
			HttpListenerContext context = null;
			AppModule module = null;
			StringBuilder log = new StringBuilder();	// Session log writes to here, and it is displayed at the end
			try {
				context = (HttpListenerContext)listenerContext;
				// ":[ms]:" is a placeholder patched with the elapsed time just before logging (see below).
				log.AppendFormat("{0} {1}:{2}:[ms]:", context.Request.RemoteEndPoint.Address, context.Request.Headers["X-Forwarded-For"], context.Request.RawUrl);
				Session session = null;
				string filename = HttpUtility.UrlDecode(context.Request.Url.AbsolutePath).Substring(1);
				if (filename == "") filename = "company";	// Default page is Company
				string moduleName = null;
				string methodName = null;
				string baseName = filename.Replace(".html", "");	// Ignore .html - treat as a program request
				if (baseName.IndexOf(".") < 0) {
					// Urls of the form /ModuleName[/MethodName][.html] call a C# AppModule
					string[] parts = baseName.Split('/');
					if (parts.Length <= 2) {
						Type type = AppModule.GetModule(parts[0]);
						if (type != null) {
							// The AppModule exists - create the object
							module = (AppModule)Activator.CreateInstance(type);
							moduleName = parts[0];
							if (parts.Length == 2) methodName = parts[1];
						}
					}
				}
				if (moduleName == null) {
					// No AppModule found - treat url as a file request
					moduleName = "FileSender";
					module = new FileSender(filename);
				}
				// AppModule found - retrieve or create a session for it
				Cookie cookie = context.Request.Cookies["session"];
				if (cookie != null) {
					_sessions.TryGetValue(cookie.Value, out session);
					if (AppSettings.Default.SessionLogging) log.AppendFormat("[{0}{1}]", cookie.Value, session == null ? " not found" : "");
				}
				if (session == null) {
					if (moduleName == "FileSender") {
						// Static file requests get a throw-away session (not registered with the server).
						session = new Session(null);
					} else {
						session = new Session(this);
						cookie = new Cookie("session", session.Cookie, "/");
						if (AppSettings.Default.SessionLogging) log.AppendFormat("[{0} new session]", cookie.Value);
					}
				}
				if (cookie != null) {
					context.Response.Cookies.Add(cookie);
					// Sliding expiry: each request extends the session by an hour.
					cookie.Expires = session.Expires = Utils.Now.AddHours(1);
				}
				// Set up module
				module.Session = session;
				module.LogString = log;
				if (moduleName.EndsWith("Module")) moduleName = moduleName.Substring(0, moduleName.Length - 6);
				using (module) {
					// Call method
					module.Call(context, moduleName, methodName);
				}
			} catch (Exception ex) {
				// Unwrap reflection wrappers so the real exception is reported.
				while (ex is TargetInvocationException) ex = ex.InnerException;
				if (ex is System.Net.Sockets.SocketException) {
					// Client went away — log briefly, no response possible.
					log.AppendFormat("Request error: {0}\r\n", ex.Message);
				} else {
					log.AppendFormat("Request error: {0}\r\n", ex);
					if (module == null || !module.ResponseSent) {
						// Try to render a friendly exception page; fall back to plain text.
						try {
							module = new AppModule();
							module.Session = _empty;
							module.LogString = log;
							module.Context = context;
							module.Module = "exception";
							module.Method = "default";
							module.Title = "Exception";
							module.Exception = ex;
							module.WriteResponse(module.Template("exception", module), "text/html", HttpStatusCode.InternalServerError);
						} catch (Exception ex1) {
							log.AppendFormat("Error displaying exception: {0}\r\n", ex1);
							if (module == null || !module.ResponseSent) {
								try {
									module.WriteResponse("Error displaying exception:" + ex.Message, "text/plain", HttpStatusCode.InternalServerError);
								} catch {
								}
							}
						}
					}
				}
			}
			if (context != null) {
				try {
					context.Response.Close();
				} catch {
				}
			}
			try {
				// Patch the ":[ms]:" placeholder with the measured response time and emit the log line.
				Log(log.ToString().Replace(":[ms]:", ":" + Math.Round((DateTime.Now - started).TotalMilliseconds, 0) + " ms:"));
			} catch {
			}
		}

		/// <summary>
		/// Simple session (we will actually use the derived class Session).
		/// Constructing with a non-null server generates a unique 20-letter cookie and
		/// registers the session; constructing with null yields an unregistered session.
		/// </summary>
		public class BaseSession {
			public JObject Object { get; private set; }
			public DateTime Expires;
			public string Cookie { get; private set; }
			public WebServer Server;

			public BaseSession(WebServer server) {
				if (server != null) {
					Session session;
					// NOTE(review): System.Random is not cryptographically secure; session
					// cookies generated here are guessable in principle — consider
					// RandomNumberGenerator if these act as auth tokens.
					Random r = new Random();
					lock (server._sessions) {
						do {
							Cookie = "";
							for (int i = 0; i < 20; i++)
								Cookie += (char)('A' + r.Next(26));
						} while (server._sessions.TryGetValue(Cookie, out session));
						Object = new JObject();
						server._sessions[Cookie] = (Session)this;
					}
					Server = server;
				}
			}
		}
	}
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

// ------------------------------------------------------------------------------
// Changes to this file must follow the http://aka.ms/api-review process.
// ------------------------------------------------------------------------------

// NOTE: This is a reference-assembly contract surface. Every member body is a stub
// returning default(...); the real implementation lives in the runtime assembly.
// Do not add behavior here — only API shape changes, via the api-review process.

namespace System.ServiceModel
{
    public partial class BasicHttpBinding : System.ServiceModel.HttpBindingBase
    {
        public BasicHttpBinding() { }
        public BasicHttpBinding(System.ServiceModel.BasicHttpSecurityMode securityMode) { }
        public System.ServiceModel.BasicHttpSecurity Security { get { return default(System.ServiceModel.BasicHttpSecurity); } set { } }
        public override System.ServiceModel.Channels.IChannelFactory<TChannel> BuildChannelFactory<TChannel>(System.ServiceModel.Channels.BindingParameterCollection parameters) { return default(System.ServiceModel.Channels.IChannelFactory<TChannel>); }
        public override System.ServiceModel.Channels.BindingElementCollection CreateBindingElements() { return default(System.ServiceModel.Channels.BindingElementCollection); }
    }
    public enum BasicHttpMessageCredentialType
    {
        Certificate = 1,
        UserName = 0,
    }
    public sealed partial class BasicHttpSecurity
    {
        internal BasicHttpSecurity() { }
        public System.ServiceModel.BasicHttpSecurityMode Mode { get { return default(System.ServiceModel.BasicHttpSecurityMode); } set { } }
        public System.ServiceModel.HttpTransportSecurity Transport { get { return default(System.ServiceModel.HttpTransportSecurity); } set { } }
    }
    public enum BasicHttpSecurityMode
    {
        None = 0,
        Transport = 1,
        Message = 2,
        TransportWithMessageCredential = 3,
        TransportCredentialOnly = 4,
    }
    public partial class BasicHttpsBinding : System.ServiceModel.HttpBindingBase
    {
        public BasicHttpsBinding() { }
        public BasicHttpsBinding(System.ServiceModel.BasicHttpsSecurityMode securityMode) { }
        public System.ServiceModel.BasicHttpsSecurity Security { get { return default(System.ServiceModel.BasicHttpsSecurity); } set { } }
        public override System.ServiceModel.Channels.IChannelFactory<TChannel> BuildChannelFactory<TChannel>(System.ServiceModel.Channels.BindingParameterCollection parameters) { return default(System.ServiceModel.Channels.IChannelFactory<TChannel>); }
        public override System.ServiceModel.Channels.BindingElementCollection CreateBindingElements() { return default(System.ServiceModel.Channels.BindingElementCollection); }
    }
    public sealed partial class BasicHttpsSecurity
    {
        internal BasicHttpsSecurity() { }
        public System.ServiceModel.BasicHttpsSecurityMode Mode { get { return default(System.ServiceModel.BasicHttpsSecurityMode); } set { } }
        public System.ServiceModel.HttpTransportSecurity Transport { get { return default(System.ServiceModel.HttpTransportSecurity); } set { } }
    }
    public enum BasicHttpsSecurityMode
    {
        Transport = 0,
        TransportWithMessageCredential = 1,
    }
    // Shared base for the HTTP binding family; DefaultValue attributes document the
    // contract defaults (e.g. 64K buffers, 512K buffer pool).
    public abstract partial class HttpBindingBase : System.ServiceModel.Channels.Binding
    {
        internal HttpBindingBase() { }
        [System.ComponentModel.DefaultValueAttribute(false)]
        public bool AllowCookies { get { return default(bool); } set { } }
        [System.ComponentModel.DefaultValue(false)]
        public bool BypassProxyOnLocal { get { return default(bool); } set { } }
        public System.ServiceModel.EnvelopeVersion EnvelopeVersion { get { return default(System.ServiceModel.EnvelopeVersion); } }
        [System.ComponentModel.DefaultValueAttribute((long)524288)]
        public long MaxBufferPoolSize { get { return default(long); } set { } }
        [System.ComponentModel.DefaultValueAttribute(65536)]
        public int MaxBufferSize { get { return default(int); } set { } }
        [System.ComponentModel.DefaultValueAttribute((long)65536)]
        public long MaxReceivedMessageSize { get { return default(long); } set { } }
        [System.ComponentModel.DefaultValueAttribute(null)]
        [System.ComponentModel.TypeConverter(typeof(System.UriTypeConverter))]
        public System.Uri ProxyAddress { get { return default(System.Uri); } set { } }
        public System.Xml.XmlDictionaryReaderQuotas ReaderQuotas { get { return default(System.Xml.XmlDictionaryReaderQuotas); } set { } }
        public override string Scheme { get { return default(string); } }
        public System.Text.Encoding TextEncoding { get { return default(System.Text.Encoding); } set { } }
        [System.ComponentModel.DefaultValueAttribute((System.ServiceModel.TransferMode)(0))]
        public System.ServiceModel.TransferMode TransferMode { get { return default(System.ServiceModel.TransferMode); } set { } }
        [System.ComponentModel.DefaultValue(true)]
        public bool UseDefaultWebProxy { get { return default(bool); } set { } }
    }
    public enum HttpClientCredentialType
    {
        Basic = 1,
        Certificate = 5,
        Digest = 2,
        InheritedFromHost = 6,
        None = 0,
        Ntlm = 3,
        Windows = 4,
    }
    public enum HttpProxyCredentialType
    {
        None,
        Basic,
        Digest,
        Ntlm,
        Windows,
    }
    public sealed partial class HttpTransportSecurity
    {
        public HttpTransportSecurity() { }
        public System.ServiceModel.HttpClientCredentialType ClientCredentialType { get { return default(System.ServiceModel.HttpClientCredentialType); } set { } }
        public System.ServiceModel.HttpProxyCredentialType ProxyCredentialType { get { return default(System.ServiceModel.HttpProxyCredentialType); } set { } }
    }
    public partial class NetHttpBinding : System.ServiceModel.HttpBindingBase
    {
        public NetHttpBinding() { }
        public NetHttpBinding(System.ServiceModel.BasicHttpSecurityMode securityMode) { }
        public NetHttpBinding(string configurationName) { }
        [System.ComponentModel.DefaultValueAttribute((System.ServiceModel.NetHttpMessageEncoding)(0))]
        public System.ServiceModel.NetHttpMessageEncoding MessageEncoding { get { return default(System.ServiceModel.NetHttpMessageEncoding); } set { } }
        public System.ServiceModel.BasicHttpSecurity Security { get { return default(System.ServiceModel.BasicHttpSecurity); } set { } }
        public System.ServiceModel.Channels.WebSocketTransportSettings WebSocketSettings { get { return default(System.ServiceModel.Channels.WebSocketTransportSettings); } }
        public override System.ServiceModel.Channels.IChannelFactory<TChannel> BuildChannelFactory<TChannel>(System.ServiceModel.Channels.BindingParameterCollection parameters) { return default(System.ServiceModel.Channels.IChannelFactory<TChannel>); }
        public override System.ServiceModel.Channels.BindingElementCollection CreateBindingElements() { return default(System.ServiceModel.Channels.BindingElementCollection); }
    }
    public partial class NetHttpsBinding : System.ServiceModel.HttpBindingBase
    {
        public NetHttpsBinding() { }
        public NetHttpsBinding(System.ServiceModel.BasicHttpsSecurityMode securityMode) { }
        [System.ComponentModel.DefaultValueAttribute((System.ServiceModel.NetHttpMessageEncoding)(0))]
        public System.ServiceModel.NetHttpMessageEncoding MessageEncoding { get { return default(System.ServiceModel.NetHttpMessageEncoding); } set { } }
        public System.ServiceModel.BasicHttpsSecurity Security { get { return default(System.ServiceModel.BasicHttpsSecurity); } set { } }
        public System.ServiceModel.Channels.WebSocketTransportSettings WebSocketSettings { get { return default(System.ServiceModel.Channels.WebSocketTransportSettings); } }
        public override System.ServiceModel.Channels.IChannelFactory<TChannel> BuildChannelFactory<TChannel>(System.ServiceModel.Channels.BindingParameterCollection parameters) { return default(System.ServiceModel.Channels.IChannelFactory<TChannel>); }
        public override System.ServiceModel.Channels.BindingElementCollection CreateBindingElements() { return default(System.ServiceModel.Channels.BindingElementCollection); }
    }
    public enum NetHttpMessageEncoding
    {
        Binary = 0,
        Text = 1,
        Mtom = 2,
    }
}
namespace System.ServiceModel.Channels
{
    public sealed partial class HttpRequestMessageProperty : System.ServiceModel.Channels.IMessageProperty
    {
        public HttpRequestMessageProperty() { }
        public System.Net.WebHeaderCollection Headers { get { return default(System.Net.WebHeaderCollection); } }
        public string Method { get { return default(string); } set { } }
        public static string Name { get { return default(string); } }
        public string QueryString { get { return default(string); } set { } }
        public bool SuppressEntityBody { get { return default(bool); } set { } }
        System.ServiceModel.Channels.IMessageProperty System.ServiceModel.Channels.IMessageProperty.CreateCopy() { return default(System.ServiceModel.Channels.IMessageProperty); }
    }
    public sealed partial class HttpResponseMessageProperty : System.ServiceModel.Channels.IMessageProperty
    {
        public HttpResponseMessageProperty() { }
        public System.Net.WebHeaderCollection Headers { get { return default(System.Net.WebHeaderCollection); } }
        public static string Name { get { return default(string); } }
        public System.Net.HttpStatusCode StatusCode { get { return default(System.Net.HttpStatusCode); } set { } }
        public string StatusDescription { get { return default(string); } set { } }
        System.ServiceModel.Channels.IMessageProperty System.ServiceModel.Channels.IMessageProperty.CreateCopy() { return default(System.ServiceModel.Channels.IMessageProperty); }
    }
    public partial class HttpsTransportBindingElement : System.ServiceModel.Channels.HttpTransportBindingElement
    {
        public HttpsTransportBindingElement() { }
        protected HttpsTransportBindingElement(System.ServiceModel.Channels.HttpsTransportBindingElement elementToBeCloned) { }
        public bool RequireClientCertificate { get { return default(bool); } set { } }
        public override string Scheme { get { return default(string); } }
        public override System.ServiceModel.Channels.IChannelFactory<TChannel> BuildChannelFactory<TChannel>(System.ServiceModel.Channels.BindingContext context) { return default(System.ServiceModel.Channels.IChannelFactory<TChannel>); }
        public override System.ServiceModel.Channels.BindingElement Clone() { return default(System.ServiceModel.Channels.BindingElement); }
        public override T GetProperty<T>(System.ServiceModel.Channels.BindingContext context) { return default(T); }
    }
    public partial class HttpTransportBindingElement : System.ServiceModel.Channels.TransportBindingElement
    {
        public HttpTransportBindingElement() { }
        protected HttpTransportBindingElement(System.ServiceModel.Channels.HttpTransportBindingElement elementToBeCloned) { }
        [System.ComponentModel.DefaultValueAttribute(false)]
        public bool AllowCookies { get { return default(bool); } set { } }
        [System.ComponentModel.DefaultValueAttribute((System.Net.AuthenticationSchemes)(32768))]
        public System.Net.AuthenticationSchemes AuthenticationScheme { get { return default(System.Net.AuthenticationSchemes); } set { } }
        [System.ComponentModel.DefaultValue(false)]
        public bool BypassProxyOnLocal { get { return default(bool); } set { } }
        [System.ComponentModel.DefaultValueAttribute(65536)]
        public int MaxBufferSize { get { return default(int); } set { } }
        [System.ComponentModel.DefaultValue(null)]
        [System.ComponentModel.TypeConverter(typeof(System.UriTypeConverter))]
        public System.Uri ProxyAddress { get { return default(System.Uri); } set { } }
        [System.ComponentModel.DefaultValueAttribute((System.Net.AuthenticationSchemes)(32768))]
        public System.Net.AuthenticationSchemes ProxyAuthenticationScheme { get { return default(System.Net.AuthenticationSchemes); } set { } }
        public override string Scheme { get { return default(string); } }
        [System.ComponentModel.DefaultValueAttribute((System.ServiceModel.TransferMode)(0))]
        public System.ServiceModel.TransferMode TransferMode { get { return default(System.ServiceModel.TransferMode); } set { } }
        public System.ServiceModel.Channels.WebSocketTransportSettings WebSocketSettings { get { return default(System.ServiceModel.Channels.WebSocketTransportSettings); } set { } }
        [System.ComponentModel.DefaultValue(true)]
        public bool UseDefaultWebProxy { get { return default(bool); } set { } }
        public override System.ServiceModel.Channels.IChannelFactory<TChannel> BuildChannelFactory<TChannel>(System.ServiceModel.Channels.BindingContext context) { return default(System.ServiceModel.Channels.IChannelFactory<TChannel>); }
        public override bool CanBuildChannelFactory<TChannel>(System.ServiceModel.Channels.BindingContext context) { return default(bool); }
        public override System.ServiceModel.Channels.BindingElement Clone() { return default(System.ServiceModel.Channels.BindingElement); }
        public override T GetProperty<T>(System.ServiceModel.Channels.BindingContext context) { return default(T); }
    }
    public partial interface IHttpCookieContainerManager
    {
        System.Net.CookieContainer CookieContainer { get; set; }
    }
    public sealed partial class WebSocketTransportSettings : System.IEquatable<System.ServiceModel.Channels.WebSocketTransportSettings>
    {
        public const string BinaryMessageReceivedAction = "http://schemas.microsoft.com/2011/02/websockets/onbinarymessage";
        public const string TextMessageReceivedAction = "http://schemas.microsoft.com/2011/02/websockets/ontextmessage";
        public WebSocketTransportSettings() { }
        [System.ComponentModel.DefaultValueAttribute(false)]
        public bool DisablePayloadMasking { get { return default(bool); } set { } }
        [System.ComponentModel.DefaultValueAttribute(typeof(System.TimeSpan), "00:00:00")]
        public System.TimeSpan KeepAliveInterval { get { return default(System.TimeSpan); } set { } }
        [System.ComponentModel.DefaultValueAttribute(null)]
        public string SubProtocol { get { return default(string); } set { } }
        [System.ComponentModel.DefaultValueAttribute((System.ServiceModel.Channels.WebSocketTransportUsage)(2))]
        public System.ServiceModel.Channels.WebSocketTransportUsage TransportUsage { get { return default(System.ServiceModel.Channels.WebSocketTransportUsage); } set { } }
        public override bool Equals(object obj) { return default(bool); }
        public bool Equals(System.ServiceModel.Channels.WebSocketTransportSettings other) { return default(bool); }
        public override int GetHashCode() { return default(int); }
    }
    public enum WebSocketTransportUsage
    {
        Always = 1,
        Never = 2,
        WhenDuplex = 0,
    }
}
// DeflaterEngine.cs // // Copyright (C) 2001 Mike Krueger // Copyright (C) 2004 John Reilly // // This file was translated from java, it was part of the GNU Classpath // Copyright (C) 2001 Free Software Foundation, Inc. // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. // // Linking this library statically or dynamically with other modules is // making a combined work based on this library. Thus, the terms and // conditions of the GNU General Public License cover the whole // combination. // // As a special exception, the copyright holders of this library give you // permission to link this library with independent modules to produce an // executable, regardless of the license terms of these independent // modules, and to copy and distribute the resulting executable under // terms of your choice, provided that you also meet, for each linked // independent module, the terms and conditions of the license of that // module. An independent module is a module which is not derived from // or based on this library. If you modify this library, you may extend // this exception to your version of the library, but you are not // obligated to do so. If you do not wish to do so, delete this // exception statement from your version. 
#if ZIPLIB
using System;

using ICSharpCode.SharpZipLib.Checksums;

namespace ICSharpCode.SharpZipLib.Zip.Compression
{

	/// <summary>
	/// Strategies for deflater
	/// </summary>
	internal enum DeflateStrategy
	{
		/// <summary>
		/// The default strategy
		/// </summary>
		Default = 0,

		/// <summary>
		/// This strategy will only allow longer string repetitions.  It is
		/// useful for random data with a small character set.
		/// </summary>
		Filtered = 1,

		/// <summary>
		/// This strategy will not look for string repetitions at all.  It
		/// only encodes with Huffman trees (which means, that more common
		/// characters get a smaller encoding).
		/// </summary>
		HuffmanOnly = 2
	}

	// DEFLATE ALGORITHM:
	//
	// The uncompressed stream is inserted into the window array.  When
	// the window array is full the first half is thrown away and the
	// second half is copied to the beginning.
	//
	// The head array is a hash table.  Three characters build a hash value
	// and the value points to the corresponding index in window of
	// the last string with this hash.  The prev array implements a
	// linked list of matches with the same hash: prev[index & WMASK] points
	// to the previous index with the same hash.
	//

	/// <summary>
	/// Low level compression engine for deflate algorithm which uses a 32K sliding window
	/// with secondary compression from Huffman/Shannon-Fano codes.
	/// </summary>
	internal class DeflaterEngine : DeflaterConstants
	{
		#region Constants
		// A MIN_MATCH-length match whose distance exceeds this is discarded
		// under DeflateStrategy.Filtered (see DeflateSlow).
		const int TooFar = 4096;
		#endregion

		#region Constructors
		/// <summary>
		/// Construct instance with pending buffer
		/// </summary>
		/// <param name="pending">
		/// Pending buffer to use
		/// </param>
		public DeflaterEngine(DeflaterPending pending)
		{
			this.pending = pending;
			huffman = new DeflaterHuffman(pending);
			adler = new Adler32();
			window = new byte[2 * WSIZE];
			head = new short[HASH_SIZE];
			prev = new short[WSIZE];

			// We start at index 1, to avoid an implementation deficiency, that
			// we cannot build a repeat pattern at index 0.
			blockStart = strstart = 1;
		}
		#endregion

		/// <summary>
		/// Deflate drives actual compression of data
		/// </summary>
		/// <param name="flush">True to flush input buffers</param>
		/// <param name="finish">Finish deflation with the current input.</param>
		/// <returns>Returns true if progress has been made.</returns>
		public bool Deflate(bool flush, bool finish)
		{
			bool progress;
			do {
				FillWindow();
				// Only allow a flush once all supplied input has been copied into the window.
				bool canFlush = flush && (inputOff == inputEnd);

#if DebugDeflation
				if (DeflaterConstants.DEBUGGING) {
					Console.WriteLine("window: [" + blockStart + "," + strstart + "," + lookahead + "], " + compressionFunction + "," + canFlush);
				}
#endif
				// Dispatch on the compression function chosen by SetLevel().
				switch (compressionFunction) {
					case DEFLATE_STORED:
						progress = DeflateStored(canFlush, finish);
						break;
					case DEFLATE_FAST:
						progress = DeflateFast(canFlush, finish);
						break;
					case DEFLATE_SLOW:
						progress = DeflateSlow(canFlush, finish);
						break;
					default:
						throw new InvalidOperationException("unknown compressionFunction");
				}
			} while (pending.IsFlushed && progress); // repeat while we have no pending output and progress was made
			return progress;
		}

		/// <summary>
		/// Sets input data to be deflated.  Should only be called when <code>NeedsInput()</code>
		/// returns true
		/// </summary>
		/// <param name="buffer">The buffer containing input data.</param>
		/// <param name="offset">The offset of the first byte of data.</param>
		/// <param name="count">The number of bytes of data to use as input.</param>
		public void SetInput(byte[] buffer, int offset, int count)
		{
			if ( buffer == null ) {
				throw new ArgumentNullException("buffer");
			}

			if ( offset < 0 ) {
				throw new ArgumentOutOfRangeException("offset");
			}

			if ( count < 0 ) {
				throw new ArgumentOutOfRangeException("count");
			}

			if (inputOff < inputEnd) {
				throw new InvalidOperationException("Old input was not completely processed");
			}

			int end = offset + count;

			/* We want to throw an ArrayIndexOutOfBoundsException early.  The
			 * check is very tricky: it also handles integer wrap around.
			 */
			if ((offset > end) || (end > buffer.Length) ) {
				throw new ArgumentOutOfRangeException("count");
			}

			inputBuf = buffer;
			inputOff = offset;
			inputEnd = end;
		}

		/// <summary>
		/// Determines if more <see cref="SetInput">input</see> is needed.
		/// </summary>
		/// <returns>Return true if input is needed via <see cref="SetInput">SetInput</see></returns>
		public bool NeedsInput()
		{
			return (inputEnd == inputOff);
		}

		/// <summary>
		/// Set compression dictionary
		/// </summary>
		/// <param name="buffer">The buffer containing the dictionary data</param>
		/// <param name="offset">The offset in the buffer for the first byte of data</param>
		/// <param name="length">The length of the dictionary data.</param>
		public void SetDictionary(byte[] buffer, int offset, int length)
		{
#if DebugDeflation
			if (DeflaterConstants.DEBUGGING && (strstart != 1) )
			{
				throw new InvalidOperationException("strstart not 1");
			}
#endif
			adler.Update(buffer, offset, length);
			if (length < MIN_MATCH) {
				return;
			}

			// Only the final MAX_DIST bytes of the dictionary can ever be referenced
			// by a back-pointer, so discard anything earlier.
			if (length > MAX_DIST) {
				offset += length - MAX_DIST;
				length = MAX_DIST;
			}

			System.Array.Copy(buffer, offset, window, strstart, length);

			// Seed the rolling hash, then insert every complete 3-byte string of the
			// dictionary into the hash chains (the last two positions have no full
			// 3-byte string, hence the final += 2).
			UpdateHash();
			--length;
			while (--length > 0) {
				InsertString();
				strstart++;
			}
			strstart += 2;
			blockStart = strstart;
		}

		/// <summary>
		/// Reset internal state
		/// </summary>
		public void Reset()
		{
			huffman.Reset();
			adler.Reset();
			blockStart = strstart = 1;
			lookahead = 0;
			totalIn = 0;
			prevAvailable = false;
			matchLen = MIN_MATCH - 1;

			// Clear the hash chains so stale indices cannot produce bogus matches.
			for (int i = 0; i < HASH_SIZE; i++) {
				head[i] = 0;
			}

			for (int i = 0; i < WSIZE; i++) {
				prev[i] = 0;
			}
		}

		/// <summary>
		/// Reset Adler checksum
		/// </summary>
		public void ResetAdler()
		{
			adler.Reset();
		}

		/// <summary>
		/// Get current value of Adler checksum
		/// </summary>
		public int Adler {
			get {
				return unchecked((int)adler.Value);
			}
		}

		/// <summary>
		/// Total data processed
		/// </summary>
		public long TotalIn {
			get {
				return totalIn;
			}
		}

		/// <summary>
		/// Get/set the <see cref="DeflateStrategy">deflate strategy</see>
		/// </summary>
		public DeflateStrategy Strategy {
			get {
				return strategy;
			}
			set {
				strategy = value;
			}
		}

		/// <summary>
		/// Set the deflate level (0-9)
		/// </summary>
		/// <param name="level">The value to set the level to.</param>
		public void SetLevel(int level)
		{
			if ( (level < 0) || (level > 9) ) {
				throw new ArgumentOutOfRangeException("level");
			}

			goodLength = DeflaterConstants.GOOD_LENGTH[level];
			max_lazy = DeflaterConstants.MAX_LAZY[level];
			niceLength = DeflaterConstants.NICE_LENGTH[level];
			max_chain = DeflaterConstants.MAX_CHAIN[level];

			// Switching compression function mid-stream: flush whatever the old
			// function has accumulated before changing over.
			if (DeflaterConstants.COMPR_FUNC[level] != compressionFunction) {

#if DebugDeflation
				if (DeflaterConstants.DEBUGGING) {
					Console.WriteLine("Change from " + compressionFunction + " to " + DeflaterConstants.COMPR_FUNC[level]);
				}
#endif
				switch (compressionFunction) {
					case DEFLATE_STORED:
						if (strstart > blockStart) {
							huffman.FlushStoredBlock(window, blockStart, strstart - blockStart, false);
							blockStart = strstart;
						}
						UpdateHash();
						break;

					case DEFLATE_FAST:
						if (strstart > blockStart) {
							huffman.FlushBlock(window, blockStart, strstart - blockStart, false);
							blockStart = strstart;
						}
						break;

					case DEFLATE_SLOW:
						// DeflateSlow may be holding one pending literal; emit it first.
						if (prevAvailable) {
							huffman.TallyLit(window[strstart-1] & 0xff);
						}
						if (strstart > blockStart) {
							huffman.FlushBlock(window, blockStart, strstart - blockStart, false);
							blockStart = strstart;
						}
						prevAvailable = false;
						matchLen = MIN_MATCH - 1;
						break;
				}
				compressionFunction = COMPR_FUNC[level];
			}
		}

		/// <summary>
		/// Fill the window
		/// </summary>
		public void FillWindow()
		{
			/* If the window is almost full and there is insufficient lookahead,
			 * move the upper half to the lower one to make room in the upper half.
			 */
			if (strstart >= WSIZE + MAX_DIST) {
				SlideWindow();
			}

			/* If there is not enough lookahead, but still some input left,
			 * read in the input
			 */
			while (lookahead < DeflaterConstants.MIN_LOOKAHEAD && inputOff < inputEnd) {
				int more = 2 * WSIZE - lookahead - strstart;

				if (more > inputEnd - inputOff) {
					more = inputEnd - inputOff;
				}

				System.Array.Copy(inputBuf, inputOff, window, strstart + lookahead, more);
				adler.Update(inputBuf, inputOff, more);

				inputOff += more;
				totalIn += more;
				lookahead += more;
			}

			if (lookahead >= MIN_MATCH) {
				UpdateHash();
			}
		}

		// Seed the rolling hash from the first two bytes at strstart; InsertString()
		// folds in the third byte of each position as it goes.
		void UpdateHash()
		{
/*
			if (DEBUGGING) {
				Console.WriteLine("updateHash: "+strstart);
			}
*/
			ins_h = (window[strstart] << HASH_SHIFT) ^ window[strstart + 1];
		}

		/// <summary>
		/// Inserts the current string in the head hash and returns the previous
		/// value for this hash.
		/// </summary>
		/// <returns>The previous hash value</returns>
		int InsertString()
		{
			short match;
			int hash = ((ins_h << HASH_SHIFT) ^ window[strstart + (MIN_MATCH -1)]) & HASH_MASK;

#if DebugDeflation
			if (DeflaterConstants.DEBUGGING)
			{
				if (hash != (((window[strstart] << (2*HASH_SHIFT)) ^ (window[strstart + 1] << HASH_SHIFT) ^ (window[strstart + 2])) & HASH_MASK)) {
					throw new SharpZipBaseException("hash inconsistent: " + hash + "/" +window[strstart] + "," +window[strstart + 1] + "," +window[strstart + 2] + "," + HASH_SHIFT);
				}
			}
#endif
			// Link the new occurrence into the chain for this hash and remember
			// the previous head, which is the most recent candidate match.
			prev[strstart & WMASK] = match = head[hash];
			head[hash] = unchecked((short)strstart);
			ins_h = hash;
			return match & 0xffff;
		}

		// Drop the lower half of the window and rebase every index (match pointers,
		// hash heads and chain links) by WSIZE; entries that would go negative are zeroed.
		void SlideWindow()
		{
			Array.Copy(window, WSIZE, window, 0, WSIZE);
			matchStart -= WSIZE;
			strstart -= WSIZE;
			blockStart -= WSIZE;

			// Slide the hash table (could be avoided with 32 bit values
			// at the expense of memory usage).
			for (int i = 0; i < HASH_SIZE; ++i) {
				int m = head[i] & 0xffff;
				head[i] = (short)(m >= WSIZE ? (m - WSIZE) : 0);
			}

			// Slide the prev table.
			for (int i = 0; i < WSIZE; i++) {
				int m = prev[i] & 0xffff;
				prev[i] = (short)(m >= WSIZE ? (m - WSIZE) : 0);
			}
		}

		/// <summary>
		/// Find the best (longest) string in the window matching the
		/// string starting at strstart.
		///
		/// Preconditions:
		/// <code>
		/// strstart + MAX_MATCH &lt;= window.length.</code>
		/// </summary>
		/// <param name="curMatch">Window index of the first candidate match (from InsertString).</param>
		/// <returns>True if a match greater than the minimum length is found</returns>
		bool FindLongestMatch(int curMatch)
		{
			int chainLength = this.max_chain;
			int niceLength = this.niceLength;
			short[] prev = this.prev;
			int scan = this.strstart;
			int match;
			int best_end = this.strstart + matchLen;
			int best_len = Math.Max(matchLen, MIN_MATCH - 1);

			int limit = Math.Max(strstart - MAX_DIST, 0);

			int strend = strstart + MAX_MATCH - 1;
			// Cache the bytes a better-than-best match must reproduce, so most
			// candidates are rejected with at most four byte compares.
			byte scan_end1 = window[best_end - 1];
			byte scan_end = window[best_end];

			// Do not waste too much time if we already have a good match:
			if (best_len >= this.goodLength) {
				chainLength >>= 2;
			}

			/* Do not look for matches beyond the end of the input. This is necessary
			 * to make deflate deterministic.
			 */
			if (niceLength > lookahead) {
				niceLength = lookahead;
			}

#if DebugDeflation

			if (DeflaterConstants.DEBUGGING && (strstart > 2 * WSIZE - MIN_LOOKAHEAD))
			{
				throw new InvalidOperationException("need lookahead");
			}
#endif

			do {

#if DebugDeflation

				if (DeflaterConstants.DEBUGGING && (curMatch >= strstart) )
				{
					throw new InvalidOperationException("no future");
				}
#endif
				if (window[curMatch + best_len] != scan_end ||
					window[curMatch + best_len - 1] != scan_end1 ||
					window[curMatch] != window[scan] ||
					window[curMatch + 1] != window[scan + 1]) {
					continue;
				}

				match = curMatch + 2;
				scan += 2;

				/* We check for insufficient lookahead only every 8th comparison;
				 * the 256th check will be made at strstart + 258.
				 */
				while (
					window[++scan] == window[++match] &&
					window[++scan] == window[++match] &&
					window[++scan] == window[++match] &&
					window[++scan] == window[++match] &&
					window[++scan] == window[++match] &&
					window[++scan] == window[++match] &&
					window[++scan] == window[++match] &&
					window[++scan] == window[++match] &&
					(scan < strend))
				{
					// Do nothing
				}

				if (scan > best_end) {
#if DebugDeflation
					if (DeflaterConstants.DEBUGGING && (ins_h == 0) )
						Console.Error.WriteLine("Found match: " + curMatch + "-" + (scan - strstart));
#endif
					matchStart = curMatch;
					best_end = scan;
					best_len = scan - strstart;

					if (best_len >= niceLength) {
						break;
					}

					scan_end1 = window[best_end - 1];
					scan_end = window[best_end];
				}
				scan = strstart;
			} while ((curMatch = (prev[curMatch & WMASK] & 0xffff)) > limit && --chainLength != 0);

			matchLen = Math.Min(best_len, lookahead);
			return matchLen >= MIN_MATCH;
		}

		// Pass-through mode: copy input to output as uncompressed stored blocks.
		bool DeflateStored(bool flush, bool finish)
		{
			if (!flush && (lookahead == 0)) {
				return false;
			}

			strstart += lookahead;
			lookahead = 0;

			int storedLength = strstart - blockStart;

			if ((storedLength >= DeflaterConstants.MAX_BLOCK_SIZE) || // Block is full
				(blockStart < WSIZE && storedLength >= MAX_DIST) ||   // Block may move out of window
				flush) {
				bool lastBlock = finish;
				if (storedLength > DeflaterConstants.MAX_BLOCK_SIZE) {
					storedLength = DeflaterConstants.MAX_BLOCK_SIZE;
					lastBlock = false;
				}

#if DebugDeflation
				if (DeflaterConstants.DEBUGGING)
				{
					Console.WriteLine("storedBlock[" + storedLength + "," + lastBlock + "]");
				}
#endif

				huffman.FlushStoredBlock(window, blockStart, storedLength, lastBlock);
				blockStart += storedLength;
				return !lastBlock;
			}
			return true;
		}

		// One-pass deflate: each match found is emitted immediately (no lazy evaluation).
		bool DeflateFast(bool flush, bool finish)
		{
			if (lookahead < MIN_LOOKAHEAD && !flush) {
				return false;
			}

			while (lookahead >= MIN_LOOKAHEAD || flush) {
				if (lookahead == 0) {
					// We are flushing everything
					huffman.FlushBlock(window, blockStart, strstart - blockStart, finish);
					blockStart = strstart;
					return false;
				}

				if (strstart > 2 * WSIZE - MIN_LOOKAHEAD) {
					/* slide window, as FindLongestMatch needs this.
					 * This should only happen when flushing and the window
					 * is almost full.
					 */
					SlideWindow();
				}

				int hashHead;
				if (lookahead >= MIN_MATCH &&
					(hashHead = InsertString()) != 0 &&
					strategy != DeflateStrategy.HuffmanOnly &&
					strstart - hashHead <= MAX_DIST &&
					FindLongestMatch(hashHead)) {
					// longestMatch sets matchStart and matchLen
#if DebugDeflation
					if (DeflaterConstants.DEBUGGING)
					{
						for (int i = 0 ; i < matchLen; i++) {
							if (window[strstart + i] != window[matchStart + i]) {
								throw new SharpZipBaseException("Match failure");
							}
						}
					}
#endif

					bool full = huffman.TallyDist(strstart - matchStart, matchLen);

					lookahead -= matchLen;
					// For short matches, insert every covered position into the hash
					// chains; for long ones just skip ahead and re-seed the hash.
					if (matchLen <= max_lazy && lookahead >= MIN_MATCH) {
						while (--matchLen > 0) {
							++strstart;
							InsertString();
						}
						++strstart;
					} else {
						strstart += matchLen;
						if (lookahead >= MIN_MATCH - 1) {
							UpdateHash();
						}
					}
					matchLen = MIN_MATCH - 1;
					if (!full) {
						continue;
					}
				} else {
					// No match found
					huffman.TallyLit(window[strstart] & 0xff);
					++strstart;
					--lookahead;
				}

				if (huffman.IsFull()) {
					bool lastBlock = finish && (lookahead == 0);
					huffman.FlushBlock(window, blockStart, strstart - blockStart, lastBlock);
					blockStart = strstart;
					return !lastBlock;
				}
			}
			return true;
		}

		// Lazy-match deflate: a match at position n is only emitted if the match
		// starting at n+1 is not longer (prevMatch/prevLen track the deferred match).
		bool DeflateSlow(bool flush, bool finish)
		{
			if (lookahead < MIN_LOOKAHEAD && !flush) {
				return false;
			}

			while (lookahead >= MIN_LOOKAHEAD || flush) {
				if (lookahead == 0) {
					if (prevAvailable) {
						huffman.TallyLit(window[strstart-1] & 0xff);
					}
					prevAvailable = false;

					// We are flushing everything
#if DebugDeflation
					if (DeflaterConstants.DEBUGGING && !flush)
					{
						throw new SharpZipBaseException("Not flushing, but no lookahead");
					}
#endif
					huffman.FlushBlock(window, blockStart, strstart - blockStart, finish);
					blockStart = strstart;
					return false;
				}

				if (strstart >= 2 * WSIZE - MIN_LOOKAHEAD) {
					/* slide window, as FindLongestMatch needs this.
					 * This should only happen when flushing and the window
					 * is almost full.
					 */
					SlideWindow();
				}

				int prevMatch = matchStart;
				int prevLen = matchLen;
				if (lookahead >= MIN_MATCH) {

					int hashHead = InsertString();

					if (strategy != DeflateStrategy.HuffmanOnly &&
						hashHead != 0 &&
						strstart - hashHead <= MAX_DIST &&
						FindLongestMatch(hashHead)) {

						// longestMatch sets matchStart and matchLen

						// Discard match if too small and too far away
						if (matchLen <= 5 && (strategy == DeflateStrategy.Filtered || (matchLen == MIN_MATCH && strstart - matchStart > TooFar))) {
							matchLen = MIN_MATCH - 1;
						}
					}
				}

				// previous match was better
				if ((prevLen >= MIN_MATCH) && (matchLen <= prevLen) ) {
#if DebugDeflation
					if (DeflaterConstants.DEBUGGING)
					{
						for (int i = 0 ; i < matchLen; i++) {
							if (window[strstart-1+i] != window[prevMatch + i])
								throw new SharpZipBaseException();
						}
					}
#endif
					huffman.TallyDist(strstart - 1 - prevMatch, prevLen);
					// Two positions are consumed outside the loop (the current one and
					// the post-loop strstart++), hence the -= 2 before inserting.
					prevLen -= 2;
					do {
						strstart++;
						lookahead--;
						if (lookahead >= MIN_MATCH) {
							InsertString();
						}
					} while (--prevLen > 0);

					strstart ++;
					lookahead--;
					prevAvailable = false;
					matchLen = MIN_MATCH - 1;
				} else {
					// Defer: emit the previous byte as a literal (if pending) and keep
					// the current match as the new deferred candidate.
					if (prevAvailable) {
						huffman.TallyLit(window[strstart-1] & 0xff);
					}
					prevAvailable = true;
					strstart++;
					lookahead--;
				}

				if (huffman.IsFull()) {
					int len = strstart - blockStart;
					if (prevAvailable) {
						len--;
					}
					bool lastBlock = (finish && (lookahead == 0) && !prevAvailable);
					huffman.FlushBlock(window, blockStart, len, lastBlock);
					blockStart += len;
					return !lastBlock;
				}
			}
			return true;
		}

		#region Instance Fields

		// Hash index of string to be inserted
		int ins_h;

		/// <summary>
		/// Hashtable, hashing three characters to an index for window, so
		/// that window[index]..window[index+2] have this hash code.
		/// Note that the array should really be unsigned short, so you need
		/// to and the values with 0xffff.
		/// </summary>
		short[] head;

		/// <summary>
		/// <code>prev[index &amp; WMASK]</code> points to the previous index that has the
		/// same hash code as the string starting at index.  This way
		/// entries with the same hash code are in a linked list.
		/// Note that the array should really be unsigned short, so you need
		/// to and the values with 0xffff.
		/// </summary>
		short[] prev;

		// Window index where the best current match starts (set by FindLongestMatch).
		int matchStart;

		// Length of best match
		int matchLen;

		// Set if previous match exists
		bool prevAvailable;

		// Window index of the first byte not yet written to the output.
		int blockStart;

		/// <summary>
		/// Points to the current character in the window.
		/// </summary>
		int strstart;

		/// <summary>
		/// lookahead is the number of characters starting at strstart in
		/// window that are valid.
		/// So window[strstart] until window[strstart+lookahead-1] are valid
		/// characters.
		/// </summary>
		int lookahead;

		/// <summary>
		/// This array contains the part of the uncompressed stream that
		/// is of relevance.  The current character is indexed by strstart.
		/// </summary>
		byte[] window;

		DeflateStrategy strategy;

		// Per-level tuning parameters loaded from DeflaterConstants in SetLevel().
		int max_chain, max_lazy, niceLength, goodLength;

		/// <summary>
		/// The current compression function.
		/// </summary>
		int compressionFunction;

		/// <summary>
		/// The input data for compression.
		/// </summary>
		byte[] inputBuf;

		/// <summary>
		/// The total bytes of input read.
		/// </summary>
		long totalIn;

		/// <summary>
		/// The offset into inputBuf, where input data starts.
		/// </summary>
		int inputOff;

		/// <summary>
		/// The end offset of the input data.
		/// </summary>
		int inputEnd;

		DeflaterPending pending;
		DeflaterHuffman huffman;

		/// <summary>
		/// The adler checksum
		/// </summary>
		Adler32 adler;
		#endregion
	}
}
#endif
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!
// NOTE(review): generated gapic client scaffolding for the Showcase Identity
// service; regeneration will overwrite any manual change, so code below is
// left byte-for-byte intact.

using gax = Google.Api.Gax;
using gaxgrpc = Google.Api.Gax.Grpc;
using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore;
using proto = Google.Protobuf;
using wkt = Google.Protobuf.WellKnownTypes;
using grpccore = Grpc.Core;
using grpcinter = Grpc.Core.Interceptors;
using sys = System;
using sc = System.Collections;
using scg = System.Collections.Generic;
using sco = System.Collections.ObjectModel;
using st = System.Threading;
using stt = System.Threading.Tasks;

namespace Google.Showcase.V1Beta1
{
    /// <summary>Settings for <see cref="IdentityClient"/> instances.</summary>
    public sealed partial class IdentitySettings : gaxgrpc::ServiceSettingsBase
    {
        /// <summary>Get a new instance of the default <see cref="IdentitySettings"/>.</summary>
        /// <returns>A new instance of the default <see cref="IdentitySettings"/>.</returns>
        public static IdentitySettings GetDefault() => new IdentitySettings();

        /// <summary>Constructs a new <see cref="IdentitySettings"/> object with default settings.</summary>
        public IdentitySettings()
        {
        }

        // Copy constructor backing Clone(); OnCopy is a partial-method hook for
        // hand-written extensions.
        private IdentitySettings(IdentitySettings existing) : base(existing)
        {
            gax::GaxPreconditions.CheckNotNull(existing, nameof(existing));
            CreateUserSettings = existing.CreateUserSettings;
            GetUserSettings = existing.GetUserSettings;
            UpdateUserSettings = existing.UpdateUserSettings;
            DeleteUserSettings = existing.DeleteUserSettings;
            ListUsersSettings = existing.ListUsersSettings;
            OnCopy(existing);
        }

        partial void OnCopy(IdentitySettings existing);

        /// <summary>
        /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to <c>IdentityClient.CreateUser</c>
        /// and <c>IdentityClient.CreateUserAsync</c>.
        /// </summary>
        /// <remarks>
        /// <list type="bullet">
        /// <item><description>This call will not be retried.</description></item>
        /// <item><description>No timeout is applied.</description></item>
        /// </list>
        /// </remarks>
        public gaxgrpc::CallSettings CreateUserSettings { get; set; } = gaxgrpc::CallSettings.FromExpiration(gax::Expiration.None);

        /// <summary>
        /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to <c>IdentityClient.GetUser</c>
        /// and <c>IdentityClient.GetUserAsync</c>.
        /// </summary>
        /// <remarks>
        /// <list type="bullet">
        /// <item><description>This call will not be retried.</description></item>
        /// <item><description>No timeout is applied.</description></item>
        /// </list>
        /// </remarks>
        public gaxgrpc::CallSettings GetUserSettings { get; set; } = gaxgrpc::CallSettings.FromExpiration(gax::Expiration.None);

        /// <summary>
        /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to <c>IdentityClient.UpdateUser</c>
        /// and <c>IdentityClient.UpdateUserAsync</c>.
        /// </summary>
        /// <remarks>
        /// <list type="bullet">
        /// <item><description>This call will not be retried.</description></item>
        /// <item><description>No timeout is applied.</description></item>
        /// </list>
        /// </remarks>
        public gaxgrpc::CallSettings UpdateUserSettings { get; set; } = gaxgrpc::CallSettings.FromExpiration(gax::Expiration.None);

        /// <summary>
        /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to <c>IdentityClient.DeleteUser</c>
        /// and <c>IdentityClient.DeleteUserAsync</c>.
        /// </summary>
        /// <remarks>
        /// <list type="bullet">
        /// <item><description>This call will not be retried.</description></item>
        /// <item><description>No timeout is applied.</description></item>
        /// </list>
        /// </remarks>
        public gaxgrpc::CallSettings DeleteUserSettings { get; set; } = gaxgrpc::CallSettings.FromExpiration(gax::Expiration.None);

        /// <summary>
        /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to <c>IdentityClient.ListUsers</c>
        /// and <c>IdentityClient.ListUsersAsync</c>.
        /// </summary>
        /// <remarks>
        /// <list type="bullet">
        /// <item><description>This call will not be retried.</description></item>
        /// <item><description>No timeout is applied.</description></item>
        /// </list>
        /// </remarks>
        public gaxgrpc::CallSettings ListUsersSettings { get; set; } = gaxgrpc::CallSettings.FromExpiration(gax::Expiration.None);

        /// <summary>Creates a deep clone of this object, with all the same property values.</summary>
        /// <returns>A deep clone of this <see cref="IdentitySettings"/> object.</returns>
        public IdentitySettings Clone() => new IdentitySettings(this);
    }

    /// <summary>
    /// Builder class for <see cref="IdentityClient"/> to provide simple configuration of credentials, endpoint etc.
    /// </summary>
    public sealed partial class IdentityClientBuilder : gaxgrpc::ClientBuilderBase<IdentityClient>
    {
        /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary>
        public IdentitySettings Settings { get; set; }

        /// <summary>Creates a new builder with default settings.</summary>
        public IdentityClientBuilder()
        {
            UseJwtAccessWithScopes = IdentityClient.UseJwtAccessWithScopes;
        }

        // Partial-method hooks allowing hand-written code to short-circuit Build/BuildAsync.
        partial void InterceptBuild(ref IdentityClient client);

        partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<IdentityClient> task);

        /// <summary>Builds the resulting client.</summary>
        public override IdentityClient Build()
        {
            IdentityClient client = null;
            InterceptBuild(ref client);
            return client ?? BuildImpl();
        }

        /// <summary>Builds the resulting client asynchronously.</summary>
        public override stt::Task<IdentityClient> BuildAsync(st::CancellationToken cancellationToken = default)
        {
            stt::Task<IdentityClient> task = null;
            InterceptBuildAsync(cancellationToken, ref task);
            return task ?? BuildAsyncImpl(cancellationToken);
        }

        private IdentityClient BuildImpl()
        {
            Validate();
            grpccore::CallInvoker callInvoker = CreateCallInvoker();
            return IdentityClient.Create(callInvoker, Settings);
        }

        private async stt::Task<IdentityClient> BuildAsyncImpl(st::CancellationToken cancellationToken)
        {
            Validate();
            grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false);
            return IdentityClient.Create(callInvoker, Settings);
        }

        /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary>
        protected override string GetDefaultEndpoint() => IdentityClient.DefaultEndpoint;

        /// <summary>
        /// Returns the default scopes for this builder type, used if no scopes are otherwise specified.
        /// </summary>
        protected override scg::IReadOnlyList<string> GetDefaultScopes() => IdentityClient.DefaultScopes;

        /// <summary>Returns the channel pool to use when no other options are specified.</summary>
        protected override gaxgrpc::ChannelPool GetChannelPool() => IdentityClient.ChannelPool;

        /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/> to use if not otherwise specified.</summary>
        protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance;
    }

    /// <summary>Identity client wrapper, for convenient use.</summary>
    /// <remarks>
    /// A simple identity service.
    /// </remarks>
    public abstract partial class IdentityClient
    {
        /// <summary>
        /// The default endpoint for the Identity service, which is a host of "localhost:7469" and a port of 443.
/// </summary> public static string DefaultEndpoint { get; } = "localhost:7469:443"; /// <summary>The default Identity scopes.</summary> /// <remarks>The default Identity scopes are:<list type="bullet"></list></remarks> public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[] { }); internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes); internal static bool UseJwtAccessWithScopes { get { bool useJwtAccessWithScopes = true; MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes); return useJwtAccessWithScopes; } } static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes); /// <summary> /// Asynchronously creates a <see cref="IdentityClient"/> using the default credentials, endpoint and settings. /// To specify custom credentials or other settings, use <see cref="IdentityClientBuilder"/>. /// </summary> /// <param name="cancellationToken"> /// The <see cref="st::CancellationToken"/> to use while creating the client. /// </param> /// <returns>The task representing the created <see cref="IdentityClient"/>.</returns> public static stt::Task<IdentityClient> CreateAsync(st::CancellationToken cancellationToken = default) => new IdentityClientBuilder().BuildAsync(cancellationToken); /// <summary> /// Synchronously creates a <see cref="IdentityClient"/> using the default credentials, endpoint and settings. /// To specify custom credentials or other settings, use <see cref="IdentityClientBuilder"/>. /// </summary> /// <returns>The created <see cref="IdentityClient"/>.</returns> public static IdentityClient Create() => new IdentityClientBuilder().Build(); /// <summary> /// Creates a <see cref="IdentityClient"/> which uses the specified call invoker for remote operations. /// </summary> /// <param name="callInvoker"> /// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null. 
/// </param> /// <param name="settings">Optional <see cref="IdentitySettings"/>.</param> /// <returns>The created <see cref="IdentityClient"/>.</returns> internal static IdentityClient Create(grpccore::CallInvoker callInvoker, IdentitySettings settings = null) { gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker)); grpcinter::Interceptor interceptor = settings?.Interceptor; if (interceptor != null) { callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor); } Identity.IdentityClient grpcClient = new Identity.IdentityClient(callInvoker); return new IdentityClientImpl(grpcClient, settings); } /// <summary> /// Shuts down any channels automatically created by <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not /// affected. /// </summary> /// <remarks> /// After calling this method, further calls to <see cref="Create()"/> and /// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down /// by another call to this method. /// </remarks> /// <returns>A task representing the asynchronous shutdown operation.</returns> public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync(); /// <summary>The underlying gRPC Identity client</summary> public virtual Identity.IdentityClient GrpcClient => throw new sys::NotImplementedException(); /// <summary> /// Creates a user. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual User CreateUser(CreateUserRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Creates a user. 
/// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<User> CreateUserAsync(CreateUserRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException(); /// <summary> /// Creates a user. /// </summary> /// <param name="request">The request object containing all of the parameters for the API call.</param> /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param> /// <returns>A Task containing the RPC response.</returns> public virtual stt::Task<User> CreateUserAsync(CreateUserRequest request, st::CancellationToken cancellationToken) => CreateUserAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken)); /// <summary> /// Creates a user. /// </summary> /// <param name="displayName"> /// The display_name of the user. /// </param> /// <param name="email"> /// The email address of the user. /// </param> /// <param name="callSettings">If not null, applies overrides to this RPC call.</param> /// <returns>The RPC response.</returns> public virtual User CreateUser(string displayName, string email, gaxgrpc::CallSettings callSettings = null) => CreateUser(new CreateUserRequest { User = new User { DisplayName = gax::GaxPreconditions.CheckNotNullOrEmpty(displayName, nameof(displayName)), Email = gax::GaxPreconditions.CheckNotNullOrEmpty(email, nameof(email)), }, }, callSettings); /// <summary> /// Creates a user. /// </summary> /// <param name="displayName"> /// The display_name of the user. /// </param> /// <param name="email"> /// The email address of the user. 
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<User> CreateUserAsync(string displayName, string email, gaxgrpc::CallSettings callSettings = null)
{
    // Assemble the request from the flattened parameters; both fields must be non-empty.
    CreateUserRequest request = new CreateUserRequest
    {
        User = new User
        {
            DisplayName = gax::GaxPreconditions.CheckNotNullOrEmpty(displayName, nameof(displayName)),
            Email = gax::GaxPreconditions.CheckNotNullOrEmpty(email, nameof(email)),
        },
    };
    return CreateUserAsync(request, callSettings);
}

/// <summary>
/// Creates a user.
/// </summary>
/// <param name="displayName">
/// The display_name of the user.
/// </param>
/// <param name="email">
/// The email address of the user.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<User> CreateUserAsync(string displayName, string email, st::CancellationToken cancellationToken)
{
    // Forward to the CallSettings-based overload, wrapping the token in CallSettings.
    gaxgrpc::CallSettings settings = gaxgrpc::CallSettings.FromCancellationToken(cancellationToken);
    return CreateUserAsync(displayName, email, settings);
}

/// <summary>
/// Creates a user.
/// </summary>
/// <param name="displayName">
/// The display_name of the user.
/// </param>
/// <param name="email">
/// The email address of the user.
/// </param>
/// <param name="age">
/// The age of the user in years.
/// </param>
/// <param name="nickname">
/// The nickname of the user.
///
/// (-- aip.dev/not-precedent: An empty string is a valid nickname.
/// Ordinarily, proto3_optional should not be used on a `string` field. --)
/// </param>
/// <param name="enableNotifications">
/// Enables the receiving of notifications. The default is true if unset.
///
/// (-- aip.dev/not-precedent: The default for the feature is true.
/// Ordinarily, the default for a `bool` field should be false. --)
/// </param>
/// <param name="heightFeet">
/// The height of the user in feet.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual User CreateUser(string displayName, string email, int age, string nickname, bool enableNotifications, double heightFeet, gaxgrpc::CallSettings callSettings = null)
{
    // Assemble the request from the flattened parameters.
    CreateUserRequest request = new CreateUserRequest
    {
        User = new User
        {
            DisplayName = gax::GaxPreconditions.CheckNotNullOrEmpty(displayName, nameof(displayName)),
            Email = gax::GaxPreconditions.CheckNotNullOrEmpty(email, nameof(email)),
            Age = age,
            HeightFeet = heightFeet,
            // An empty string is a valid nickname, so null is coalesced rather than rejected.
            Nickname = nickname ?? "",
            EnableNotifications = enableNotifications,
        },
    };
    return CreateUser(request, callSettings);
}

/// <summary>
/// Creates a user.
/// </summary>
/// <param name="displayName">
/// The display_name of the user.
/// </param>
/// <param name="email">
/// The email address of the user.
/// </param>
/// <param name="age">
/// The age of the user in years.
/// </param>
/// <param name="nickname">
/// The nickname of the user.
///
/// (-- aip.dev/not-precedent: An empty string is a valid nickname.
/// Ordinarily, proto3_optional should not be used on a `string` field. --)
/// </param>
/// <param name="enableNotifications">
/// Enables the receiving of notifications. The default is true if unset.
///
/// (-- aip.dev/not-precedent: The default for the feature is true.
/// Ordinarily, the default for a `bool` field should be false. --)
/// </param>
/// <param name="heightFeet">
/// The height of the user in feet.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<User> CreateUserAsync(string displayName, string email, int age, string nickname, bool enableNotifications, double heightFeet, gaxgrpc::CallSettings callSettings = null)
{
    // Assemble the request from the flattened parameters.
    CreateUserRequest request = new CreateUserRequest
    {
        User = new User
        {
            DisplayName = gax::GaxPreconditions.CheckNotNullOrEmpty(displayName, nameof(displayName)),
            Email = gax::GaxPreconditions.CheckNotNullOrEmpty(email, nameof(email)),
            Age = age,
            HeightFeet = heightFeet,
            // An empty string is a valid nickname, so null is coalesced rather than rejected.
            Nickname = nickname ?? "",
            EnableNotifications = enableNotifications,
        },
    };
    return CreateUserAsync(request, callSettings);
}

/// <summary>
/// Creates a user.
/// </summary>
/// <param name="displayName">
/// The display_name of the user.
/// </param>
/// <param name="email">
/// The email address of the user.
/// </param>
/// <param name="age">
/// The age of the user in years.
/// </param>
/// <param name="nickname">
/// The nickname of the user.
///
/// (-- aip.dev/not-precedent: An empty string is a valid nickname.
/// Ordinarily, proto3_optional should not be used on a `string` field. --)
/// </param>
/// <param name="enableNotifications">
/// Enables the receiving of notifications. The default is true if unset.
///
/// (-- aip.dev/not-precedent: The default for the feature is true.
/// Ordinarily, the default for a `bool` field should be false. --)
/// </param>
/// <param name="heightFeet">
/// The height of the user in feet.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<User> CreateUserAsync(string displayName, string email, int age, string nickname, bool enableNotifications, double heightFeet, st::CancellationToken cancellationToken)
{
    // Forward to the CallSettings-based overload, wrapping the token in CallSettings.
    gaxgrpc::CallSettings settings = gaxgrpc::CallSettings.FromCancellationToken(cancellationToken);
    return CreateUserAsync(displayName, email, age, nickname, enableNotifications, heightFeet, settings);
}

/// <summary>
/// Retrieves the User with the given uri.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual User GetUser(GetUserRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Overridden by IdentityClientImpl.
    throw new sys::NotImplementedException();
}

/// <summary>
/// Retrieves the User with the given uri.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<User> GetUserAsync(GetUserRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Overridden by IdentityClientImpl.
    throw new sys::NotImplementedException();
}

/// <summary>
/// Retrieves the User with the given uri.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<User> GetUserAsync(GetUserRequest request, st::CancellationToken cancellationToken)
{
    // Forward to the CallSettings-based overload, wrapping the token in CallSettings.
    gaxgrpc::CallSettings settings = gaxgrpc::CallSettings.FromCancellationToken(cancellationToken);
    return GetUserAsync(request, settings);
}

/// <summary>
/// Retrieves the User with the given uri.
/// </summary>
/// <param name="name">
/// The resource name of the requested user.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual User GetUser(string name, gaxgrpc::CallSettings callSettings = null)
{
    // Build the request around the (required, non-empty) resource name.
    GetUserRequest request = new GetUserRequest
    {
        Name = gax::GaxPreconditions.CheckNotNullOrEmpty(name, nameof(name)),
    };
    return GetUser(request, callSettings);
}

/// <summary>
/// Retrieves the User with the given uri.
/// </summary>
/// <param name="name">
/// The resource name of the requested user.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<User> GetUserAsync(string name, gaxgrpc::CallSettings callSettings = null)
{
    // Build the request around the (required, non-empty) resource name.
    GetUserRequest request = new GetUserRequest
    {
        Name = gax::GaxPreconditions.CheckNotNullOrEmpty(name, nameof(name)),
    };
    return GetUserAsync(request, callSettings);
}

/// <summary>
/// Retrieves the User with the given uri.
/// </summary>
/// <param name="name">
/// The resource name of the requested user.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<User> GetUserAsync(string name, st::CancellationToken cancellationToken)
{
    // Forward to the CallSettings-based overload, wrapping the token in CallSettings.
    gaxgrpc::CallSettings settings = gaxgrpc::CallSettings.FromCancellationToken(cancellationToken);
    return GetUserAsync(name, settings);
}

/// <summary>
/// Retrieves the User with the given uri.
/// </summary>
/// <param name="name">
/// The resource name of the requested user.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual User GetUser(UserName name, gaxgrpc::CallSettings callSettings = null)
{
    // Build the request from the strongly-typed resource name.
    GetUserRequest request = new GetUserRequest
    {
        UserName = gax::GaxPreconditions.CheckNotNull(name, nameof(name)),
    };
    return GetUser(request, callSettings);
}

/// <summary>
/// Retrieves the User with the given uri.
/// </summary>
/// <param name="name">
/// The resource name of the requested user.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<User> GetUserAsync(UserName name, gaxgrpc::CallSettings callSettings = null)
{
    // Build the request from the strongly-typed resource name.
    GetUserRequest request = new GetUserRequest
    {
        UserName = gax::GaxPreconditions.CheckNotNull(name, nameof(name)),
    };
    return GetUserAsync(request, callSettings);
}

/// <summary>
/// Retrieves the User with the given uri.
/// </summary>
/// <param name="name">
/// The resource name of the requested user.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<User> GetUserAsync(UserName name, st::CancellationToken cancellationToken)
{
    // Forward to the CallSettings-based overload, wrapping the token in CallSettings.
    gaxgrpc::CallSettings settings = gaxgrpc::CallSettings.FromCancellationToken(cancellationToken);
    return GetUserAsync(name, settings);
}

/// <summary>
/// Updates a user.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual User UpdateUser(UpdateUserRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Overridden by IdentityClientImpl.
    throw new sys::NotImplementedException();
}

/// <summary>
/// Updates a user.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<User> UpdateUserAsync(UpdateUserRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Overridden by IdentityClientImpl.
    throw new sys::NotImplementedException();
}

/// <summary>
/// Updates a user.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<User> UpdateUserAsync(UpdateUserRequest request, st::CancellationToken cancellationToken)
{
    // Forward to the CallSettings-based overload, wrapping the token in CallSettings.
    gaxgrpc::CallSettings settings = gaxgrpc::CallSettings.FromCancellationToken(cancellationToken);
    return UpdateUserAsync(request, settings);
}

/// <summary>
/// Deletes a user, their profile, and all of their authored messages.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual void DeleteUser(DeleteUserRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Overridden by IdentityClientImpl.
    throw new sys::NotImplementedException();
}

/// <summary>
/// Deletes a user, their profile, and all of their authored messages.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task DeleteUserAsync(DeleteUserRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Overridden by IdentityClientImpl.
    throw new sys::NotImplementedException();
}

/// <summary>
/// Deletes a user, their profile, and all of their authored messages.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task DeleteUserAsync(DeleteUserRequest request, st::CancellationToken cancellationToken)
{
    // Forward to the CallSettings-based overload, wrapping the token in CallSettings.
    gaxgrpc::CallSettings settings = gaxgrpc::CallSettings.FromCancellationToken(cancellationToken);
    return DeleteUserAsync(request, settings);
}

/// <summary>
/// Deletes a user, their profile, and all of their authored messages.
/// </summary>
/// <param name="name">
/// The resource name of the user to delete.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual void DeleteUser(string name, gaxgrpc::CallSettings callSettings = null)
{
    // Build the request around the (required, non-empty) resource name.
    DeleteUserRequest request = new DeleteUserRequest
    {
        Name = gax::GaxPreconditions.CheckNotNullOrEmpty(name, nameof(name)),
    };
    DeleteUser(request, callSettings);
}

/// <summary>
/// Deletes a user, their profile, and all of their authored messages.
/// </summary>
/// <param name="name">
/// The resource name of the user to delete.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task DeleteUserAsync(string name, gaxgrpc::CallSettings callSettings = null)
{
    // Build the request around the (required, non-empty) resource name.
    DeleteUserRequest request = new DeleteUserRequest
    {
        Name = gax::GaxPreconditions.CheckNotNullOrEmpty(name, nameof(name)),
    };
    return DeleteUserAsync(request, callSettings);
}

/// <summary>
/// Deletes a user, their profile, and all of their authored messages.
/// </summary>
/// <param name="name">
/// The resource name of the user to delete.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task DeleteUserAsync(string name, st::CancellationToken cancellationToken)
{
    // Forward to the CallSettings-based overload, wrapping the token in CallSettings.
    gaxgrpc::CallSettings settings = gaxgrpc::CallSettings.FromCancellationToken(cancellationToken);
    return DeleteUserAsync(name, settings);
}

/// <summary>
/// Deletes a user, their profile, and all of their authored messages.
/// </summary>
/// <param name="name">
/// The resource name of the user to delete.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual void DeleteUser(UserName name, gaxgrpc::CallSettings callSettings = null)
{
    // Build the request from the strongly-typed resource name.
    DeleteUserRequest request = new DeleteUserRequest
    {
        UserName = gax::GaxPreconditions.CheckNotNull(name, nameof(name)),
    };
    DeleteUser(request, callSettings);
}

/// <summary>
/// Deletes a user, their profile, and all of their authored messages.
/// </summary>
/// <param name="name">
/// The resource name of the user to delete.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task DeleteUserAsync(UserName name, gaxgrpc::CallSettings callSettings = null)
{
    // Build the request from the strongly-typed resource name.
    DeleteUserRequest request = new DeleteUserRequest
    {
        UserName = gax::GaxPreconditions.CheckNotNull(name, nameof(name)),
    };
    return DeleteUserAsync(request, callSettings);
}

/// <summary>
/// Deletes a user, their profile, and all of their authored messages.
/// </summary>
/// <param name="name">
/// The resource name of the user to delete.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task DeleteUserAsync(UserName name, st::CancellationToken cancellationToken)
{
    // Forward to the CallSettings-based overload, wrapping the token in CallSettings.
    gaxgrpc::CallSettings settings = gaxgrpc::CallSettings.FromCancellationToken(cancellationToken);
    return DeleteUserAsync(name, settings);
}

/// <summary>
/// Lists all users.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A pageable sequence of <see cref="User"/> resources.</returns>
public virtual gax::PagedEnumerable<ListUsersResponse, User> ListUsers(ListUsersRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Overridden by IdentityClientImpl.
    throw new sys::NotImplementedException();
}

/// <summary>
/// Lists all users.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A pageable asynchronous sequence of <see cref="User"/> resources.</returns>
public virtual gax::PagedAsyncEnumerable<ListUsersResponse, User> ListUsersAsync(ListUsersRequest request, gaxgrpc::CallSettings callSettings = null) => throw new sys::NotImplementedException();
}

/// <summary>Identity client wrapper implementation, for convenient use.</summary>
/// <remarks>
/// A simple identity service.
/// </remarks>
public sealed partial class IdentityClientImpl : IdentityClient
{
    // One ApiCall per RPC; each bundles the sync/async gRPC delegates with its per-method CallSettings.
    private readonly gaxgrpc::ApiCall<CreateUserRequest, User> _callCreateUser;

    private readonly gaxgrpc::ApiCall<GetUserRequest, User> _callGetUser;

    private readonly gaxgrpc::ApiCall<UpdateUserRequest, User> _callUpdateUser;

    private readonly gaxgrpc::ApiCall<DeleteUserRequest, wkt::Empty> _callDeleteUser;

    private readonly gaxgrpc::ApiCall<ListUsersRequest, ListUsersResponse> _callListUsers;

    /// <summary>
    /// Constructs a client wrapper for the Identity service, with the specified gRPC client and settings.
    /// </summary>
    /// <param name="grpcClient">The underlying gRPC client.</param>
    /// <param name="settings">The base <see cref="IdentitySettings"/> used within this client.</param>
    public IdentityClientImpl(Identity.IdentityClient grpcClient, IdentitySettings settings)
    {
        GrpcClient = grpcClient;
        IdentitySettings effectiveSettings = settings ?? IdentitySettings.GetDefault();
        gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings);
        // For each RPC: build the ApiCall, then invoke the generic and the per-method
        // partial-method hooks (in that order) so user code can customize the call.
        _callCreateUser = clientHelper.BuildApiCall<CreateUserRequest, User>(grpcClient.CreateUserAsync, grpcClient.CreateUser, effectiveSettings.CreateUserSettings);
        Modify_ApiCall(ref _callCreateUser);
        Modify_CreateUserApiCall(ref _callCreateUser);
        // WithGoogleRequestParam adds the x-goog-request-params routing header from the named field.
        _callGetUser = clientHelper.BuildApiCall<GetUserRequest, User>(grpcClient.GetUserAsync, grpcClient.GetUser, effectiveSettings.GetUserSettings).WithGoogleRequestParam("name", request => request.Name);
        Modify_ApiCall(ref _callGetUser);
        Modify_GetUserApiCall(ref _callGetUser);
        _callUpdateUser = clientHelper.BuildApiCall<UpdateUserRequest, User>(grpcClient.UpdateUserAsync, grpcClient.UpdateUser, effectiveSettings.UpdateUserSettings).WithGoogleRequestParam("user.name", request => request.User?.Name);
        Modify_ApiCall(ref _callUpdateUser);
        Modify_UpdateUserApiCall(ref _callUpdateUser);
        _callDeleteUser = clientHelper.BuildApiCall<DeleteUserRequest, wkt::Empty>(grpcClient.DeleteUserAsync, grpcClient.DeleteUser, effectiveSettings.DeleteUserSettings).WithGoogleRequestParam("name", request => request.Name);
        Modify_ApiCall(ref _callDeleteUser);
        Modify_DeleteUserApiCall(ref _callDeleteUser);
        _callListUsers = clientHelper.BuildApiCall<ListUsersRequest, ListUsersResponse>(grpcClient.ListUsersAsync, grpcClient.ListUsers, effectiveSettings.ListUsersSettings);
        Modify_ApiCall(ref _callListUsers);
        Modify_ListUsersApiCall(ref _callListUsers);
        OnConstruction(grpcClient, effectiveSettings, clientHelper);
    }

    // Partial-method hooks: no-ops unless implemented in a hand-written partial class.
    partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;

    partial void Modify_CreateUserApiCall(ref gaxgrpc::ApiCall<CreateUserRequest, User> call);

    partial void Modify_GetUserApiCall(ref gaxgrpc::ApiCall<GetUserRequest, User> call);

    partial void Modify_UpdateUserApiCall(ref gaxgrpc::ApiCall<UpdateUserRequest, User> call);

    partial void Modify_DeleteUserApiCall(ref gaxgrpc::ApiCall<DeleteUserRequest, wkt::Empty> call);

    partial void Modify_ListUsersApiCall(ref gaxgrpc::ApiCall<ListUsersRequest, ListUsersResponse> call);

    partial void OnConstruction(Identity.IdentityClient grpcClient, IdentitySettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);

    /// <summary>The underlying gRPC Identity client</summary>
    public override Identity.IdentityClient GrpcClient { get; }

    // Per-RPC request/settings hooks, invoked before every call is issued.
    partial void Modify_CreateUserRequest(ref CreateUserRequest request, ref gaxgrpc::CallSettings settings);

    partial void Modify_GetUserRequest(ref GetUserRequest request, ref gaxgrpc::CallSettings settings);

    partial void Modify_UpdateUserRequest(ref UpdateUserRequest request, ref gaxgrpc::CallSettings settings);

    partial void Modify_DeleteUserRequest(ref DeleteUserRequest request, ref gaxgrpc::CallSettings settings);

    partial void Modify_ListUsersRequest(ref ListUsersRequest request, ref gaxgrpc::CallSettings settings);

    /// <summary>
    /// Creates a user.
    /// </summary>
    /// <param name="request">The request object containing all of the parameters for the API call.</param>
    /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
    /// <returns>The RPC response.</returns>
    public override User CreateUser(CreateUserRequest request, gaxgrpc::CallSettings callSettings = null)
    {
        Modify_CreateUserRequest(ref request, ref callSettings);
        return _callCreateUser.Sync(request, callSettings);
    }

    /// <summary>
    /// Creates a user.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public override stt::Task<User> CreateUserAsync(CreateUserRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Give the partial-method hook a chance to adjust the request/settings first.
    Modify_CreateUserRequest(ref request, ref callSettings);
    stt::Task<User> response = _callCreateUser.Async(request, callSettings);
    return response;
}

/// <summary>
/// Retrieves the User with the given uri.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public override User GetUser(GetUserRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Give the partial-method hook a chance to adjust the request/settings first.
    Modify_GetUserRequest(ref request, ref callSettings);
    User response = _callGetUser.Sync(request, callSettings);
    return response;
}

/// <summary>
/// Retrieves the User with the given uri.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public override stt::Task<User> GetUserAsync(GetUserRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Give the partial-method hook a chance to adjust the request/settings first.
    Modify_GetUserRequest(ref request, ref callSettings);
    stt::Task<User> response = _callGetUser.Async(request, callSettings);
    return response;
}

/// <summary>
/// Updates a user.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public override User UpdateUser(UpdateUserRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Give the partial-method hook a chance to adjust the request/settings first.
    Modify_UpdateUserRequest(ref request, ref callSettings);
    User response = _callUpdateUser.Sync(request, callSettings);
    return response;
}

/// <summary>
/// Updates a user.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public override stt::Task<User> UpdateUserAsync(UpdateUserRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Give the partial-method hook a chance to adjust the request/settings first.
    Modify_UpdateUserRequest(ref request, ref callSettings);
    stt::Task<User> response = _callUpdateUser.Async(request, callSettings);
    return response;
}

/// <summary>
/// Deletes a user, their profile, and all of their authored messages.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public override void DeleteUser(DeleteUserRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Give the partial-method hook a chance to adjust the request/settings first.
    Modify_DeleteUserRequest(ref request, ref callSettings);
    // The RPC returns Empty; the result is intentionally discarded.
    _callDeleteUser.Sync(request, callSettings);
}

/// <summary>
/// Deletes a user, their profile, and all of their authored messages.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public override stt::Task DeleteUserAsync(DeleteUserRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Give the partial-method hook a chance to adjust the request/settings first.
    Modify_DeleteUserRequest(ref request, ref callSettings);
    stt::Task response = _callDeleteUser.Async(request, callSettings);
    return response;
}

/// <summary>
/// Lists all users.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A pageable sequence of <see cref="User"/> resources.</returns>
public override gax::PagedEnumerable<ListUsersResponse, User> ListUsers(ListUsersRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Give the partial-method hook a chance to adjust the request/settings first.
    Modify_ListUsersRequest(ref request, ref callSettings);
    // Wrap the ApiCall in a lazily-paginating enumerable; no RPC is issued until enumeration.
    gax::PagedEnumerable<ListUsersResponse, User> pages = new gaxgrpc::GrpcPagedEnumerable<ListUsersRequest, ListUsersResponse, User>(_callListUsers, request, callSettings);
    return pages;
}

/// <summary>
/// Lists all users.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A pageable asynchronous sequence of <see cref="User"/> resources.</returns>
public override gax::PagedAsyncEnumerable<ListUsersResponse, User> ListUsersAsync(ListUsersRequest request, gaxgrpc::CallSettings callSettings = null)
{
    // Give the partial-method hook a chance to adjust the request/settings first.
    Modify_ListUsersRequest(ref request, ref callSettings);
    // Wrap the ApiCall in a lazily-paginating async enumerable; no RPC is issued until enumeration.
    gax::PagedAsyncEnumerable<ListUsersResponse, User> pages = new gaxgrpc::GrpcPagedAsyncEnumerable<ListUsersRequest, ListUsersResponse, User>(_callListUsers, request, callSettings);
    return pages;
}
}

/// <summary>Marks ListUsersRequest as a page request so GAX page-streaming can drive it.</summary>
public partial class ListUsersRequest : gaxgrpc::IPageRequest
{
}

/// <summary>Exposes the users in a ListUsersResponse page as an enumerable sequence.</summary>
public partial class ListUsersResponse : gaxgrpc::IPageResponse<User>
{
    /// <summary>Returns an enumerator that iterates through the resources in this response.</summary>
    public scg::IEnumerator<User> GetEnumerator()
    {
        return Users.GetEnumerator();
    }

    sc::IEnumerator sc::IEnumerable.GetEnumerator()
    {
        return GetEnumerator();
    }
}
}
#region Licence... //----------------------------------------------------------------------------- // Date: 17/10/04 Time: 2:33p // Module: csscript.cs // Classes: CSExecutor // ExecuteOptions // // This module contains the definition of the CSExecutor class. Which implements // compiling C# code and executing 'Main' method of compiled assembly // // Written by Oleg Shilo (oshilo@gmail.com) // Copyright (c) 2004-2012. All rights reserved. // // Redistribution and use of this code WITHOUT MODIFICATIONS are permitted provided that // the following conditions are met: // 1. Redistributions must retain the above copyright notice, this list of conditions // and the following disclaimer. // 2. Neither the name of an author nor the names of the contributors may be used // to endorse or promote products derived from this software without specific // prior written permission. // // Redistribution and use of this code WITH MODIFICATIONS are permitted provided that all // above conditions are met and software is not used or sold for profit. // // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED // TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // Caution: Bugs are expected! //---------------------------------------------- #endregion Licence... 
using System;
using System.IO;
using System.Reflection;
#if net1
using System.Collections;
#else
using System.Collections.Generic;
#endif
using System.Text;
using CSScriptLibrary;
using System.Runtime.InteropServices;
using System.Threading;
using System.CodeDom.Compiler;
//using System.Windows.Forms;
using System.Globalization;
using System.Diagnostics;
using Microsoft.CSharp;

namespace csscript
{
    /// <summary>
    /// Shared stopwatch used to report initialization/compilation timings in verbose mode.
    /// </summary>
    internal class Profiler
    {
        static public Stopwatch Stopwatch = new Stopwatch();
    }

    /// <summary>
    /// Contract implemented by the script executors (help/version/sample output plus access to parsed options).
    /// </summary>
    internal interface IScriptExecutor
    {
        void ShowHelp();

        void ShowVersion();

        void ShowPrecompilerSample();

        void ShowSample();

        ExecuteOptions GetOptions();
    }

    /// <summary>
    /// CSExecutor is an class that implements execution of *.cs files.
    /// </summary>
    internal class CSExecutor : IScriptExecutor
    {
        #region Public interface...

        /// <summary>
        /// Force caught exceptions to be rethrown.
        /// </summary>
        public bool Rethrow
        {
            get { return rethrow; }
            set { rethrow = value; }
        }

        /// <summary>
        /// Loads persisted settings (css_config.xml or the alternative config) and copies them into
        /// the static <c>options</c>. Any default command-line arguments configured in the settings are
        /// parsed as well; their script arguments are appended to <paramref name="appArgs"/>.
        /// Returns the loaded settings, or null if none could be loaded.
        /// </summary>
#if net1
        private Settings GetPersistedSettings(ArrayList appArgs)
#else
        private Settings GetPersistedSettings(List<string> appArgs)
#endif
        {
            //read persistent settings from configuration file
            Settings settings = null;
            if (options.noConfig)
            {
                if (options.altConfig != "")
                    settings = Settings.Load(Path.GetFullPath(options.altConfig));
                else
                    settings = Settings.Load(null, true);
            }
            else
            {
                // NOTE(review): dynamically loaded assemblies may have an empty Location; guard avoids Path.Combine(null, ...)
                if (!string.IsNullOrEmpty(Assembly.GetExecutingAssembly().Location))
                    settings = Settings.Load(Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), "css_config.xml"));
            }
            if (settings != null)
            {
                options.hideTemp = settings.HideAutoGeneratedFiles;
                if (options.preCompilers == "") //it may be set from command-line args, which have higher precedence
                    options.preCompilers = settings.Precompiler;
                options.altCompiler = settings.ExpandUseAlternativeCompiler();
                options.defaultRefAssemblies = settings.ExpandDefaultRefAssemblies();
                options.postProcessor = settings.ExpandUsePostProcessor();
                options.apartmentState = settings.DefaultApartmentState;
                options.reportDetailedErrorInfo = settings.ReportDetailedErrorInfo;
                options.openEndDirectiveSyntax = settings.OpenEndDirectiveSyntax;
                options.cleanupShellCommand = settings.ExpandCleanupShellCommand();
                options.doCleanupAfterNumberOfRuns = settings.DoCleanupAfterNumberOfRuns;
                options.inMemoryAsm = settings.InMemoryAsssembly;
                //options.useSurrogateHostingProcess = settings.UseSurrogateHostingProcess;
                options.hideCompilerWarnings = settings.HideCompilerWarnings;
                options.TargetFramework = settings.TargetFramework;

                //process default command-line arguments
                string[] defaultCmdArgs = settings.DefaultArguments.Split(" ".ToCharArray());
                defaultCmdArgs = Utils.RemoveEmptyStrings(defaultCmdArgs);
                int firstDefaultScriptArg = CSSUtils.ParseAppArgs(defaultCmdArgs, this);
                if (firstDefaultScriptArg != defaultCmdArgs.Length)
                {
                    // A default script file was configured; remaining default args become script arguments.
                    options.scriptFileName = defaultCmdArgs[firstDefaultScriptArg];
                    for (int i = firstDefaultScriptArg + 1; i < defaultCmdArgs.Length; i++)
                        if (defaultCmdArgs[i].Trim().Length != 0)
                            appArgs.Add(defaultCmdArgs[i]);
                }
                //if (options.suppressExternalHosting)
                //    options.useSurrogateHostingProcess = settings.UseSurrogateHostingProcess = false;
            }
            return settings;
        }

        /// <summary>
        /// The main entry point for the application.
        /// </summary>
        /// <param name="args">Raw engine + script command-line arguments.</param>
        /// <param name="printDelg">Output callback; null suppresses all messages (VoidPrint is used).</param>
        /// <param name="primaryScript">The primary script file when this executor runs a pre/post script; null for the primary run.</param>
        public void Execute(string[] args, PrintDelegate printDelg, string primaryScript)
        {
            try
            {
                print = printDelg != null ? printDelg : new PrintDelegate(VoidPrint);

                if (args.Length > 0)
                {
                    #region Parse command-line arguments...

                    //Here we need to separate application arguments from script ones.
                    //Script engine arguments are always followed by script arguments
                    //[appArgs][scriptFile][scriptArgs][//x]
#if net1
                    ArrayList appArgs = new ArrayList();
#else
                    List<string> appArgs = new List<string>();
#endif
                    int firstScriptArg = CSSUtils.ParseAppArgs(args, this);

                    if (!options.processFile)
                        return; //no further processing is required (e.g. print help)

                    if (args.Length <= firstScriptArg)
                    {
                        Environment.ExitCode = 1;
                        print("No script file was specified.");
                        return; //no script, no script arguments
                    }

                    //The following will also update corresponding "options" members from "settings" data
                    Settings settings = GetPersistedSettings(appArgs);

                    //process original command-line arguments
                    if (options.scriptFileName == "")
                    {
                        options.scriptFileName = args[firstScriptArg];
                        firstScriptArg++;
                    }

                    for (int i = firstScriptArg; i < args.Length; i++)
                    {
                        if (args[i].Trim().Length != 0)
                        {
                            // A trailing "//x" is a debugger request, not a script argument.
                            if (i == args.Length - 1 && string.Compare(args[args.Length - 1], "//x", true, CultureInfo.InvariantCulture) == 0)
                            {
                                options.startDebugger = true;
                                options.DBG = true;
                            }
                            else
                                appArgs.Add(args[i]);
                        }
                    }
#if net1
                    scriptArgs = (string[])appArgs.ToArray(typeof(string));
#else
                    scriptArgs = appArgs.ToArray();
#endif

                    //searchDirs[0] is the script file directory. Set it only after
                    //the script file resolved because it can be:
                    // dir defined by the absolute/relative script file path
                    // "%CSSCRIPT_DIR%\lib
                    // settings.SearchDirs
                    // CacheDir
#if net1
                    ArrayList dirs = new ArrayList();
#else
                    List<string> dirs = new List<string>();
#endif
                    using (IDisposable currDir = new CurrentDirGuard())
                    {
                        if (options.local)
                            Environment.CurrentDirectory = Path.GetDirectoryName(Path.GetFullPath(options.scriptFileName));

                        foreach (string dir in options.searchDirs) //some directories may be already set from command-line
                            dirs.Add(Path.GetFullPath(dir));

                        if (settings != null)
                            foreach (string dir in Environment.ExpandEnvironmentVariables(settings.SearchDirs).Split(",;".ToCharArray()))
                                if (dir.Trim() != "")
                                    dirs.Add(Path.GetFullPath(dir));
                    }

                    dirs.Add(Utils.GetAssemblyDirectoryName(this.GetType().Assembly));

#if net1
                    options.scriptFileName = FileParser.ResolveFile(options.scriptFileName, (string[])dirs.ToArray(typeof(string)));
#else
                    options.scriptFileName = FileParser.ResolveFile(options.scriptFileName, dirs.ToArray());
#endif

                    if (primaryScript != null)
                        options.scriptFileNamePrimary = primaryScript;
                    else
                        options.scriptFileNamePrimary = options.scriptFileName;

                    if (CSExecutor.ScriptCacheDir == "")
                        CSExecutor.SetScriptCacheDir(options.scriptFileName);

                    // Script's own directory gets the highest probing priority.
                    dirs.Insert(0, Path.GetDirectoryName(Path.GetFullPath(options.scriptFileName)));

                    if (settings != null && settings.HideAutoGeneratedFiles != Settings.HideOptions.DoNotHide)
                        dirs.Add(CSExecutor.ScriptCacheDir);

#if net1
                    options.searchDirs = (string[])dirs.ToArray(typeof(string));
#else
                    options.searchDirs = dirs.ToArray();
#endif
                    CSharpParser.CmdScriptInfo[] cmdScripts = new CSharpParser.CmdScriptInfo[0];

                    //do quick parsing for pre/post scripts, ThreadingModel and embedded script arguments
                    CSharpParser parser = new CSharpParser(options.scriptFileName, true);

                    if (parser.HostOptions.Length != 0)
                    {
                        // //css_host directives are honored only on CLR 4.0+; they force a surrogate hosting process.
                        if (Environment.Version.Major >= 4)
                        {
                            foreach (string optionsSet in parser.HostOptions)
                                foreach (string option in optionsSet.Split(' '))
                                    if (option == "/platform:x86")
                                        options.compilerOptions += " " + option;
                                    else if (option.StartsWith("/version:"))
                                        options.TargetFramework = option.Replace("/version:", "");

                            options.useSurrogateHostingProcess = true;
                        }
                    }
                    else
                    {
#if fork_x86
                        ////x86 process forking only supported for .NET 4.0+
                        ////This is because earlier versions of CLR would require different "platform build" runasm32.exe
                        //if (Environment.Version.Major >= 4)
                        //{
                        // foreach (string option in parser.CompilerOptions)
                        // if (option == "/platform:x86" && Environment.Is64BitProcess)
                        // throw new Surrogate86ProcessRequiredException();

                        // foreach (string arg in Utils.Concat(args, parser.Args))
                        // if (arg.StartsWith(CSSUtils.cmdFlagPrefix + "co:"))
                        // foreach (string option in arg.Split('/'))
                        // if (option == "platform:x86" && Environment.Is64BitProcess)
                        // throw new Surrogate86ProcessRequiredException();
                        //}
#endif
                    }

                    //analyse ThreadingModel to use it with execution thread
                    if (File.Exists(options.scriptFileName))
                    {
                        if (parser.ThreadingModel != ApartmentState.Unknown)
                            options.apartmentState = parser.ThreadingModel;

#if net1
                        ArrayList preScripts = new ArrayList(parser.CmdScripts);
                        foreach (CSharpParser.ImportInfo info in parser.Imports)
                        {
                            try
                            {
                                string file = FileParser.ResolveFile(info.file, options.searchDirs);
                                if (file.IndexOf(".g.cs") == -1) //non auto-generated file
                                    preScripts.AddRange(new CSharpParser(file, true).CmdScripts);
                            }
                            catch { } //some files may not be generated yet
                        }
                        cmdScripts = (CSharpParser.CmdScriptInfo[])preScripts.ToArray(typeof(CSharpParser.CmdScriptInfo));
#else
                        // Collect extra search dirs declared by the script and by its referenced assemblies.
                        List<string> newSearchDirs = new List<string>(options.searchDirs);
                        using (IDisposable currDir = new CurrentDirGuard())
                        {
                            Environment.CurrentDirectory = Path.GetDirectoryName(Path.GetFullPath(options.scriptFileName));
                            foreach (string dir in parser.ExtraSearchDirs)
                                newSearchDirs.Add(Path.GetFullPath(dir));
                            foreach (string file in parser.RefAssemblies)
                            {
                                string path = file.Replace("\"", "");
                                string dir = Path.GetDirectoryName(path);
                                if (dir != "")
                                    newSearchDirs.Add(Path.GetFullPath(dir));
                            }
                            options.searchDirs = newSearchDirs.ToArray();
                        }
                        List<CSharpParser.CmdScriptInfo> preScripts = new List<CSharpParser.CmdScriptInfo>(parser.CmdScripts);
                        // Imported scripts may declare their own pre/post scripts and search dirs; merge them in.
                        foreach (CSharpParser.ImportInfo info in parser.Imports)
                        {
                            try
                            {
                                string file = FileParser.ResolveFile(info.file, options.searchDirs);
                                if (file.IndexOf(".g.cs") == -1) //non auto-generated file
                                {
                                    using (IDisposable currDir = new CurrentDirGuard())
                                    {
                                        CSharpParser impParser = new CSharpParser(file, true, options.searchDirs);
                                        Environment.CurrentDirectory = Path.GetDirectoryName(file);
                                        foreach (string dir in impParser.ExtraSearchDirs)
                                            newSearchDirs.Add(Path.GetFullPath(dir));
                                        options.searchDirs = newSearchDirs.ToArray();
                                    }
                                    preScripts.AddRange(new CSharpParser(file, true).CmdScripts);
                                }
                            }
                            catch { } //some files may not be generated yet
                        }
                        cmdScripts = preScripts.ToArray();
#endif
                        if (primaryScript == null)//this is a primary script
                        {
                            // Arguments embedded in the script (//css_args) are parsed the same way as real ones.
                            int firstEmbeddedScriptArg = CSSUtils.ParseAppArgs(parser.Args, this);
                            if (firstEmbeddedScriptArg != -1)
                            {
                                for (int i = firstEmbeddedScriptArg; i < parser.Args.Length; i++)
                                    appArgs.Add(parser.Args[i]);
                            }
#if net1
                            scriptArgs = (string[])appArgs.ToArray(typeof(string));
#else
                            scriptArgs = appArgs.ToArray();
#endif
                        }
                    }

                    #endregion Parse command-line arguments...

                    ExecuteOptions originalOptions = (ExecuteOptions)options.Clone(); //preserve master script options
                    string originalCurrDir = Environment.CurrentDirectory;

                    //run prescripts
                    //Note: during the secondary script execution static options will be modified (this is required for
                    //browsing in CSSEnvironment with reflection). So reset it back with originalOptions after the execution is completed
                    foreach (CSharpParser.CmdScriptInfo info in cmdScripts)
                        if (info.preScript)
                        {
                            Environment.CurrentDirectory = originalCurrDir;
                            info.args[1] = FileParser.ResolveFile(info.args[1], originalOptions.searchDirs);

                            CSExecutor exec = new CSExecutor(info.abortOnError, originalOptions);

                            // Propagate debug/verbose flags of the master script to the pre-script.
                            if (originalOptions.DBG)
                            {
#if net1
                                ArrayList newArgs = new ArrayList();
                                newArgs.AddRange(info.args);
                                newArgs.Insert(0, CSSUtils.cmdFlagPrefix + "dbg");
                                info.args = (string[])newArgs.ToArray(typeof(string));
#else
                                List<string> newArgs = new List<string>();
                                newArgs.AddRange(info.args);
                                newArgs.Insert(0, CSSUtils.cmdFlagPrefix + "dbg");
                                info.args = newArgs.ToArray();
#endif
                            }
                            if (originalOptions.verbose)
                            {
#if net1
                                ArrayList newArgs = new ArrayList();
                                newArgs.AddRange(info.args);
                                newArgs.Insert(0, CSSUtils.cmdFlagPrefix + "verbose");
                                info.args = (string[])newArgs.ToArray(typeof(string));
#else
                                List<string> newArgs = new List<string>();
                                newArgs.AddRange(info.args);
                                newArgs.Insert(0, CSSUtils.cmdFlagPrefix + "verbose");
                                info.args = newArgs.ToArray();
#endif
                            }
                            if (info.abortOnError)
                                exec.Execute(info.args, printDelg, originalOptions.scriptFileName);
                            else
                                exec.Execute(info.args, null, originalOptions.scriptFileName);
                        }

                    options = originalOptions;
                    ExecuteOptions.options = originalOptions; //update static members as well
                    Environment.CurrentDirectory = originalCurrDir;

                    options.compilationContext = CSSUtils.GenerateCompilationContext(parser, options);

                    //Run main script
                    //We need to start the execution in a new thread as it is the only way
                    //to set desired ApartmentState under .NET 2.0
                    Thread newThread = new Thread(new ThreadStart(this.ExecuteImpl));
#if net1
                    newThread.ApartmentState = options.apartmentState;
#else
                    newThread.SetApartmentState(options.apartmentState);
#endif
                    newThread.Start();
                    newThread.Join();
                    // ExecuteImpl stores its failure in lastException instead of letting it escape the thread.
                    if (lastException != null)
                        if (lastException is SurrogateHostProcessRequiredException)
                            throw lastException;
                        else
                            throw new ApplicationException("Script " + options.scriptFileName + " cannot be executed.", lastException);

                    //run postscripts
                    foreach (CSharpParser.CmdScriptInfo info in cmdScripts)
                        if (!info.preScript)
                        {
                            Environment.CurrentDirectory = originalCurrDir;
                            info.args[1] = FileParser.ResolveFile(info.args[1], originalOptions.searchDirs);

                            CSExecutor exec = new CSExecutor(info.abortOnError, originalOptions);

                            if (originalOptions.DBG)
                            {
#if net1
                                ArrayList newArgs = new ArrayList();
                                newArgs.AddRange(info.args);
                                newArgs.Insert(0, CSSUtils.cmdFlagPrefix + "dbg");
                                info.args = (string[])newArgs.ToArray(typeof(string));
#else
                                List<string> newArgs = new List<string>();
                                newArgs.AddRange(info.args);
                                newArgs.Insert(0, CSSUtils.cmdFlagPrefix + "dbg");
                                info.args = newArgs.ToArray();
#endif
                            }
                            if (originalOptions.verbose)
                            {
#if net1
                                ArrayList newArgs = new ArrayList();
                                newArgs.AddRange(info.args);
                                newArgs.Insert(0, CSSUtils.cmdFlagPrefix + "verbose");
                                info.args = (string[])newArgs.ToArray(typeof(string));
#else
                                List<string> newArgs = new List<string>();
                                newArgs.AddRange(info.args);
                                newArgs.Insert(0, CSSUtils.cmdFlagPrefix + "verbose");
                                info.args = newArgs.ToArray();
#endif
                            }
                            if (info.abortOnError)
                            {
                                exec.Rethrow = true;
                                exec.Execute(info.args, printDelg, originalOptions.scriptFileName);
                            }
                            else
                                exec.Execute(info.args, null, originalOptions.scriptFileName);
                        }
                }
                else
                {
                    ShowHelp();
                }
            }
            catch (Surrogate86ProcessRequiredException)
            {
                throw;
            }
            catch (SurrogateHostProcessRequiredException)
            {
                throw;
            }
            catch (Exception e)
            {
                Exception ex = e;
                if (e is System.Reflection.TargetInvocationException)
                    ex = e.InnerException;

                if (rethrow)
                {
                    throw ex;
                }
                else
                {
                    Environment.ExitCode = 1;
                    if (options.reportDetailedErrorInfo)
                        print(ex.ToString());
                    else
                        print(ex.Message); //Mono friendly
                }
            }
        }

        /// <summary>
        /// Returns custom application config file.
        /// </summary>
        /// <remarks>
        /// Resolves the script the same way Execute does and returns, in priority order: the user-nominated
        /// config file, "&lt;script&gt;.config", or "&lt;script&gt;.exe.config". Returns "" when none applies.
        /// </remarks>
        internal string GetCustomAppConfig(string[] args)
        {
            try
            {
                if (args.Length > 0)
                {
                    int firstScriptArg = CSSUtils.ParseAppArgs(args, this);
                    if (args.Length > firstScriptArg)
                    {
                        Settings settings = null;
                        if (options.noConfig)
                        {
                            if (options.altConfig != "")
                                settings = Settings.Load(Path.GetFullPath(options.altConfig)); //read persistent settings from configuration file
                        }
                        else
                        {
                            if (!string.IsNullOrEmpty(Assembly.GetExecutingAssembly().Location))
                                settings = Settings.Load(Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), "css_config.xml"));
                        }

                        // Custom config is only used when requested on the command line or enabled via default arguments.
                        if (!options.useScriptConfig && (settings == null || settings.DefaultArguments.IndexOf(CSSUtils.cmdFlagPrefix + "sconfig") == -1))
                            return "";

                        string script = args[firstScriptArg];
#if net1
                        ArrayList dirs = new ArrayList();
#else
                        List<string> dirs = new List<string>();
#endif
                        string libDir = Environment.ExpandEnvironmentVariables("%CSSCRIPT_DIR%" + Path.DirectorySeparatorChar + "lib");
                        if (!libDir.StartsWith("%")) //the environment variable was actually expanded
                            dirs.Add(libDir);

                        if (settings != null)
                            dirs.AddRange(Environment.ExpandEnvironmentVariables(settings.SearchDirs).Split(",;".ToCharArray()));

                        dirs.Add(Utils.GetAssemblyDirectoryName(Assembly.GetExecutingAssembly()));

#if net1
                        string[] searchDirs = (string[])dirs.ToArray(typeof(string));
#else
                        string[] searchDirs = dirs.ToArray();
#endif
                        script = FileParser.ResolveFile(script, searchDirs);

                        if (options.customConfigFileName != "")
                            return Path.Combine(Path.GetDirectoryName(script), options.customConfigFileName);

                        if (File.Exists(script + ".config"))
                            return script + ".config";
                        else if (File.Exists(Path.ChangeExtension(script, ".exe.config")))
                            return Path.ChangeExtension(script, ".exe.config");
                    }
                }
            }
            catch
            {
                //ignore the exception because it will be raised (again) and handled by the Execute method
            }
            return "";
        }

        /// <summary>
        /// Dummy 'print' to suppress displaying application messages.
        /// </summary>
        private static void VoidPrint(string msg)
        {
        }

        /// <summary>
        /// The exception captured by ExecuteImpl on its worker thread; inspected by Execute after Join().
        /// </summary>
        public Exception lastException;

        /// <summary>
        /// This method implements compiling and execution of the script.
        /// Runs on a dedicated thread (see Execute) so the requested ApartmentState can be applied.
        /// </summary>
        private void ExecuteImpl()
        {
            try
            {
                //System.Diagnostics.Debug.Assert(false);
                if (options.processFile)
                {
                    if (options.local)
                        Environment.CurrentDirectory = Path.GetDirectoryName(Path.GetFullPath(options.scriptFileName));

                    if (!options.noLogo)
                    {
                        Console.WriteLine(AppInfo.appLogo);
                    }

                    if (Environment.GetEnvironmentVariable("EntryScript") == null)
                        Environment.SetEnvironmentVariable("EntryScript", Path.GetFullPath(options.scriptFileName));

                    {
                        CSSUtils.VerbosePrint("> ----------------", options);
                        // NOTE(review): "TragetFramework" is a typo in the emitted log text; left as-is here (doc-only change).
                        CSSUtils.VerbosePrint(" TragetFramework: " + options.TargetFramework, options);
                        CSSUtils.VerbosePrint(" CurrentDirectory: " + Environment.CurrentDirectory, options);
                        CSSUtils.VerbosePrint(" Executing: " + Path.GetFullPath(options.scriptFileName), options);
                        CSSUtils.VerbosePrint(" Script arguments: ", options);
                        for (int i = 0; i < scriptArgs.Length; i++)
                            CSSUtils.VerbosePrint(" " + i + " - " + scriptArgs[i], options);

                        CSSUtils.VerbosePrint(" SearchDirectories: ", options);
                        for (int i = 0; i < options.searchDirs.Length; i++)
                            CSSUtils.VerbosePrint(" " + i + " - " + options.searchDirs[i], options);
                        CSSUtils.VerbosePrint("> ----------------", options);
                        CSSUtils.VerbosePrint("", options);
                    }

                    // Named mutex serializes compilation of the same script across processes.
                    bool fileUnlocked = false;
                    using (Mutex fileLock = new Mutex(false, "Process." + options.scriptFileName.GetHashCode().ToString()))
                        try
                        {
                            // 'start' is only consumed by the commented-out Trace line below.
                            int start = Environment.TickCount;
                            fileLock.WaitOne(3000, false); //let other thread/process (if any) to finish loading/compiling the same file; 3 seconds should be enough, if you need more use more sophisticated synchronization
                            //Trace.WriteLine(">>> Waited " + (Environment.TickCount - start));

                            //compile
                            string assemblyFileName = options.useCompiled ? GetAvailableAssembly(options.scriptFileName) : null;

                            if (options.useCompiled && options.useSmartCaching)
                            {
                                if (assemblyFileName != null)
                                {
                                    // Cached assembly is discarded if any script dependency changed since it was built.
                                    if (MetaDataItems.IsOutOfDate(options.scriptFileName, assemblyFileName))
                                    {
                                        assemblyFileName = null;
                                    }
                                }
                            }

                            if (options.forceCompile && assemblyFileName != null)
                            {
                                File.Delete(assemblyFileName);
                                assemblyFileName = null;
                            }

                            //add searchDirs to PATH to support search path for native dlls
                            //need to do this before compilation or execution
                            string path = Environment.GetEnvironmentVariable("PATH");
                            foreach (string s in options.searchDirs)
                                path += ";" + s;
#if net1
                            SetEnvironmentVariable("PATH", path);
#else
                            Environment.SetEnvironmentVariable("PATH", path);
#endif

                            //it is possible that there are fully compiled/cached and up to date script but no host compiled yet
                            string host = ScriptLauncherBuilder.GetLauncherName(assemblyFileName);
                            bool surrogateHostMissing = (options.useSurrogateHostingProcess && (!File.Exists(host) || !CSSUtils.HaveSameTimestamp(host, assemblyFileName)));

                            if (options.buildExecutable || !options.useCompiled || (options.useCompiled && assemblyFileName == null) || options.forceCompile || surrogateHostMissing)
                            {
                                try
                                {
                                    CSSUtils.VerbosePrint("Compiling script...", options);
                                    CSSUtils.VerbosePrint("", options);
                                    TimeSpan initializationTime = Profiler.Stopwatch.Elapsed;
                                    Profiler.Stopwatch.Reset();
                                    Profiler.Stopwatch.Start();

                                    assemblyFileName = Compile(options.scriptFileName);

                                    if (Profiler.Stopwatch.IsRunning)
                                    {
                                        Profiler.Stopwatch.Stop();
                                        TimeSpan compilationTime = Profiler.Stopwatch.Elapsed;
                                        CSSUtils.VerbosePrint("Initialization time: " + initializationTime.TotalMilliseconds + " msec", options);
                                        CSSUtils.VerbosePrint("Compilation time: " + compilationTime.TotalMilliseconds + " msec", options);
                                        CSSUtils.VerbosePrint("> ----------------", options);
                                        CSSUtils.VerbosePrint("", options);
                                    }
                                }
                                catch
                                {
                                    print("Error: Specified file could not be compiled.\n");
                                    throw;
                                }
                                finally
                                {
                                    // Release the lock as soon as compilation is done so other waiters can proceed.
                                    try { fileLock.ReleaseMutex(); }
                                    catch { }
                                    fileUnlocked = true;
                                }
                            }
                            else
                            {
                                Profiler.Stopwatch.Stop();
                                CSSUtils.VerbosePrint(" Loading script from cache...", options);
                                CSSUtils.VerbosePrint("", options);
                                CSSUtils.VerbosePrint(" Cache file: \n " + assemblyFileName, options);
                                CSSUtils.VerbosePrint("> ----------------", options);
                                CSSUtils.VerbosePrint("Initialization time: " + Profiler.Stopwatch.Elapsed.TotalMilliseconds + " msec", options);
                                CSSUtils.VerbosePrint("> ----------------", options);
                            }

                            //execute
                            if (!options.supressExecution)
                            {
                                try
                                {
                                    if (options.useSurrogateHostingProcess)
                                    {
                                        throw new SurrogateHostProcessRequiredException(assemblyFileName, scriptArgs, options.startDebugger);
                                    }

                                    if (options.startDebugger)
                                    {
                                        System.Diagnostics.Debugger.Launch();
                                        if (System.Diagnostics.Debugger.IsAttached)
                                            System.Diagnostics.Debugger.Break();
                                    }

                                    if (options.useCompiled || options.cleanupShellCommand != "")
                                    {
                                        AssemblyResolver.CacheProbingResults = true; //it is reasonable safe to do the agressive probing as we are executing only a single script (standalone execution not a script hosting model)

                                        //despite the name of the class the execution (assembly loading) will be in the current domain
                                        //I am just reusing some functionality of the RemoteExecutor class.
                                        RemoteExecutor executor = new RemoteExecutor(options.searchDirs);
                                        executor.ExecuteAssembly(assemblyFileName, scriptArgs);
                                    }
                                    else
                                    {
                                        //Load and execute assembly in a different domain to make it possible to unload assembly before clean up
                                        AssemblyExecutor executor = new AssemblyExecutor(assemblyFileName, "AsmExecution");
                                        executor.Execute(scriptArgs);
                                    }
                                }
                                catch (SurrogateHostProcessRequiredException)
                                {
                                    throw;
                                }
                                catch
                                {
                                    print("Error: Specified file could not be executed.\n");
                                    throw;
                                }

                                //cleanup
                                if (File.Exists(assemblyFileName) && !options.useCompiled && options.cleanupShellCommand == "")
                                {
                                    try { File.Delete(assemblyFileName); }
                                    catch { }
                                }

                                if (options.cleanupShellCommand != "")
                                {
                                    try
                                    {
                                        // The cleanup command runs only once per doCleanupAfterNumberOfRuns runs;
                                        // the run counter is persisted in the temp dir.
                                        string counterFile = Path.Combine(GetScriptTempDir(), "counter.txt");
                                        int prevRuns = 0;

                                        using (StreamReader sr = new StreamReader(counterFile))
                                        {
                                            prevRuns = int.Parse(sr.ReadToEnd());
                                        }

                                        if (prevRuns > options.doCleanupAfterNumberOfRuns)
                                        {
                                            prevRuns = 1;
                                            string[] cmd = options.ExtractShellCommand(options.cleanupShellCommand);
                                            if (cmd.Length > 1)
                                                Process.Start(cmd[0], cmd[1]);
                                            else
                                                Process.Start(cmd[0]);
                                        }
                                        else
                                            prevRuns++;

                                        using (StreamWriter sw = new StreamWriter(counterFile))
                                            sw.Write(prevRuns);
                                    }
                                    catch { }
                                }
                            }
                        }
                        finally
                        {
                            try
                            {
                                if (!fileUnlocked) fileLock.ReleaseMutex();
                            } //using fileUnlocked to avoid throwing unnecessary exception
                            catch { }
                        }
                }
            }
            catch (Exception e)
            {
                Exception ex = e;
                if (e is System.Reflection.TargetInvocationException)
                    ex = e.InnerException;

                // Running on a worker thread: hand the exception back to Execute via lastException.
                if (rethrow || e is SurrogateHostProcessRequiredException)
                {
                    lastException = ex;
                }
                else
                {
                    Environment.ExitCode = 1;
                    if (options.reportDetailedErrorInfo)
                        print(ex.ToString());
                    else
                        print(ex.Message); //Mono friendly
                }
            }
        }

        /// <summary>
        /// Compiles C# script file into assembly.
        /// </summary>
        /// <param name="scriptFile">The script to compile.</param>
        /// <param name="assemblyFile">Explicit output assembly path; null to place the assembly into the script cache directory.</param>
        /// <param name="debugBuild">True to compile with debug information.</param>
        /// <returns>Path of the compiled assembly.</returns>
        public string Compile(string scriptFile, string assemblyFile, bool debugBuild)
        {
            string result = null;
            try
            {
                if (assemblyFile != null)
                    options.forceOutputAssembly = assemblyFile;
                else
                {
                    string cacheFile = Path.Combine(CSExecutor.GetCacheDirectory(scriptFile), Path.GetFileName(scriptFile) + ".compiled");
                    options.forceOutputAssembly = cacheFile;
                }
                if (debugBuild)
                    options.DBG = true;
                result = Compile(scriptFile);
            }
            catch (UnauthorizedAccessException e)
            {
                //changed, but file is locked
                // Retry with a unique (GUID-suffixed) cache file name so the locked assembly is not overwritten.
                if (assemblyFile != null)
                    options.forceOutputAssembly = assemblyFile;
                else
                {
                    string cacheFile = Path.Combine(CSExecutor.GetCacheDirectory(scriptFile), Path.GetFileName(scriptFile) + Guid.NewGuid().ToString("N") + ".compiled");
                    options.forceOutputAssembly = cacheFile;
                }
                if (debugBuild)
                    options.DBG = true;
                result = Compile(scriptFile);
            }
            return result;
        }

        #endregion Public interface...

        #region Class data...

        /// <summary>
        /// C# Script arguments array (sub array of application arguments array).
        /// </summary>
        string[] scriptArgs;

        /// <summary>
        /// Callback to print application messages to appropriate output.
        /// </summary>
        static PrintDelegate print;

        /// <summary>
        /// Container for parsed command line arguments
        /// </summary>
        static internal ExecuteOptions options = new ExecuteOptions();

        /// <summary>
        /// Flag to force to rethrow critical exceptions
        /// </summary>
        bool rethrow;

        #endregion Class data...

        #region Class methods...
        /// <summary>
        /// Constructor
        /// </summary>
        public CSExecutor()
        {
            rethrow = false;
            options = new ExecuteOptions();
        }

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="rethrow">True to rethrow caught exceptions instead of printing them.</param>
        /// <param name="optionsBase">Options of the master script; only its config-location members are copied.</param>
        public CSExecutor(bool rethrow, ExecuteOptions optionsBase)
        {
            this.rethrow = rethrow;
            options = new ExecuteOptions();

            //force to read all relative options data from the config file
            options.noConfig = optionsBase.noConfig;
            options.altConfig = optionsBase.altConfig;
        }

        public ExecuteOptions GetOptions()
        {
            return options;
        }

        /// <summary>
        /// Checks/returns if compiled C# script file (ScriptName + ".compiled") available and valid.
        /// </summary>
        /// <returns>Path of the cached assembly when its timestamp matches the script's; otherwise null.</returns>
        internal string GetAvailableAssembly(string scripFileName)
        {
            string retval = null;

            string asmFileName = options.hideTemp != Settings.HideOptions.DoNotHide ? Path.Combine(CSExecutor.ScriptCacheDir, Path.GetFileName(scripFileName) + ".compiled") : scripFileName + ".c";

            if (File.Exists(asmFileName) && File.Exists(scripFileName))
            {
                FileInfo scriptFile = new FileInfo(scripFileName);
                FileInfo asmFile = new FileInfo(asmFileName);
                // Both local and UTC timestamps compared to avoid false cache hits across DST/timezone changes.
                if (asmFile.LastWriteTime == scriptFile.LastWriteTime && asmFile.LastWriteTimeUtc == scriptFile.LastWriteTimeUtc)
                {
                    retval = asmFileName;
                }
            }

            return retval;
        }

        /// <summary>
        /// Set of assembly locations de-duplicated by file name (first registration wins).
        /// </summary>
        private class UniqueAssemblyLocations
        {
            public static explicit operator string[](UniqueAssemblyLocations obj)
            {
                string[] retval = new string[obj.locations.Count];
                obj.locations.Values.CopyTo(retval, 0);
                return retval;
            }

            public void AddAssembly(string location)
            {
                // Keyed by file name only, so the same assembly found in several search dirs is added once.
                string assemblyID = Path.GetFileName(location);
                if (!locations.ContainsKey(assemblyID))
                    locations[assemblyID] = location;
            }

            // Hashtable (not a generic Dictionary) is deliberate: this file still supports the net1 build.
            System.Collections.Hashtable locations = new System.Collections.Hashtable();
        }

        /// <summary>
        /// Creates the stock CodeDOM C# compiler for the configured target framework version.
        /// </summary>
        private ICodeCompiler LoadDefaultCompiler()
        {
#pragma warning disable 618
#if net1
            return new CSharpCodeProvider().CreateCompiler();
#else
            IDictionary<string, string> providerOptions = new Dictionary<string, string>();
            providerOptions["CompilerVersion"] = options.TargetFramework;
            return new CSharpCodeProvider(providerOptions).CreateCompiler();
#endif
#pragma warning restore 618
        }

        /// <summary>
        /// Creates the compiler to use for the script: either the default CodeDOM compiler or an
        /// alternative compiler assembly (probed next to the executable, in its "Lib" subfolder,
        /// or next to CSScriptLibrary.dll) exposing a "CSSCodeProvider" type.
        /// </summary>
        /// <param name="filesToInject">In/out list of extra source files to compile alongside the script.</param>
        private ICodeCompiler LoadCompiler(string scriptFileName, ref string[] filesToInject)
        {
            ICodeCompiler compiler;

            if (options.InjectScriptAssemblyAttribute && (options.altCompiler == "" || scriptFileName.EndsWith(".cs"))) //injection code syntax is C# compatible
            {
                filesToInject = Utils.Concat(filesToInject, CSSUtils.GetScriptedCodeAttributeInjectionCode(scriptFileName));
            }

            if (options.altCompiler == "")
            {
                compiler = LoadDefaultCompiler();
            }
            else
            {
                try
                {
                    Assembly asm;
                    if (Path.IsPathRooted(options.altCompiler))
                    {
                        //absolute path
                        asm = Assembly.LoadFrom(options.altCompiler);
                    }
                    else
                    {
                        //look in the following folders
                        // 1. Executable location
                        // 2. Executable location + "Lib"
                        // 3. CSScriptLibrary.dll location
                        string probingDir = Path.GetFullPath(Utils.GetAssemblyDirectoryName(Assembly.GetExecutingAssembly()));
                        string altCompilerFile = Path.Combine(probingDir, options.altCompiler);
                        if (File.Exists(altCompilerFile))
                        {
                            asm = Assembly.LoadFrom(altCompilerFile);
                        }
                        else
                        {
                            probingDir = Path.Combine(probingDir, "Lib");
                            altCompilerFile = Path.Combine(probingDir, options.altCompiler);
                            if (File.Exists(altCompilerFile))
                            {
                                asm = Assembly.LoadFrom(altCompilerFile);
                            }
                            else
                            {
                                //in case of CSScriptLibrary.dll "this" is not defined in the main executable
                                probingDir = Path.GetFullPath(Utils.GetAssemblyDirectoryName(this.GetType().Assembly));
                                altCompilerFile = Path.Combine(probingDir, options.altCompiler);
                                if (File.Exists(altCompilerFile))
                                {
                                    asm = Assembly.LoadFrom(altCompilerFile);
                                }
                                else
                                    throw new ApplicationException("Cannot find alternative compiler \"" + options.altCompiler + "\"");
                            }
                        }
                    }

                    Type[] types = asm.GetModules()[0].FindTypes(Module.FilterTypeName, "CSSCodeProvider");
#if net1
                    MethodInfo method = types[0].GetMethod("CreateCompiler");
                    compiler = (ICodeCompiler)method.Invoke(null, new object[] { scriptFileName }); //the script file name may influence what compiler will be created (e.g. *.vb vs. *.cs)
#else
                    // Prefer the newer entry point that accepts a target-framework version; fall back to the legacy one.
                    MethodInfo method = types[0].GetMethod("CreateCompilerVersion");
                    if (method != null)
                    {
                        compiler = (ICodeCompiler)method.Invoke(null, new object[] { scriptFileName, options.TargetFramework }); //the script file name may influence what compiler will be created (e.g. *.vb vs. *.cs)
                    }
                    else
                    {
                        method = types[0].GetMethod("CreateCompiler");
                        compiler = (ICodeCompiler)method.Invoke(null, new object[] { scriptFileName }); //the script file name may influence what compiler will be created (e.g. *.vb vs. *.cs)
                    }
#endif
                }
                catch (Exception ex)
                {
                    try
                    {
                        //Debug.Assert(false);
                        //try to recover from incorrectly configured CS-Script but only if not hosted by another app
                        if (!Assembly.GetExecutingAssembly().Location.ToLower().EndsWith("csscriptlibrary.dll"))
                        {
                            string sccssdir = Environment.GetEnvironmentVariable("CSSCRIPT_DIR");

                            if (sccssdir != null)//CS-Script is installed/configured
                            {
                                if (Directory.Exists(sccssdir) && !File.Exists(options.altCompiler)) //Invalid alt-compiler configured
                                    print("\nCannot find alternative compiler (" + options.altCompiler + "). Loading default compiler instead.");

                                options.altCompiler = "";
                                return LoadDefaultCompiler();
                            }
                        }
                    }
                    catch { }

                    throw new ApplicationException("Cannot use alternative compiler (" + options.altCompiler + "). You may want to adjust 'CSSCRIPT_DIR' environment variable or disable alternative compiler by setting 'useAlternativeCompiler' to empty value in the css_config.xml file.\n\nError Details:", ex);
                }
            }
            return compiler;
        }

        /// <summary>
        /// Populates <paramref name="compilerParams"/>.ReferencedAssemblies from: host app assemblies
        /// (optional), default/command-line references, namespace-to-assembly matches, and //css_ref
        /// directives found by the parser.
        /// </summary>
        private void AddReferencedAssemblies(CompilerParameters compilerParams, string scriptFileName, ScriptParser parser)
        {
            UniqueAssemblyLocations requestedRefAsms = new UniqueAssemblyLocations();

            // NOTE(review): refAssemblies appears to be unused in this method; kept as-is (doc-only change).
#if net1
            ArrayList refAssemblies = new ArrayList();
#else
            List<string> refAssemblies = new List<string>();
#endif
            if (options.shareHostRefAssemblies)
                foreach (Assembly asm in AppDomain.CurrentDomain.GetAssemblies())
                {
                    try
                    {
                        if (asm is System.Reflection.Emit.AssemblyBuilder)
                            continue;

                        if (!File.Exists(asm.Location) || asm.Location.Contains("mscorlib"))
                            continue;

                        //#if net4
                        // if (asm.Location.Contains("mscorlib")) //CLR 4.0 adds mscorlib automatically
                        // continue;
                        //#endif
                        requestedRefAsms.AddAssembly(asm.Location);
                    }
                    catch
                    {
                        //Under ASP.NET some assemblies do not have location (e.g. dynamically built/emitted assemblies)
                        //in such case NotSupportedException will be raised
                        //In fact ignore all exceptions as we should continue if for whatever reason assembly the location cannot be obtained
                    }
                }

            //add assemblies referenced from command line
            string[] cmdLineAsms = options.refAssemblies;
            if (!options.useSurrogateHostingProcess)
            {
                string[] defaultAsms = options.defaultRefAssemblies.Replace(" ", "").Split(";,".ToCharArray());

                foreach (string asmName in Utils.Concat(defaultAsms, cmdLineAsms))
                {
                    if (asmName == "")
                        continue;

                    foreach (string asm in AssemblyResolver.FindAssembly(asmName, options.searchDirs))
                        requestedRefAsms.AddAssembly(NormalizeGacAssemblyPath(asm));
                }
            }

            // Prevent the script's own output dll from being picked up as a reference.
            AssemblyResolver.ignoreFileName = Path.GetFileNameWithoutExtension(scriptFileName) + ".dll";

            //add local and global assemblies (if found) that have the same assembly name as a namespace
            foreach (string nmSpace in parser.ReferencedNamespaces)
            {
                bool ignore = false; //user may nominate namespaces to be excluded from namespace-to-asm resolving
                foreach (string ignoreNamespace in parser.IgnoreNamespaces)
                    if (ignoreNamespace == nmSpace)
                        ignore = true;

                if (!ignore)
                    foreach (string asm in AssemblyResolver.FindAssembly(nmSpace, options.searchDirs))
                        requestedRefAsms.AddAssembly(NormalizeGacAssemblyPath(asm));
            }

            //add assemblies referenced from code
            foreach (string asmName in parser.ReferencedAssemblies)
            {
                string asm = asmName.Replace("\"", "");

                if (Path.IsPathRooted(asm)) //absolute path
                {
                    //not-searchable assemblies
                    requestedRefAsms.AddAssembly(NormalizeGacAssemblyPath(asm));
                }
                else
                {
                    string[] files = AssemblyResolver.FindAssembly(asm, options.searchDirs);
                    if (files.Length > 0)
                    {
                        foreach (string asmFile in files)
                            requestedRefAsms.AddAssembly(NormalizeGacAssemblyPath(asmFile));
                    }
                    else
                    {
                        // Not found: pass the raw name through and let the compiler resolve it.
                        requestedRefAsms.AddAssembly(asm);
                    }
                }
            }

            compilerParams.ReferencedAssemblies.AddRange((string[])requestedRefAsms);
        }

        /// <summary>
        /// When cross-targeting via a surrogate host, GAC assembly paths are reduced to bare file
        /// names so the target framework's own GAC copy is resolved instead of the current one.
        /// </summary>
        private string NormalizeGacAssemblyPath(string asm)
        {
            //e.g. v3.5
            string currentFramework = string.Format("v{0}.{1}", Environment.Version.Major, Environment.Version.MajorRevision);

            if (options.useSurrogateHostingProcess && options.TargetFramework != currentFramework)
            {
                if (asm.IndexOf("\\GAC_MSIL\\") != -1) //GAC assembly
                    return Path.GetFileName(asm);
                else
                    return asm;
            }
            else
                return asm;
        }

        /// <summary>
        /// Compiles C# script file.
/// </summary>
/// <param name="scriptFileName">Path of the script to compile.</param>
/// <returns>Path of the produced assembly (.exe, .dll or *.compiled depending on options).</returns>
private string Compile(string scriptFileName)
{
    //System.Diagnostics.Debug.Assert(false);
    bool generateExe = options.buildExecutable;
    string scriptDir = Path.GetDirectoryName(scriptFileName);
    string assemblyFileName = "";

    //options may be uninitialized in case we are compiling from CSScriptLibrary
    if (options.searchDirs.Length == 0)
        options.searchDirs = new string[] { scriptDir };

    //parse source file in order to find all referenced assemblies
    //ASSUMPTION: assembly name is the same as namespace + ".dll"
    //if script doesn't follow this assumption user will need to
    //specify assemblies explicitly
    ScriptParser parser = new ScriptParser(scriptFileName, options.searchDirs);

    options.searchDirs = Utils.RemoveDuplicates(
        Utils.Concat(
            parser.SearchDirs, //parser.searchDirs may be updated as result of script parsing
            Utils.GetAssemblyDirectoryName(Assembly.GetExecutingAssembly())));

    // filesToInject may be populated by the compiler loader (e.g. attribute/launcher files).
    string[] filesToInject = new string[0];
    ICodeCompiler compiler = LoadCompiler(scriptFileName, ref filesToInject);

    CompilerParameters compilerParams = new CompilerParameters();

    // Merge precompilers declared in the script into the comma-separated options list.
    foreach (string file in parser.Precompilers)
        if (options.preCompilers == "")
            options.preCompilers = file;
        else
            options.preCompilers += "," + file;

    foreach (string option in parser.CompilerOptions)
        Utils.AddCompilerOptions(compilerParams, option);

    if (options.DBG)
        Utils.AddCompilerOptions(compilerParams, "/d:DEBUG /d:TRACE");

    if (options.compilerOptions != string.Empty)
        Utils.AddCompilerOptions(compilerParams, options.compilerOptions);

    compilerParams.IncludeDebugInformation = options.DBG;
    compilerParams.GenerateExecutable = generateExe;
    compilerParams.GenerateInMemory = false;

    // Work on a copy so precompilation cannot mutate the parser's own array.
    string[] filesToCompile = new string[parser.FilesToCompile.Length];
    Array.Copy(parser.FilesToCompile, filesToCompile, parser.FilesToCompile.Length);
    PrecompilationContext context = CSSUtils.Precompile(scriptFileName, filesToCompile, options);
    // Extra files precompilers introduced; stamped into the cache metadata later.
    string[] additionalDependencies = context.NewDependencies.ToArray();

    AddReferencedAssemblies(compilerParams, scriptFileName, parser);

    //add resources referenced from code
    foreach (string resFile in parser.ReferencedResources)
    {
        string file = null;
        foreach (string dir in options.searchDirs)
        {
            file = Path.IsPathRooted(resFile) ? Path.GetFullPath(resFile) : Path.Combine(dir, resFile);
            if (File.Exists(file))
                break;
        }
        if (file == null)
            file = resFile;

        Utils.AddCompilerOptions(compilerParams, "\"/res:" + file + "\""); //e.g. /res:C:\\Scripting.Form1.resources";
    }

    // Decide where the output assembly goes: forced path > exe next to script >
    // cached/.dll variants > throw-away temp file.
    if (options.forceOutputAssembly != "")
    {
        assemblyFileName = options.forceOutputAssembly;
    }
    else
    {
        if (generateExe)
            assemblyFileName = Path.Combine(scriptDir, Path.GetFileNameWithoutExtension(scriptFileName) + ".exe");
        else if (options.useCompiled || options.DLLExtension)
        {
            if (options.DLLExtension)
                assemblyFileName = Path.Combine(scriptDir, Path.GetFileNameWithoutExtension(scriptFileName) + ".dll");
            else if (options.hideTemp != Settings.HideOptions.DoNotHide)
                assemblyFileName = Path.Combine(CSExecutor.ScriptCacheDir, Path.GetFileName(scriptFileName) + ".compiled");
            else
                assemblyFileName = scriptFileName + ".compiled";
        }
        else
        {
            string tempFile = GetScriptTempFile();
            assemblyFileName = Path.ChangeExtension(tempFile, ".dll");
        }
    }

    if (generateExe && options.buildWinExecutable)
        Utils.AddCompilerOptions(compilerParams, "/target:winexe");

    // Remove any stale output so a failed compile cannot leave an old assembly behind.
    if (File.Exists(assemblyFileName))
        File.Delete(assemblyFileName);
    compilerParams.OutputAssembly = assemblyFileName;

    //compilerParams.ReferencedAssemblies.Add(this.GetType().Assembly.Location);

    CompilerResults results;
    if (generateExe)
    {
        results = CompileAssembly(compiler, compilerParams, filesToCompile);
    }
    else
    {
        if (filesToInject.Length != 0)
        {
            filesToCompile = Utils.Concat(filesToCompile, filesToInject);
        }

        CSSUtils.VerbosePrint(" Output file: \n " + assemblyFileName, options);
        CSSUtils.VerbosePrint("", options);
        CSSUtils.VerbosePrint(" Files to compile: ", options);
        int i = 0;
        foreach (string file in filesToCompile)
            CSSUtils.VerbosePrint(" " + i++ + " - " + file, options);
        CSSUtils.VerbosePrint("", options);
        CSSUtils.VerbosePrint(" References: ", options);
        i = 0;
        foreach (string file in compilerParams.ReferencedAssemblies)
            CSSUtils.VerbosePrint(" " + i++ + " - " + file, options);
        CSSUtils.VerbosePrint("> ----------------", options);

        string originalExtension = Path.GetExtension(compilerParams.OutputAssembly);
        if (originalExtension != ".dll")
        {
            //Despite the usage of .dll file name is not required for MS C# compiler we need to do this because
            //some compilers (Mono, VB) accept only dll or exe file extensions.
            compilerParams.OutputAssembly = Path.ChangeExtension(compilerParams.OutputAssembly, ".dll");

            if (File.Exists(compilerParams.OutputAssembly))
                File.Delete(compilerParams.OutputAssembly);

            results = CompileAssembly(compiler, compilerParams, filesToCompile);

            // Rename the .dll back to the requested extension, retrying because the
            // compiler process may still hold the file handle for a short time.
            if (File.Exists(compilerParams.OutputAssembly))
            {
                int attempts = 0;
                while (true)
                {
                    //There were reports of MS C# compiler (csc.exe) not releasing OutputAssembly file
                    //after compilation finished. Thus wait a little...
                    //BTW. on Mono 1.2.4 it happens all the time
                    try
                    {
                        attempts++;
                        File.Move(compilerParams.OutputAssembly, Path.ChangeExtension(compilerParams.OutputAssembly, originalExtension));
                        break;
                    }
                    catch
                    {
                        if (attempts > 2)
                        {
                            //yep we can get here as Mono 1.2.4 on Windows never ever releases the assembly
                            File.Copy(compilerParams.OutputAssembly, Path.ChangeExtension(compilerParams.OutputAssembly, originalExtension), true);
                            break;
                        }
                        else
                            Thread.Sleep(100);
                    }
                }
            }
        }
        else
        {
            if (File.Exists(compilerParams.OutputAssembly))
                File.Delete(compilerParams.OutputAssembly);

            results = CompileAssembly(compiler, compilerParams, filesToCompile);
        }
    }

    ProcessCompilingResult(results, compilerParams, parser, scriptFileName, assemblyFileName, additionalDependencies);

    if (options.useSurrogateHostingProcess && !options.supressExecution)
    {
        new ScriptLauncherBuilder().BuildSurrogateLauncher(assemblyFileName, options.TargetFramework, compilerParams, options.apartmentState);
    }

    return assemblyFileName;
}

/// <summary>
/// Invokes the CodeDom compiler for the given files and, on success, runs the optional
/// user-configured post-processor (type CSSPostProcessor, method Process) against the output assembly.
/// </summary>
private CompilerResults CompileAssembly(ICodeCompiler compiler, CompilerParameters compilerParams, string[] filesToCompile)
{
    CompilerResults retval = compiler.CompileAssemblyFromFileBatch(compilerParams, filesToCompile);

    if (!retval.Errors.HasErrors && options.postProcessor != "")
    {
        // NOTE(review): rawAssembly is never used below — appears vestigial.
        string rawAssembly = compilerParams.OutputAssembly + ".raw";
        try
        {
            MethodInfo postProcessor = Assembly.LoadFrom(options.postProcessor)
                                               .GetType("CSSPostProcessor", true)
                                               .GetMethod("Process");

            string[] refAsms = new string[compilerParams.ReferencedAssemblies.Count];
            compilerParams.ReferencedAssemblies.CopyTo(refAsms, 0);

            postProcessor.Invoke(null, new object[]
            {
                compilerParams.OutputAssembly,
                refAsms,
                options.searchDirs
            });
        }
        catch (Exception e)
        {
            throw new ApplicationException("Cannon post-process compiled script (set UsePostProcessor to \"null\" if the problem persist).\n" + e.Message);
        }
    }

    return retval;
}

/// <summary>
/// Finalizes a compilation: throws on compiler errors; otherwise deletes debug leftovers
/// (unless debugging), stamps smart-caching metadata, and aligns the assembly timestamp
/// with the script so cache validity checks work.
/// </summary>
private void ProcessCompilingResult(CompilerResults results, CompilerParameters compilerParams, ScriptParser parser, string scriptFileName, string assemblyFileName, string[] additionalDependencies)
{
    LastCompileResult = results;

    if (results.Errors.Count != 0)
    {
        throw CompilerException.Create(results.Errors, options.hideCompilerWarnings);
    }
    else
    {
        if (!options.DBG) //.pdb and imported files might be needed for the debugger
        {
            parser.DeleteImportedFiles();
            string pdbFile = Path.Combine(Path.GetDirectoryName(assemblyFileName), Path.GetFileNameWithoutExtension(assemblyFileName) + ".pdb");
            if (File.Exists(pdbFile))
                File.Delete(pdbFile);
        }

        if (options.useCompiled)
        {
            if (options.useSmartCaching)
            {
                MetaDataItems depInfo = new MetaDataItems();

                string[] searchDirs = Utils.RemovePathDuplicates(options.searchDirs);

                //save imported scripts info
                depInfo.AddItems(parser.ImportedFiles, false, searchDirs);

                //additionalDependencies (precompilers) are warranted to be as absolute path so no need to pass searchDirs or isAssembly
                depInfo.AddItems(additionalDependencies, false, new string[0]);

                //save referenced local assemblies info
                string[] newProbingDirs = depInfo.AddItems(compilerParams.ReferencedAssemblies, true, searchDirs);
                foreach (string dir in newProbingDirs)
                    options.AddSearchDir(dir); //needed to be added at Compilation for further resolving during the Invoking stage

                depInfo.StampFile(assemblyFileName);
            }

            // Make the assembly timestamp match the script's so "is cache up to date" checks compare equal.
            FileInfo scriptFile = new FileInfo(scriptFileName);
            FileInfo asmFile = new FileInfo(assemblyFileName);
            if (scriptFile != null && asmFile != null)
            {
                asmFile.LastWriteTimeUtc = scriptFile.LastWriteTimeUtc;
            }
        }
    }
}

// Result of the most recent compilation; exposed for host diagnostics.
internal CompilerResults LastCompileResult;

[DllImport("kernel32.dll", SetLastError = true)]
private static extern uint GetTempFileName(string lpPathName, string lpPrefixString, uint uUnique, [Out] StringBuilder lpTempFileName);

/// <summary>
/// Returns the name of the temporary file in the CSSCRIPT subfolder of Path.GetTempPath().
/// </summary> /// <returns>Temporary file name.</returns> static public string GetScriptTempFile() { lock (typeof(CSExecutor)) { return Path.Combine(GetScriptTempDir(), Guid.NewGuid().ToString() + ".tmp"); } } /// <summary> /// Returns the name of the temporary folder in the CSSCRIPT subfolder of Path.GetTempPath(). /// <para>Under certain circumstances it may be desirable to the use the alternative location for the CS-Script temporary files. /// In such cases use SetScriptTempDir() to set the alternative location. /// </para> /// </summary> /// <returns>Temporary directory name.</returns> static public string GetScriptTempDir() { if (tempDir == null) { tempDir = Path.Combine(Path.GetTempPath(), "CSSCRIPT"); if (!Directory.Exists(tempDir)) Directory.CreateDirectory(tempDir); } return tempDir; } static string tempDir = null; /// <summary> /// Sets the location for the CS-Script temporary files directory. /// </summary> /// <param name="path">The path for the temporary directory.</param> static public void SetScriptTempDir(string path) { tempDir = path; } /// <summary> /// Generates the name of the cache directory for the specified script file. /// </summary> /// <param name="file">Script file name.</param> /// <returns>Cache directory name.</returns> public static string GetCacheDirectory(string file) { string commonCacheDir = Path.Combine(CSExecutor.GetScriptTempDir(), "Cache"); string cacheDir; if (Utils.IsLinux()) cacheDir = Path.Combine(commonCacheDir, Path.GetDirectoryName(Path.GetFullPath(file)) .GetHashCode() .ToString()); else cacheDir = Path.Combine(commonCacheDir, Path.GetDirectoryName(Path.GetFullPath(file)) .ToLower() //Win is not case-sensitive .GetHashCode() .ToString()); if (!Directory.Exists(cacheDir)) Directory.CreateDirectory(cacheDir); return cacheDir; } ///<summary> /// Contains the name of the temporary cache folder in the CSSCRIPT subfolder of Path.GetTempPath(). The cache folder is specific for every script file. 
/// </summary>
static public string ScriptCacheDir
{
    get
    {
        return cacheDir;
    }
}

/// <summary>
/// Generates the name of the temporary cache folder in the CSSCRIPT subfolder of Path.GetTempPath(). The cache folder is specific for every script file.
/// </summary>
/// <param name="scriptFile">script file</param>
static public void SetScriptCacheDir(string scriptFile)
{
    //this will also create the directory if it does not exist
    string directory = GetCacheDirectory(scriptFile);

    // Record runtime version and the script's home directory for cache diagnostics.
    string infoFile = Path.Combine(directory, "css_info.txt");
    using (StreamWriter writer = new StreamWriter(infoFile))
        writer.Write(Environment.Version.ToString() + "\n" + Path.GetDirectoryName(Path.GetFullPath(scriptFile)) + "\n");

    cacheDir = directory;
}

// Backing store for ScriptCacheDir; empty until SetScriptCacheDir is called.
private static string cacheDir = "";

/// <summary>
/// Prints Help info.
/// </summary>
public void ShowHelp()
{
    print(HelpProvider.BuildCommandInterfaceHelp());
}

/// <summary>
/// Show sample C# script file.
/// </summary>
public void ShowSample()
{
    print(HelpProvider.BuildSampleCode());
}

/// <summary>
/// Show sample precompiler C# script file.
/// </summary>
public void ShowPrecompilerSample()
{
    print(HelpProvider.BuildPrecompilerSampleCode());
}

/// <summary>
/// Show CS-Script version information.
/// </summary>
public void ShowVersion()
{
    print(HelpProvider.BuildVersionInfo());
}

[DllImport("kernel32.dll", SetLastError = true)]
static extern bool SetEnvironmentVariable(string lpName, string lpValue);

#endregion Class methods...
}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections.Generic; using System.Reflection; using ParquetSharp.Bytes; using ParquetSharp.External; using ParquetSharp.Format; using ParquetSharp.Hadoop.Metadata; namespace ParquetSharp.Hadoop { /** * Factory to produce compressors and decompressors that operate on java * direct memory, without requiring a copy into heap memory (where possible). 
*/
class DirectCodecFactory : CodecFactory, IDisposable
{
    private static readonly Log LOG = Log.getLog(typeof(DirectCodecFactory));

    // Allocator for direct (off-heap) buffers; validated as direct in the constructor.
    private readonly ByteBufferAllocator allocator;

    // Any of these can be null depending on the version of hadoop on the classpath
    private static readonly System.Type DIRECT_DECOMPRESSION_CODEC_CLASS;
    private static readonly MethodInfo DECOMPRESS_METHOD;
    private static readonly MethodInfo CREATE_DIRECT_DECOMPRESSOR_METHOD;

    // Probe reflectively for Hadoop's DirectDecompressionCodec so the factory also works
    // against Hadoop versions that do not ship it (members simply stay null).
    static DirectCodecFactory()
    {
        System.Type tempClass = null;
        MethodInfo tempCreateMethod = null;
        MethodInfo tempDecompressMethod = null;
        try
        {
            tempClass = Class.forName("org.apache.hadoop.io.compress.DirectDecompressionCodec");
            tempCreateMethod = tempClass.GetMethod("createDirectDecompressor");
            tempDecompressMethod = tempClass.GetMethod("decompress", new[] { typeof(ByteBuffer), typeof(ByteBuffer) });
        }
        catch (ClassNotFoundException e)
        {
            // do nothing, the class will just be assigned null
        }
        catch (NoSuchMethodException e)
        {
            // do nothing, the method will just be assigned null
        }
        DIRECT_DECOMPRESSION_CODEC_CLASS = tempClass;
        CREATE_DIRECT_DECOMPRESSOR_METHOD = tempCreateMethod;
        DECOMPRESS_METHOD = tempDecompressMethod;
    }

    /**
     * See docs on CodecFactory#createDirectCodecFactory which is how this class is
     * exposed publicly and is just a pass-through factory method for this constructor
     * to hide the rest of this class from public access.
     */
    internal DirectCodecFactory(Configuration config, ByteBufferAllocator allocator, int pageSize) :
        base(config, pageSize)
    {
        Preconditions.checkNotNull(allocator, "allocator");
        Preconditions.checkState(allocator.isDirect(),
            "A %s requires a direct buffer allocator be provided.",
            GetType().Name);
        this.allocator = allocator;
    }

    // Returns a buffer of at least `size` capacity, reusing `buffer` when possible;
    // a too-small buffer is released and replaced.
    private ByteBuffer ensure(ByteBuffer buffer, int size)
    {
        if (buffer == null)
        {
            buffer = allocator.allocate(size);
        }
        else if (buffer.capacity() >= size)
        {
            buffer.clear();
        }
        else
        {
            release(buffer);
            buffer = allocator.allocate(size);
        }
        return buffer;
    }

    // Returns the buffer to the allocator; always yields null so callers can clear their field in one step.
    ByteBuffer release(ByteBuffer buffer)
    {
        if (buffer != null)
        {
            allocator.release(buffer);
        }
        return null;
    }

    protected override BytesCompressor createCompressor(CompressionCodecName codecName)
    {
        CompressionCodec codec = getCodec(codecName);
        if (codec == null)
        {
            return new NoopCompressor();
        }
        else if (codecName == CompressionCodecName.SNAPPY)
        {
            // avoid using the default Snappy codec since it allocates direct buffers at awkward spots.
            return new SnappyCompressor();
        }
        else
        {
            // todo: create class similar to the SnappyCompressor for zlib and exclude it as
            // snappy is above since it also generates allocateDirect calls.
            return new HeapBytesCompressor(this, codecName);
        }
    }

    protected override BytesDecompressor createDecompressor(CompressionCodecName codecName)
    {
        CompressionCodec codec = getCodec(codecName);
        if (codec == null)
        {
            return new NoopDecompressor();
        }
        else if (codecName == CompressionCodecName.SNAPPY)
        {
            return new SnappyDecompressor();
        }
        else if (DirectCodecPool.INSTANCE.codec(codec).supportsDirectDecompression())
        {
            return new FullDirectDecompressor(codecName);
        }
        else
        {
            return new IndirectDecompressor(codec);
        }
    }

    // NOTE(review): delegates to the parameterless release() — presumably the base
    // CodecFactory's cleanup; confirm against CodecFactory.
    public void close()
    {
        release();
    }

    /**
     * Wrapper around legacy hadoop compressors that do not implement a direct memory
     * based version of the decompression algorithm.
*/
public class IndirectDecompressor : BytesDecompressor
{
    // Pooled heap-based decompressor; returned to the pool on release().
    private readonly Decompressor decompressor;

    public IndirectDecompressor(CompressionCodec codec)
    {
        this.decompressor = DirectCodecPool.INSTANCE.codec(codec).borrowDecompressor();
    }

    public override BytesInput decompress(BytesInput bytes, int uncompressedSize)
    {
        decompressor.reset();

        byte[] compressed = bytes.toByteArray();
        decompressor.setInput(compressed, 0, compressed.Length);

        byte[] decompressed = new byte[uncompressedSize];
        decompressor.decompress(decompressed, 0, uncompressedSize);
        return BytesInput.from(decompressed);
    }

    public override void decompress(ByteBuffer input, int compressedSize, ByteBuffer output, int uncompressedSize)
    {
        decompressor.reset();

        // The underlying codec only works on heap arrays, so copy the direct
        // input buffer out, decompress, then copy the result back in.
        byte[] compressed = new byte[compressedSize];
        input.position(0);
        input.get(compressed);
        decompressor.setInput(compressed, 0, compressed.Length);

        byte[] decompressed = new byte[uncompressedSize];
        decompressor.decompress(decompressed, 0, uncompressedSize);

        output.clear();
        output.put(decompressed);
    }

    protected internal override void release()
    {
        DirectCodecPool.INSTANCE.returnDecompressor(decompressor);
    }
}

/**
 * Wrapper around new Hadoop compressors that implement a direct memory
 * based version of a particular decompression algorithm. To maintain
 * compatibility with Hadoop 1.x these classes that implement
 * {@link org.apache.hadoop.io.compress.DirectDecompressionCodec}
 * are currently retrieved and have their decompression method invoked
 * with reflection.
*/
public class FullDirectDecompressor : BytesDecompressor
{
    // Hadoop DirectDecompressor instance, held as object because the type is resolved reflectively.
    private readonly object decompressor;
    // Heap-based fallback used for the BytesInput overload.
    private HeapBytesDecompressor extraDecompressor;

    public FullDirectDecompressor(CompressionCodecName codecName)
    {
        // NOTE(review): getCodec is declared on the enclosing factory in the original Java
        // (inner-class access); confirm how this resolves in the C# port.
        CompressionCodec codec = getCodec(codecName);
        this.decompressor = DirectCodecPool.INSTANCE.codec(codec).borrowDirectDecompressor();
        this.extraDecompressor = new HeapBytesDecompressor(codecName);
    }

    public override BytesInput decompress(BytesInput compressedBytes, int uncompressedSize)
    {
        return extraDecompressor.decompress(compressedBytes, uncompressedSize);
    }

    public override void decompress(ByteBuffer input, int compressedSize, ByteBuffer output, int uncompressedSize)
    {
        output.clear();
        try
        {
            // Reflective call to DirectDecompressionCodec.decompress(ByteBuffer, ByteBuffer).
            DECOMPRESS_METHOD.invoke(decompressor, (ByteBuffer)input.limit(compressedSize), (ByteBuffer)output.limit(uncompressedSize));
        }
        catch (MemberAccessException e)
        {
            throw new DirectCodecPool.ParquetCompressionCodecException(e);
        }
        catch (TargetInvocationException e)
        {
            throw new DirectCodecPool.ParquetCompressionCodecException(e);
        }
        output.position(uncompressedSize);
    }

    protected internal override void release()
    {
        DirectCodecPool.INSTANCE.returnDirectDecompressor(decompressor);
        extraDecompressor.release();
    }
}

// Pass-through "decompressor" for UNCOMPRESSED data; only validates sizes and copies bytes.
public class NoopDecompressor : BytesDecompressor
{
    public override void decompress(ByteBuffer input, int compressedSize, ByteBuffer output, int uncompressedSize)
    {
        Preconditions.checkArgument(compressedSize == uncompressedSize,
            "Non-compressed data did not have matching compressed and uncompressed sizes.");
        output.clear();
        output.put((ByteBuffer)input.duplicate().position(0).limit(compressedSize));
    }

    public override BytesInput decompress(BytesInput bytes, int uncompressedSize)
    {
        return bytes;
    }

    protected internal override void release()
    {
    }
}

// Snappy decompressor: direct-buffer path uses the Snappy library; heap path delegates.
public class SnappyDecompressor : BytesDecompressor
{
    private HeapBytesDecompressor extraDecompressor;

    public SnappyDecompressor()
    {
        this.extraDecompressor = new HeapBytesDecompressor(CompressionCodecName.SNAPPY);
    }

    public override BytesInput decompress(BytesInput bytes, int uncompressedSize)
    {
        return extraDecompressor.decompress(bytes, uncompressedSize);
    }

    public override void decompress(ByteBuffer src, int compressedSize, ByteBuffer dst, int uncompressedSize)
    {
        dst.clear();
        int size = Snappy.uncompress(src, dst);
        dst.limit(size);
    }

    protected internal override void release()
    {
    }
}

public class SnappyCompressor : BytesCompressor
{
    // TODO - this outgoing buffer might be better off not being shared, this seems to
    // only work because of an extra copy currently happening where this interface is
    // be consumed
    private ByteBuffer incoming;
    private ByteBuffer outgoing;

    /**
     * Compress a given buffer of bytes
     * @param bytes input to compress
     * @return compressed bytes backed by the shared outgoing buffer
     * @throws IOException
     */
    public override BytesInput compress(BytesInput bytes)
    {
        int maxOutputSize = Snappy.maxCompressedLength((int)bytes.size());
        ByteBuffer bufferIn = bytes.toByteBuffer();
        outgoing = ensure(outgoing, maxOutputSize);
        int size;
        if (bufferIn.isDirect())
        {
            size = Snappy.compress(bufferIn, outgoing);
        }
        else
        {
            // Snappy library requires buffers be direct
            this.incoming = ensure(this.incoming, (int)bytes.size());
            this.incoming.put(bufferIn);
            this.incoming.flip();
            size = Snappy.compress(this.incoming, outgoing);
        }

        return BytesInput.from(outgoing, 0, (int)size);
    }

    public override CompressionCodecName getCodecName()
    {
        return CompressionCodecName.SNAPPY;
    }

    protected internal override void release()
    {
        // NOTE(review): DirectCodecFactory.This appears to be a port of Java's
        // DirectCodecFactory.this (outer-instance access) — verify this member exists.
        outgoing = DirectCodecFactory.This.release(outgoing);
        incoming = DirectCodecFactory.This.release(incoming);
    }
}

// Pass-through compressor for the UNCOMPRESSED codec.
public class NoopCompressor : BytesCompressor
{
    public NoopCompressor()
    {
    }

    public override BytesInput compress(BytesInput bytes)
    {
        return bytes;
    }

    public override CompressionCodecName getCodecName()
    {
        return CompressionCodecName.UNCOMPRESSED;
    }

    protected internal override void release()
    {
    }
}

// Singleton registry of per-codec object pools for (direct) compressors and decompressors.
class DirectCodecPool
{
    public static readonly DirectCodecPool INSTANCE = new DirectCodecPool();

    private readonly Dictionary<CompressionCodec, CodecPool> codecs =
Collections.synchronizedMap(new Dictionary<CompressionCodec, CodecPool>());
    // Reverse lookups: concrete (de)compressor type -> owning pool, used by returnToPool.
    private readonly Dictionary<System.Type, GenericObjectPool> directDePools =
        Collections.synchronizedMap(new Dictionary<System.Type, GenericObjectPool>());
    private readonly Dictionary<System.Type, GenericObjectPool> dePools =
        Collections.synchronizedMap(new Dictionary<System.Type, GenericObjectPool>());
    private readonly Dictionary<System.Type, GenericObjectPool> cPools =
        Collections.synchronizedMap(new Dictionary<System.Type, GenericObjectPool>());

    private DirectCodecPool()
    {
    }

    /**
     * Pools of compressor/decompressor instances for one codec. Probes each pool once at
     * construction time to find out whether the codec actually provides an implementation.
     */
    public class CodecPool
    {
        private readonly GenericObjectPool compressorPool;
        private readonly GenericObjectPool decompressorPool;
        private readonly GenericObjectPool directDecompressorPool;
        private readonly bool supportDirectDecompressor;
        // FIX: placeholders changed from Java's %s to .NET's {0}/{1} so string.Format substitutes them.
        private const string BYTE_BUF_IMPL_NOT_FOUND_MSG =
            "Unable to find ByteBuffer based {0} for codec {1}, will use a byte array based implementation instead.";

        internal CodecPool(CompressionCodec codec)
        {
            try
            {
                // FIX: the original compared codec.GetType() == DIRECT_DECOMPRESSION_CODEC_CLASS.
                // DIRECT_DECOMPRESSION_CODEC_CLASS is the DirectDecompressionCodec interface type,
                // so an exact-type comparison could never be true and direct decompression was
                // effectively always disabled. Use an instance-of check instead (and guard for the
                // interface being absent on old Hadoop versions).
                bool canDecompressDirect = DIRECT_DECOMPRESSION_CODEC_CLASS != null
                    && DIRECT_DECOMPRESSION_CODEC_CLASS.IsInstanceOfType(codec);

                compressorPool = new GenericObjectPool(new CompressorObjectFactory(codec), int.MaxValue);
                object com = compressorPool.borrowObject();
                if (com != null)
                {
                    // FIX: the reverse-lookup maps are instance fields of the enclosing singleton;
                    // access them through INSTANCE (nested classes have no outer-instance in C#).
                    INSTANCE.cPools[com.GetType()] = compressorPool;
                    compressorPool.returnObject(com);
                }
                else if (Log.DEBUG)
                {
                    LOG.debug(string.Format(BYTE_BUF_IMPL_NOT_FOUND_MSG, "compressor", codec.GetType().FullName));
                }

                decompressorPool = new GenericObjectPool(new DecompressorObjectFactory(codec), int.MaxValue);
                object decom = decompressorPool.borrowObject();
                if (decom != null)
                {
                    INSTANCE.dePools[decom.GetType()] = decompressorPool;
                    decompressorPool.returnObject(decom);
                }
                else if (Log.DEBUG)
                {
                    // FIX: the original concatenated the codec name into the first format argument
                    // ("decompressor" + name) instead of passing it as the second argument.
                    LOG.debug(string.Format(BYTE_BUF_IMPL_NOT_FOUND_MSG, "decompressor", codec.GetType().FullName));
                }

                if (canDecompressDirect)
                {
                    directDecompressorPool = new GenericObjectPool(new DirectDecompressorObjectFactory(codec), int.MaxValue);
                    object ddecom = directDecompressorPool.borrowObject();
                    if (ddecom != null)
                    {
                        INSTANCE.directDePools[ddecom.GetType()] = directDecompressorPool;
                        directDecompressorPool.returnObject(ddecom);
                    }
                    else
                    {
                        canDecompressDirect = false;
                        if (Log.DEBUG)
                        {
                            LOG.debug(string.Format(BYTE_BUF_IMPL_NOT_FOUND_MSG, "compressor", codec.GetType().FullName));
                        }
                    }
                }
                else
                {
                    directDecompressorPool = null;
                }
                this.supportDirectDecompressor = canDecompressDirect;
            }
            catch (Exception e)
            {
                throw new ParquetCompressionCodecException("Error creating compression codec pool.", e);
            }
        }

        public object borrowDirectDecompressor()
        {
            Preconditions.checkArgument(supportDirectDecompressor,
                "Tried to get a direct Decompressor from a non-direct codec.");
            try
            {
                return directDecompressorPool.borrowObject();
            }
            catch (Exception e)
            {
                throw new ParquetCompressionCodecException(e);
            }
        }

        public bool supportsDirectDecompression()
        {
            return supportDirectDecompressor;
        }

        public Decompressor borrowDecompressor()
        {
            // FIX: borrow<T> is an instance member of the enclosing singleton and its type
            // argument cannot be inferred; call it through INSTANCE with an explicit type.
            return INSTANCE.borrow<Decompressor>(decompressorPool);
        }

        public Compressor borrowCompressor()
        {
            return INSTANCE.borrow<Compressor>(compressorPool);
        }
    }

    /// Gets (or lazily creates) the pool set for a codec; double-checked under a lock.
    /// NOTE(review): the first TryGetValue runs without the lock while writers may be
    /// adding — relies on the synchronizedMap wrapper's guarantees; confirm.
    public CodecPool codec(CompressionCodec codec)
    {
        CodecPool pools;
        if (!codecs.TryGetValue(codec, out pools))
        {
            lock (this)
            {
                if (!codecs.TryGetValue(codec, out pools))
                {
                    pools = new CodecPool(codec);
                    codecs.Add(codec, pools);
                }
            }
        }
        return pools;
    }

    // Routes a borrowed object back to the pool registered for its concrete type.
    private void returnToPool(object obj, Dictionary<System.Type, GenericObjectPool> pools)
    {
        try
        {
            GenericObjectPool pool;
            if (!pools.TryGetValue(obj.GetType(), out pool))
            {
                throw new IllegalStateException("Received unexpected compressor or decompressor, " +
                    "cannot be returned to any available pool: " + obj.GetType().Name);
            }
            pool.returnObject(obj);
        }
        catch (Exception e)
        {
            throw new ParquetCompressionCodecException(e);
        }
    }

    /**
     * Borrow an object from a pool.
     *
     * @param pool - the pool to borrow from, must not be null
     * @return - an object from the pool
     */
    public T borrow<T>(GenericObjectPool pool)
    {
        try
        {
            return (T)pool.borrowObject();
        }
        catch (Exception e)
        {
            throw new ParquetCompressionCodecException(e);
        }
    }

    public void returnCompressor(Compressor compressor)
    {
        returnToPool(compressor, cPools);
    }

    public void returnDecompressor(Decompressor decompressor)
    {
        returnToPool(decompressor, dePools);
    }

    public void returnDirectDecompressor(object decompressor)
    {
        returnToPool(decompressor, directDePools);
    }

    public class ParquetCompressionCodecException : ParquetRuntimeException
    {
        public ParquetCompressionCodecException()
        {
        }

        public ParquetCompressionCodecException(string message, Exception cause)
            : base(message, cause)
        {
        }

        public ParquetCompressionCodecException(string message)
            : base(message)
        {
        }

        public ParquetCompressionCodecException(Exception cause)
            : base(cause)
        {
        }
    }

    class CompressorObjectFactory : BasePoolableObjectFactory
    {
        readonly CompressionCodec codec;

        public CompressorObjectFactory(CompressionCodec codec)
        {
            this.codec = codec;
        }

        public object makeObject()
        {
            return codec.createCompressor();
        }
    }

    class DecompressorObjectFactory : BasePoolableObjectFactory
    {
        readonly CompressionCodec codec;

        public DecompressorObjectFactory(CompressionCodec codec)
        {
            this.codec = codec;
        }

        public object makeObject()
        {
            return codec.createDecompressor();
        }
    }

    class DirectDecompressorObjectFactory : BasePoolableObjectFactory
    {
        readonly CompressionCodec codec;

        public DirectDecompressorObjectFactory(CompressionCodec codec)
        {
            this.codec = codec;
        }

        // FIX: createDirectDecompressor is an instance method of the codec; the original
        // invoked it with the declaring Type as the target, which cannot succeed. Invoke
        // it on the codec instance (matching the upstream parquet-mr Java implementation).
        public object makeObject()
        {
            return CREATE_DIRECT_DECOMPRESSOR_METHOD.invoke(codec);
        }
    }
}
}
}
// Copyright (c) Charlie Poole, Rob Prouse and Contributors. MIT License - see LICENSE.txt

using System;
using System.Collections;
using System.Linq;
using NUnit.Framework.Internal;
using NUnit.TestUtilities.Comparers;

namespace NUnit.Framework.Constraints
{
    // Tests for the collection-ordering constraint family (Is.Ordered), covering:
    // plain ascending/descending ordering, ordering by one or two properties/fields,
    // custom comparers, the failure-message format, and misuse exceptions.
    [TestFixture]
    public class CollectionOrderedConstraintTests
    {
        private readonly string NL = Environment.NewLine;

        #region Ordering Tests

        // Data-driven: each case supplies a collection plus the constraint it must satisfy.
        [TestCaseSource(nameof(OrderedByData))]
        public void IsOrderedBy(IEnumerable collection, Constraint constraint)
        {
            Assert.That(collection, constraint);
        }

        private static readonly object[] OrderedByData = new[]
        {
            // Simple Ordering
            new TestCaseData(new[] { "x", "y", "z" }, Is.Ordered),
            new TestCaseData(new[] { 1, 2, 3 }, Is.Ordered),
            new TestCaseData(new[] { "x", "y", "z" }, Is.Ordered.Ascending),
            new TestCaseData(new[] { 1, 2, 3 }, Is.Ordered.Ascending),
            new TestCaseData(new[] { "z", "y", "x" }, Is.Ordered.Descending),
            new TestCaseData(new[] { 3, 2, 1 }, Is.Ordered.Descending),
            // Duplicates are allowed in an ordered collection.
            new TestCaseData(new[] { "x", "x", "z" }, Is.Ordered),
            // Null sorts first ascending / last descending; null in the middle breaks ordering.
            new TestCaseData(new[] { null, "x", "y" }, Is.Ordered),
            new TestCaseData(new[] { "y", "x", null }, Is.Ordered.Descending),
            new TestCaseData(new[] { "x", null, "y" }, Is.Not.Ordered),

            // Ordered By Single Property
            new TestCaseData(
                new[] { new TestClass1(1), new TestClass1(2), new TestClass1(3) },
                Is.Ordered.By("Value")),
            new TestCaseData(
                new[] { new TestClass1(1), new TestClass1(2), new TestClass1(3) },
                Is.Ordered.By("Value").Ascending),
            new TestCaseData(
                new[] { new TestClass1(1), new TestClass1(2), new TestClass1(3) },
                Is.Ordered.Ascending.By("Value")),
            new TestCaseData(
                new[] { new TestClass1(3), new TestClass1(2), new TestClass1(1) },
                Is.Ordered.By("Value").Descending),
            new TestCaseData(
                new[] { new TestClass1(3), new TestClass1(2), new TestClass1(1) },
                Is.Ordered.Descending.By("Value")),
            new TestCaseData(
                new[] { new TestClass1(1), new TestClass1(2), new TestClass1(3) },
                Is.Ordered.By("Value").Using(ObjectComparer.Default)),
            // Heterogeneous element types work as long as each exposes the named property.
            new TestCaseData(
                new object[] { new TestClass1(1), new TestClass2(2) },
                Is.Ordered.By("Value")),

            // Ordered By Two Properties
            new TestCaseData(
                new[] { new TestClass3("ABC", 1), new TestClass3("ABC", 42), new TestClass3("XYZ", 2) },
                Is.Ordered.By("A").By("B")),
            new TestCaseData(
                new[] { new TestClass3("ABC", 1), new TestClass3("ABC", 42), new TestClass3("XYZ", 2) },
                Is.Ordered.By("A").Then.By("B")),
            new TestCaseData(
                new[] { new TestClass3("ABC", 1), new TestClass3("ABC", 42), new TestClass3("XYZ", 2) },
                Is.Ordered.Ascending.By("A").Then.Ascending.By("B")),
            new TestCaseData(
                new[] { new TestClass3("ABC", 1), new TestClass3("ABC", 42), new TestClass3("XYZ", 2) },
                Is.Ordered.By("A").Ascending.Then.By("B").Ascending),
            new TestCaseData(
                new[] { new TestClass3("ABC", 42), new TestClass3("XYZ", 99), new TestClass3("XYZ", 2) },
                Is.Not.Ordered.By("A").Then.By("B")),
            new TestCaseData(
                new[] { new TestClass3("XYZ", 2), new TestClass3("ABC", 1), new TestClass3("ABC", 42) },
                Is.Ordered.By("A").Descending.Then.By("B")),
            new TestCaseData(
                new[] { new TestClass3("XYZ", 2), new TestClass3("ABC", 1), new TestClass3("ABC", 42) },
                Is.Ordered.Descending.By("A").Then.By("B")),
            new TestCaseData(
                new[] { new TestClass3("ABC", 42), new TestClass3("ABC", 1), new TestClass3("XYZ", 2) },
                Is.Ordered.By("A").Ascending.Then.By("B").Descending),
            new TestCaseData(
                new[] { new TestClass3("ABC", 42), new TestClass3("ABC", 1), new TestClass3("XYZ", 2) },
                Is.Ordered.Ascending.By("A").Then.Descending.By("B")),
            new TestCaseData(
                new[] { new TestClass3("ABC", 42), new TestClass3("ABC", 1), new TestClass3("XYZ", 2) },
                Is.Not.Ordered.By("A").Then.By("B")),
            new TestCaseData(
                new[] { new TestClass3("XYZ", 2), new TestClass3("ABC", 42), new TestClass3("ABC", 1) },
                Is.Ordered.By("A").Descending.Then.By("B").Descending),
            new TestCaseData(
                new[] { new TestClass3("XYZ", 2), new TestClass3("ABC", 42), new TestClass3("ABC", 1) },
                Is.Ordered.Descending.By("A").Then.Descending.By("B")),

            // Ordered by Single Field (TestClass5 exposes a public field, not a property)
            new TestCaseData(
                new[] { new TestClass5(10), new TestClass5(20), new TestClass5(30) },
                Is.Ordered.By("Value")),
            new TestCaseData(
                new[] { new TestClass5(10), new TestClass5(20), new TestClass5(30) },
                Is.Ordered.By("Value").Ascending),
            new TestCaseData(
                new[] { new TestClass5(10), new TestClass5(20), new TestClass5(30) },
                Is.Ordered.Ascending.By("Value")),
            new TestCaseData(
                new[] { new TestClass5(30), new TestClass5(20), new TestClass5(10) },
                Is.Ordered.By("Value").Descending),
            new TestCaseData(
                new[] { new TestClass5(30), new TestClass5(20), new TestClass5(10) },
                Is.Ordered.Descending.By("Value")),
            new TestCaseData(
                new[] { new TestClass5(10), new TestClass5(20), new TestClass5(30) },
                Is.Ordered.By("Value").Using(ObjectComparer.Default)),
            // Mixed field (TestClass5) and property (TestClass2) lookup by the same name.
            new TestCaseData(
                new object[] { new TestClass5(10), new TestClass2(20) },
                Is.Ordered.By("Value")),

            // Ordered By Two Fields
            new TestCaseData(
                new[] { new TestClass6("ABC", 10), new TestClass6("ABC", 420), new TestClass6("XYZ", 20) },
                Is.Ordered.By("A").By("B")),
            new TestCaseData(
                new[] { new TestClass6("ABC", 10), new TestClass6("ABC", 420), new TestClass6("XYZ", 20) },
                Is.Ordered.By("A").Then.By("B")),
            new TestCaseData(
                new[] { new TestClass6("ABC", 10), new TestClass6("ABC", 420), new TestClass6("XYZ", 20) },
                Is.Ordered.Ascending.By("A").Then.Ascending.By("B")),
            new TestCaseData(
                new[] { new TestClass6("ABC", 10), new TestClass6("ABC", 420), new TestClass6("XYZ", 20) },
                Is.Ordered.By("A").Ascending.Then.By("B").Ascending),
            new TestCaseData(
                new[] { new TestClass6("ABC", 420), new TestClass6("XYZ", 990), new TestClass6("XYZ", 20) },
                Is.Not.Ordered.By("A").Then.By("B")),
            new TestCaseData(
                new[] { new TestClass6("XYZ", 20), new TestClass6("ABC", 10), new TestClass6("ABC", 420) },
                Is.Ordered.By("A").Descending.Then.By("B")),
            new TestCaseData(
                new[] { new TestClass6("XYZ", 20), new TestClass6("ABC", 10), new TestClass6("ABC", 420) },
                Is.Ordered.Descending.By("A").Then.By("B")),
            new TestCaseData(
                new[] { new TestClass6("ABC", 420), new TestClass6("ABC", 10), new TestClass6("XYZ", 20) },
                Is.Ordered.By("A").Ascending.Then.By("B").Descending),
            new TestCaseData(
                new[] { new TestClass6("ABC", 420), new TestClass6("ABC", 10), new TestClass6("XYZ", 20) },
                Is.Ordered.Ascending.By("A").Then.Descending.By("B")),
            new TestCaseData(
                new[] { new TestClass6("ABC", 420), new TestClass6("ABC", 10), new TestClass6("XYZ", 20) },
                Is.Not.Ordered.By("A").Then.By("B")),
            new TestCaseData(
                new[] { new TestClass6("XYZ", 20), new TestClass6("ABC", 420), new TestClass6("ABC", 10) },
                Is.Ordered.By("A").Descending.Then.By("B").Descending),
            new TestCaseData(
                new[] { new TestClass6("XYZ", 20), new TestClass6("ABC", 420), new TestClass6("ABC", 10) },
                Is.Ordered.Descending.By("A").Then.Descending.By("B")),
        };

        #endregion

        #region Error Message Tests

        // Verifies the exact failure message, including the index where ordering first breaks.
        [Test]
        public void IsOrdered_Fails()
        {
            var expectedMessage =
                " Expected: collection ordered" + NL +
                " But was: < \"x\", \"z\", \"y\" >" + NL +
                " Ordering breaks at index [2]: \"y\"" + NL;

            var ex = Assert.Throws<AssertionException>(() => Assert.That(new[] { "x", "z", "y" }, Is.Ordered));
            Assert.That(ex.Message, Is.EqualTo(expectedMessage));
        }

        // For large collections the "But was" line is elided around the breaking item.
        [Test]
        public void IsOrdered_DisplaysBreakingItemForHugeCollections()
        {
            var actual = Enumerable.Range(0, 100).ToArray();
            actual[90] = 1000;

            var expectedMessage =
                " Expected: collection ordered" + NL +
                " But was: < ...83, 84, 85, 86, 87, 88, 89, 1000, 91, 92... >" + NL +
                " Ordering breaks at index [91]: 91" + NL;

            var ex = Assert.Throws<AssertionException>(() => Assert.That(actual, Is.Ordered));
            Assert.That(ex.Message, Is.EqualTo(expectedMessage));
        }

        #endregion

        #region Custom Comparer Tests

        [Test]
        public void IsOrdered_HandlesCustomComparison()
        {
            AlwaysEqualComparer comparer = new AlwaysEqualComparer();
            Assert.That(new[] { new object(), new object() }, Is.Ordered.Using(comparer));
            Assert.That(comparer.CallCount, Is.GreaterThan(0), "TestComparer was not called");
        }

        // Only one comparer may be attached per ordering step.
        [Test]
        public void ExceptionThrownForMultipleComparersInStep()
        {
            Assert.That(() => Is.Ordered.Using(new TestComparer()).Using(new AlwaysEqualComparer()),
                Throws.TypeOf<InvalidOperationException>());
        }

        [Test]
        public void MultipleComparersUsedInDifferentSteps()
        {
            var comparer1 = new TestComparer();
            var comparer2 = new AlwaysEqualComparer();
            var collection = new[] { new TestClass3("XYZ", 2), new TestClass3("ABC", 42), new TestClass3("ABC", 1) };

            Assert.That(collection, Is.Ordered.By("A").Using(comparer1).Then.By("B").Using(comparer2));

            // First comparer is called for every pair of items in the collection
            Assert.That(comparer1.CallCount, Is.EqualTo(2), "First comparer should be called twice");

            // Second comparer is only called where the first property matches
            Assert.That(comparer2.CallCount, Is.EqualTo(1), "Second comparer should be called once");
        }

        [Test]
        public void IsOrdered_HandlesCustomComparison2()
        {
            TestComparer comparer = new TestComparer();
            Assert.That(new[] { 2, 1 }, Is.Ordered.Using(comparer));
            Assert.That(comparer.CallCount, Is.GreaterThan(0), "TestComparer was not called");
        }

        [Test]
        public void UsesProvidedGenericComparer()
        {
            var comparer = new GenericComparer<int>();
            Assert.That(new[] { 1, 2 }, Is.Ordered.Using(comparer));
            Assert.That(comparer.WasCalled, "Comparer was not called");
        }

        [Test]
        public void UsesProvidedGenericComparison()
        {
            var comparer = new GenericComparison<int>();
            Assert.That(new[] { 1, 2 }, Is.Ordered.Using(comparer.Delegate));
            Assert.That(comparer.WasCalled, "Comparer was not called");
        }

        [Test]
        public void UsesProvidedLambda()
        {
            Comparison<int> comparer = (x, y) => x.CompareTo(y);
            Assert.That(new[] { 1, 2 }, Is.Ordered.Using(comparer));
        }

        #endregion

        #region Exception Tests

        // Redundant or conflicting direction modifiers are rejected at constraint-build time.
        [Test]
        public void ExceptionThrownForRepeatedAscending()
        {
            Assert.That(() => Is.Ordered.Ascending.Ascending, Throws.TypeOf<InvalidOperationException>());
        }

        [Test]
        public void ExceptionThrownForRepeatedDescending()
        {
            Assert.That(() => Is.Ordered.Descending.Descending, Throws.TypeOf<InvalidOperationException>());
        }

        [Test]
        public void ExceptionThrownForAscendingPlusDescending()
        {
            Assert.That(() => Is.Ordered.Ascending.Descending, Throws.TypeOf<InvalidOperationException>());
        }

        [Test]
        public void ExceptionThrownForAscendingByDescending()
        {
            Assert.That(() => Is.Ordered.Ascending.By("A").Descending, Throws.TypeOf<InvalidOperationException>());
        }

        // A null element cannot supply the ordering property; the error names its index.
        [Test]
        public void IsOrderedByProperty_ThrowsOnNull()
        {
            var ex = Assert.Throws<ArgumentNullException>(() =>
                Assert.That(new[] { new TestClass4("x"), null, new TestClass4("z") }, Is.Ordered.By("Value")));
            Assert.That(ex.Message, Does.Contain("index 1"));
        }

        [Test]
        public void IsOrdered_TypesMustBeComparable()
        {
            Assert.Throws<ArgumentException>(() => Assert.That(new object[] { 1, "x" }, Is.Ordered));
        }

        [Test]
        public void IsOrdered_AtLeastOneArgMustImplementIComparable()
        {
            Assert.Throws<ArgumentException>(() => Assert.That(new[] { new object(), new object() }, Is.Ordered));
        }

        [TestCaseSource(nameof(InvalidOrderedByData))]
        public void IsOrdered_ThrowsOnMissingProperty(object[] collection, string property, string expectedIndex)
        {
            Assert.That(() => Assert.That(collection, Is.Ordered.By(property)),
                Throws.ArgumentException.With.Message.Contain(expectedIndex));
        }

        private static readonly object[] InvalidOrderedByData = new[]
        {
            new TestCaseData(new object[] { "a", "b" }, "A", "index 0"),
            new TestCaseData(new object[] { new TestClass3("a", 1), "b" }, "A", "index 1"),
        };

        #endregion

        #region Test Classes

        // Single get-only property "Value".
        public class TestClass1
        {
            public int Value { get; }

            public TestClass1(int value)
            {
                Value = value;
            }

            public override string ToString()
            {
                return Value.ToString();
            }
        }

        // Private type: verifies the property lookup works on non-public classes.
        private class TestClass2
        {
            public int Value { get; }

            public TestClass2(int value)
            {
                Value = value;
            }

            public override string ToString()
            {
                return Value.ToString();
            }
        }

        // Two get-only properties, used for multi-step ordering.
        public class TestClass3
        {
            public string A { get; }
            public int B { get; }

            public TestClass3(string a, int b)
            {
                A = a;
                B = b;
            }

            public override string ToString()
            {
                return A.ToString() + "," + B.ToString();
            }
        }

        // Public readonly field only; note it has no "Value" member.
        public class TestClass4
        {
            public readonly string A;

            public TestClass4(string a)
            {
                A = a;
            }

            public override string ToString()
            {
                return A;
            }
        }

        // Public mutable field "Value" (field-based ordering).
        public class TestClass5
        {
            public int Value;

            public TestClass5(int value)
            {
                Value = value;
            }

            public override string ToString()
            {
                return Value.ToString();
            }
        }

        // Two public fields, used for multi-step field ordering.
        public class TestClass6
        {
            public string A;
            public int B;

            public TestClass6(string a, int b)
            {
                A = a;
                B = b;
            }

            public override string ToString()
            {
                return A.ToString() + "," + B.ToString();
            }
        }

        #endregion
    }
}
using System;
using System.Data;
using System.Data.Common;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Orleans.Internal;
using Orleans.Tests.SqlUtils;
using UnitTests.General;
using Xunit;

namespace UnitTests.StorageTests.AdoNet
{
    /// <summary>
    /// Row shape used by the streaming round-trip tests: an id plus an opaque blob.
    /// </summary>
    public class StreamingTest
    {
        public int Id { get; set; }
        public byte[] StreamData { get; set; }
    }

    /// <summary>
    /// Integration tests for streaming large binary payloads through the relational
    /// storage layer (SQL Server and MySQL) and for query cancellation behavior.
    /// Tests are skipped when the corresponding database could not be initialized.
    /// </summary>
    public class RelationalStoreTests : IClassFixture<RelationalStoreTests.Fixture>
    {
        private const string testDatabaseName = "OrleansStreamTest";

        //This timeout limit should be clearly less than that defined in RelationalStorageForTesting.CancellationTestQuery.
        private readonly TimeSpan CancellationTestTimeoutLimit = TimeSpan.FromSeconds(1);
        private readonly TimeSpan StreamCancellationTimeoutLimit = TimeSpan.FromSeconds(15);

        private const int MiB = 1048576;
        private const int StreamSizeToBeInsertedInBytes = MiB * 2;
        private const int NumberOfParallelStreams = 5;

        private readonly RelationalStorageForTesting sqlServerStorage;
        private readonly RelationalStorageForTesting mySqlStorage;

        /// <summary>
        /// Sets up one test database per provider. Initialization failures are logged
        /// and leave the corresponding storage null, which causes tests to be skipped.
        /// </summary>
        public class Fixture
        {
            public Fixture()
            {
                try
                {
                    SqlServerStorage = RelationalStorageForTesting.SetupInstance(AdoNetInvariants.InvariantNameSqlServer, testDatabaseName).GetAwaiter().GetResult();
                }
                catch (Exception ex)
                {
                    Console.WriteLine("Failed to initialize SQL Server for RelationalGeneralTests: {0}", ex);
                }

                try
                {
                    MySqlStorage = RelationalStorageForTesting.SetupInstance(AdoNetInvariants.InvariantNameMySql, testDatabaseName).GetAwaiter().GetResult();
                }
                catch (Exception ex)
                {
                    Console.WriteLine("Failed to initialize MySQL for RelationalGeneralTests: {0}", ex);
                }
            }

            public RelationalStorageForTesting SqlServerStorage { get; private set; }
            public RelationalStorageForTesting MySqlStorage { get; private set; }
        }

        public RelationalStoreTests(Fixture fixture)
        {
            this.sqlServerStorage = fixture.SqlServerStorage;
            this.mySqlStorage = fixture.MySqlStorage;
        }

        [SkippableFact, TestCategory("Functional"), TestCategory("Persistence"), TestCategory("AdoNet")]
        public async Task Streaming_SqlServer_Test()
        {
            using(var tokenSource = new CancellationTokenSource(StreamCancellationTimeoutLimit))
            {
                var isMatch = await Task.WhenAll(InsertAndReadStreamsAndCheckMatch(sqlServerStorage, StreamSizeToBeInsertedInBytes, NumberOfParallelStreams, tokenSource.Token));
                Assert.True(isMatch.All(i => i), "All inserted streams should be equal to read streams.");
            }
        }

        [SkippableFact, TestCategory("Functional"), TestCategory("Persistence"), TestCategory("MySql")]
        public async Task Streaming_MySql_Test()
        {
            using(var tokenSource = new CancellationTokenSource(StreamCancellationTimeoutLimit))
            {
                var isMatch = await Task.WhenAll(InsertAndReadStreamsAndCheckMatch(mySqlStorage, StreamSizeToBeInsertedInBytes, NumberOfParallelStreams, tokenSource.Token));
                Assert.True(isMatch.All(i => i), "All inserted streams should be equal to read streams.");
            }
        }

        [SkippableFact, TestCategory("Functional"), TestCategory("Persistence"), TestCategory("AdoNet")]
        public async Task CancellationToken_SqlServer_Test()
        {
            await CancellationTokenTest(sqlServerStorage, CancellationTestTimeoutLimit);
        }

        [SkippableFact, TestCategory("Functional"), TestCategory("Persistence"), TestCategory("MySql")]
        public async Task CancellationToken_MySql_Test()
        {
            await CancellationTokenTest(mySqlStorage, CancellationTestTimeoutLimit);
        }

        /// <summary>
        /// Launches <paramref name="countOfStreams"/> parallel tasks, each writing a random
        /// blob of <paramref name="streamSize"/> bytes and reading it back, returning whether
        /// the round-tripped bytes match.
        /// </summary>
        private static Task<bool>[] InsertAndReadStreamsAndCheckMatch(RelationalStorageForTesting sut, int streamSize, int countOfStreams, CancellationToken cancellationToken)
        {
            Skip.If(sut == null, "Database was not initialized correctly");

            //Stream in and steam out three binary streams in parallel.
            var streamChecks = new Task<bool>[countOfStreams];
            var sr = new SafeRandom();
            for(int i = 0; i < countOfStreams; ++i)
            {
                int streamId = i;
                streamChecks[i] = Task.Run(async () =>
                {
                    var rb = new byte[streamSize];
                    sr.NextBytes(rb);
                    await InsertIntoDatabaseUsingStream(sut, streamId, rb, cancellationToken);
                    var dataStreamFromTheDb = await ReadFromDatabaseUsingAsyncStream(sut, streamId, cancellationToken);
                    return dataStreamFromTheDb.StreamData.SequenceEqual(rb);
                });
            }

            return streamChecks;
        }

        /// <summary>
        /// Inserts one blob row keyed by <paramref name="streamId"/>.
        /// </summary>
        private static async Task InsertIntoDatabaseUsingStream(RelationalStorageForTesting sut, int streamId, byte[] dataToInsert, CancellationToken cancellationToken)
        {
            Skip.If(sut == null, "Database was not initialized correctly");

            //The dataToInsert could be inserted here directly, but it wouldn't be streamed.
            using (var ms = new MemoryStream(dataToInsert))
            {
                await sut.Storage.ExecuteAsync(sut.StreamTestInsert, command =>
                {
                    var p1 = command.CreateParameter();
                    p1.ParameterName = "Id";
                    p1.Value = streamId;
                    command.Parameters.Add(p1);

                    //MySQL does not support streams in and for the time being there
                    //is not a custom stream defined. For ideas, see http://dev.mysql.com/doc/refman/5.7/en/blob.html
                    //for string operations for blobs and http://rusanu.com/2010/12/28/download-and-upload-images-from-sql-server-with-asp-net-mvc/
                    //on how one could go defining one.
                    var p2 = command.CreateParameter();
                    p2.ParameterName = "StreamData";
                    p2.Value = dataToInsert;
                    p2.DbType = DbType.Binary;
                    p2.Size = dataToInsert.Length;
                    command.Parameters.Add(p2);
                }, cancellationToken, CommandBehavior.SequentialAccess).ConfigureAwait(false);
            }
        }

        /// <summary>
        /// Reads back one blob row as a stream and materializes it into a <see cref="StreamingTest"/>.
        /// </summary>
        private static async Task<StreamingTest> ReadFromDatabaseUsingAsyncStream(RelationalStorageForTesting sut, int streamId, CancellationToken cancellationToken)
        {
            Skip.If(sut == null, "Database was not initialized correctly");

            return (await sut.Storage.ReadAsync(sut.StreamTestSelect, command =>
            {
                var p = command.CreateParameter();
                p.ParameterName = "streamId";
                p.Value = streamId;
                command.Parameters.Add(p);
            },
            // NOTE: the reader's token parameter is deliberately named differently from the
            // method parameter; C# does not allow a lambda parameter to shadow it (CS0136).
            // (Fixed from the earlier misspelling "canellationToken".)
            async (selector, resultSetCount, readCancellationToken) =>
            {
                var streamSelector = (DbDataReader)selector;
                var id = await streamSelector.GetValueAsync<int>("Id");
                using(var ms = new MemoryStream())
                {
                    using(var downloadStream = streamSelector.GetStream(1, sut.Storage))
                    {
                        await downloadStream.CopyToAsync(ms);
                        return new StreamingTest { Id = id, StreamData = ms.ToArray() };
                    }
                }
            }, cancellationToken, CommandBehavior.SequentialAccess).ConfigureAwait(false)).Single();
        }

        /// <summary>
        /// Runs the provider's long-running cancellation query with a short token timeout and
        /// verifies the expected exception type, depending on whether the provider supports
        /// command cancellation.
        /// </summary>
        private static Task CancellationTokenTest(RelationalStorageForTesting sut, TimeSpan timeoutLimit)
        {
            Skip.If(sut == null, "Database was not initialized correctly");

            using (var tokenSource = new CancellationTokenSource(timeoutLimit))
            {
                try
                {
                    //Here one second is added to the task timeout limit in order to account for the delays.
                    //The delays are mainly in the underlying ADO.NET libraries and database.
                    var task = sut.Storage.ReadAsync<int>(sut.CancellationTestQuery, tokenSource.Token);
                    if(!task.Wait(timeoutLimit.Add(TimeSpan.FromSeconds(1))))
                    {
                        Assert.True(false, string.Format("Timeout limit {0} ms exceeded.", timeoutLimit.TotalMilliseconds));
                    }
                }
                catch(Exception ex)
                {
                    //There can be a DbException due to the operation being forcefully cancelled...
                    //... Unless this is a test for a provider which does not support for cancellation.
                    //The exception is wrapped into an AggregrateException due to the test arrangement of hard synchronous
                    //wait to force for actual cancellation check and remove "natural timeout" causes.
                    // "ex" can never be null inside a catch, so no null-conditional is needed here.
                    var innerException = ex.InnerException;
                    if(sut.Storage.SupportsCommandCancellation())
                    {
                        //If the operation is cancelled already before database calls, a OperationCancelledException
                        //will be thrown in any case.
                        Assert.True(innerException is DbException || innerException is OperationCanceledException, $"Unexpected exception: {ex}");
                    }
                    else
                    {
                        Assert.True(innerException is OperationCanceledException, $"Unexpected exception: {ex}");
                    }
                }
            }

            return Task.CompletedTask;
        }
    }
}
using System;
using System.Collections.Generic;
using Kemel.Orm.Entity;
using Kemel.Orm.Schema;
using System.Text;
using Kemel.Orm.Constants;
using Kemel.Orm.Providers;

namespace Kemel.Orm.QueryDef
{
    // List of TableQuery instances whose Add returns the added item,
    // enabling fluent "collection.Add(x).Configure(...)" chains.
    public class TableQueryCollection : List<TableQuery>
    {
        new public TableQuery Add(TableQuery item)
        {
            base.Add(item);
            return item;
        }
    }

    // Represents one table (or sub-query) participating in a Query: holds its schema,
    // alias, selected columns, aggregates and functions, and exposes a fluent API to
    // configure them. Instances are created via the static From(...) factories.
    public class TableQuery
    {
        #region Properties

        // Schema of the underlying table, when known; null for raw names and sub-queries.
        public TableSchema TableSchema { get; set; }
        // Raw table name, used only when no schema was resolved.
        public string TableName { get; set; }
        // Sub-query used in place of a physical table, when constructed from a Query.
        public Query SubQuery { get; set; }
        public string Alias { get; set; }

        private bool blnWithNolock = false;
        // When true the generated SQL should apply a NOLOCK hint (provider-specific).
        public bool NoLock
        {
            get { return this.blnWithNolock; }
            set { this.blnWithNolock = value; }
        }

        private FunctionCollection lstFunctions = null;
        // Lazily-created collection of SQL functions applied on this table.
        public FunctionCollection Functions
        {
            get
            {
                if (this.lstFunctions == null)
                    this.lstFunctions = new FunctionCollection();
                return this.lstFunctions;
            }
        }

        private AggregateCollection lstAggregateds = null;
        // Lazily-created collection of aggregate expressions (COUNT, SUM, ...).
        public AggregateCollection Aggregateds
        {
            get
            {
                if (this.lstAggregateds == null)
                    this.lstAggregateds = new AggregateCollection();
                return this.lstAggregateds;
            }
        }

        private ColumnQueryCollectoin lstColumns = null;
        // Lazily-created collection of columns selected from this table.
        // NOTE(review): the collection type name "ColumnQueryCollectoin" is misspelled,
        // but it is declared elsewhere in the project and cannot be renamed here.
        public ColumnQueryCollectoin ColumnsQuery
        {
            get
            {
                if (this.lstColumns == null)
                    this.lstColumns = new ColumnQueryCollectoin();
                return this.lstColumns;
            }
        }

        // The Query this table belongs to; fluent methods return it to continue the chain.
        public Query Parent { get; set; }

        // Schema name when available, otherwise the raw table name.
        public string PatternName
        {
            get { return (this.TableSchema == null ? this.TableName : this.TableSchema.Name); }
        }

        // Prefix used to qualify column references: the alias if set, else the table name.
        public string ColumnPrefix
        {
            get
            {
                if (string.IsNullOrEmpty(this.Alias))
                {
                    return this.PatternName;
                }
                else
                {
                    return this.Alias;
                }
            }
        }

        public bool HasAlias
        {
            get { return !string.IsNullOrEmpty(this.Alias); }
        }

        #endregion

        #region Static Methods

        // Factory overloads; constructors are protected so creation always goes through these.
        public static TableQuery From(TableSchema table, Query parent)
        {
            return new TableQuery(table, parent);
        }

        public static TableQuery From<TEtt>(Query parent) where TEtt : EntityBase
        {
            TableSchema table = SchemaContainer.GetSchema<TEtt>();
            return new TableQuery(table, parent);
        }

        public static TableQuery From(EntityBase entity, Query parent)
        {
            TableSchema table = SchemaContainer.GetSchema(entity);
            return new TableQuery(table, parent);
        }

        public static TableQuery From(string tableName, Query parent)
        {
            return new TableQuery(tableName, parent);
        }

        public static TableQuery From(Query subQuery, Query parent)
        {
            return new TableQuery(subQuery, parent);
        }

        #endregion

        #region Methods

        // Case-insensitive lookup of a column in this table's schema; null when absent
        // or when no schema is attached.
        public ColumnSchema FindColumnSchema(string columnName)
        {
            if (this.TableSchema != null)
            {
                columnName = columnName.ToUpper();
                foreach (ColumnSchema column in this.TableSchema.Columns)
                {
                    if (column.Name.ToUpper().Equals(columnName))
                        return column;
                }
            }
            return null;
        }

        // Finds an already-registered ColumnQuery by name; when not found a NEW
        // ColumnQuery is returned (it is not added to ColumnsQuery here).
        public ColumnQuery FindColumnQuery(string columnName)
        {
            columnName = columnName.ToUpper();
            foreach (ColumnQuery column in this.ColumnsQuery)
            {
                if (column.PatternColumnName.ToUpper().Equals(columnName))
                    return column;
            }
            return new ColumnQuery(columnName, this);
        }

        // As above, but starting from a schema column; delegates to the parent Query
        // when the column belongs to a different table.
        public ColumnQuery FindColumnQuery(Kemel.Orm.Schema.ColumnSchema columnFrom)
        {
            if (columnFrom.Parent.Name == this.TableSchema.Name)
            {
                string columnName = columnFrom.Name.ToUpper();
                foreach (ColumnQuery column in this.ColumnsQuery)
                {
                    if (column.PatternColumnName.ToUpper().Equals(columnName))
                        return column;
                }
            }
            else
            {
                return this.Parent.FindColumnQueryInTables(columnFrom);
            }
            return new ColumnQuery(columnFrom, this);
        }

        // Resolves the named table through the parent query, then searches its columns.
        public ColumnQuery FindColumnQuery(string tableName, string columnName)
        {
            TableQuery tq = this.Parent.FindTableQuery(tableName);
            if (tq == null)
                throw new OrmException(Messages.TableDoesNotExistInQuery);

            columnName = columnName.ToUpper();
            foreach (ColumnQuery column in tq.ColumnsQuery)
            {
                if (column.PatternColumnName.ToUpper().Equals(columnName))
                    return column;
            }
            return new ColumnQuery(columnName, tq);
        }

        // As above for a schema column; throws when the column's owning table does not
        // match the resolved table.
        public ColumnQuery FindColumnQuery(string tableName, Kemel.Orm.Schema.ColumnSchema columnFrom)
        {
            TableQuery tq = this.Parent.FindTableQuery(tableName);
            if (tq == null)
                throw new OrmException(Messages.TableDoesNotExistInQuery);

            if (columnFrom.Parent.Name == tq.TableSchema.Name)
            {
                string columnName = columnFrom.Name.ToUpper();
                foreach (ColumnQuery column in tq.ColumnsQuery)
                {
                    if (column.PatternColumnName.ToUpper().Equals(columnName))
                        return column;
                }
            }
            else
            {
                throw new OrmException(Messages.DontHaveColumnInColumnCollection);
            }
            return new ColumnQuery(columnFrom, tq);
        }

        protected TableQuery(TableSchema tableSchema, Query parent)
        {
            this.TableSchema = tableSchema;
            this.Parent = parent;
        }

        // Tries to resolve a schema for the name; falls back to a raw table name.
        protected TableQuery(string tableName, Query parent)
        {
            this.TableSchema = SchemaContainer.GetSchema(tableName);
            if (this.TableSchema == null)
                this.TableName = tableName;
            this.Parent = parent;
        }

        protected TableQuery(Query subQuery, Query parent)
        {
            this.SubQuery = subQuery;
            this.Parent = parent;
        }

        // Selects every schema column, or "*" when no schema is attached.
        public Query AllColumns()
        {
            if (this.TableSchema != null)
            {
                foreach (ColumnSchema columnSchema in this.TableSchema.Columns)
                {
                    this.Column(columnSchema);
                }
            }
            else
            {
                this.Column("*");
            }
            return this.Parent;
        }

        public Query Columns(params string[] columns)
        {
            foreach (string column in columns)
            {
                this.Column(column);
            }
            return this.Parent;
        }

        public ColumnQuery Column(string columnName)
        {
            return this.ColumnsQuery.Add(new ColumnQuery(columnName, this));
        }

        public ColumnQuery Column(ColumnSchema columnSchema)
        {
            return this.ColumnsQuery.Add(new ColumnQuery(columnSchema, this));
        }

        public TableQuery WithNoLock()
        {
            this.NoLock = true;
            return this;
        }

        public TableQuery As(string alias)
        {
            this.Alias = alias;
            return this;
        }

        // True when the (upper-cased) argument matches this table's name or alias.
        // NOTE(review): callers appear to pass an already upper-cased name — confirm.
        public bool EqualsTableNameOrAlias(string tableName)
        {
            if (this.TableSchema == null)
            {
                if (!string.IsNullOrEmpty(this.TableName) && this.TableName.ToUpper().Equals(tableName))
                    return true;
            }
            else
            {
                if (this.TableSchema.Name.ToUpper().Equals(tableName))
                    return true;
            }
            if (!string.IsNullOrEmpty(this.Alias) && this.Alias.ToUpper().Equals(tableName))
                return true;
            return false;
        }

        public bool EqualsSchemaTableName(string tableName)
        {
            if (this.TableSchema != null)
            {
                return this.TableSchema.Name.ToUpper().Equals(tableName);
            }
            else
            {
                return false;
            }
        }

        // Ends the fluent table configuration and returns to the owning Query.
        public Query End()
        {
            return this.Parent;
        }

        #region Aggregate

        // The four sub-regions below are parallel overload sets: each aggregate
        // (COUNT, SUM, AVG, MIN, MAX, STDEV, STDEVP, VAR, VARP, CONVERT) can take
        // an arbitrary parameter, a column name, a ColumnSchema, or a nested Aggregate.
        // All register the aggregate on this table and return it for further chaining.

        #region Methods Function Parameter

        public Aggregate Count(object parameter)
        {
            return this.Aggregateds.Add(Aggregate.Count(parameter, this));
        }

        public Aggregate Sum(object parameter)
        {
            return this.Aggregateds.Add(Aggregate.Sum(parameter, this));
        }

        public Aggregate Avg(object parameter)
        {
            return this.Aggregateds.Add(Aggregate.Avg(parameter, this));
        }

        public Aggregate Min(object parameter)
        {
            return this.Aggregateds.Add(Aggregate.Min(parameter, this));
        }

        public Aggregate Max(object parameter)
        {
            return this.Aggregateds.Add(Aggregate.Max(parameter, this));
        }

        public Aggregate StDev(object parameter)
        {
            return this.Aggregateds.Add(Aggregate.StDev(parameter, this));
        }

        public Aggregate StDevP(object parameter)
        {
            return this.Aggregateds.Add(Aggregate.StDevP(parameter, this));
        }

        public Aggregate Var(object parameter)
        {
            return this.Aggregateds.Add(Aggregate.Var(parameter, this));
        }

        public Aggregate VarP(object parameter)
        {
            return this.Aggregateds.Add(Aggregate.VarP(parameter, this));
        }

        public Aggregate Convert(object parameter)
        {
            return this.Aggregateds.Add(Aggregate.Convert(parameter, this));
        }

        #endregion

        #region Methods Function Column Name

        public Aggregate CountColumn(string columnName)
        {
            return this.Aggregateds.Add(Aggregate.CountColumn(columnName, this));
        }

        public Aggregate SumColumn(string columnName)
        {
            return this.Aggregateds.Add(Aggregate.SumColumn(columnName, this));
        }

        public Aggregate AvgColumn(string columnName)
        {
            return this.Aggregateds.Add(Aggregate.AvgColumn(columnName, this));
        }

        public Aggregate MinColumn(string columnName)
        {
            return this.Aggregateds.Add(Aggregate.MinColumn(columnName, this));
        }

        public Aggregate MaxColumn(string columnName)
        {
            return this.Aggregateds.Add(Aggregate.MaxColumn(columnName, this));
        }

        public Aggregate StDevColumn(string columnName)
        {
            return this.Aggregateds.Add(Aggregate.StDevColumn(columnName, this));
        }

        public Aggregate StDevPColumn(string columnName)
        {
            return this.Aggregateds.Add(Aggregate.StDevPColumn(columnName, this));
        }

        public Aggregate VarColumn(string columnName)
        {
            return this.Aggregateds.Add(Aggregate.VarColumn(columnName, this));
        }

        public Aggregate VarPColumn(string columnName)
        {
            return this.Aggregateds.Add(Aggregate.VarPColumn(columnName, this));
        }

        public Aggregate ConvertColumn(string columnName)
        {
            return this.Aggregateds.Add(Aggregate.ConvertColumn(columnName, this));
        }

        #endregion

        #region Methods Function Column

        public Aggregate Count(ColumnSchema column)
        {
            return this.Aggregateds.Add(Aggregate.CountColumn(column, this));
        }

        public Aggregate Sum(ColumnSchema column)
        {
            return this.Aggregateds.Add(Aggregate.SumColumn(column, this));
        }

        public Aggregate Avg(ColumnSchema column)
        {
            return this.Aggregateds.Add(Aggregate.AvgColumn(column, this));
        }

        public Aggregate Min(ColumnSchema column)
        {
            return this.Aggregateds.Add(Aggregate.MinColumn(column, this));
        }

        public Aggregate Max(ColumnSchema column)
        {
            return this.Aggregateds.Add(Aggregate.MaxColumn(column, this));
        }

        public Aggregate StDev(ColumnSchema column)
        {
            return this.Aggregateds.Add(Aggregate.StDevColumn(column, this));
        }

        public Aggregate StDevP(ColumnSchema column)
        {
            return this.Aggregateds.Add(Aggregate.StDevPColumn(column, this));
        }

        public Aggregate Var(ColumnSchema column)
        {
            return this.Aggregateds.Add(Aggregate.VarColumn(column, this));
        }

        public Aggregate VarP(ColumnSchema column)
        {
            return this.Aggregateds.Add(Aggregate.VarPColumn(column, this));
        }

        public Aggregate Convert(ColumnSchema column)
        {
            return this.Aggregateds.Add(Aggregate.ConvertColumn(column, this));
        }

        #endregion

        #region Methods Function Sub-Aggregate

        public Aggregate Count(Aggregate subAggregate)
        {
            return this.Aggregateds.Add(Aggregate.Count(subAggregate, this));
        }

        public Aggregate Sum(Aggregate subAggregate)
        {
            return this.Aggregateds.Add(Aggregate.Sum(subAggregate, this));
        }

        public Aggregate Avg(Aggregate subAggregate)
        {
            return this.Aggregateds.Add(Aggregate.Avg(subAggregate, this));
        }

        public Aggregate Min(Aggregate subAggregate)
        {
            return this.Aggregateds.Add(Aggregate.Min(subAggregate, this));
        }

        public Aggregate Max(Aggregate subAggregate)
        {
            return this.Aggregateds.Add(Aggregate.Max(subAggregate, this));
        }

        public Aggregate StDev(Aggregate subAggregate)
        {
            return this.Aggregateds.Add(Aggregate.StDev(subAggregate, this));
        }

        public Aggregate StDevP(Aggregate subAggregate)
        {
            return this.Aggregateds.Add(Aggregate.StDevP(subAggregate, this));
        }

        public Aggregate Var(Aggregate subAggregate)
        {
            return this.Aggregateds.Add(Aggregate.Var(subAggregate, this));
        }

        public Aggregate VarP(Aggregate subAggregate)
        {
            return this.Aggregateds.Add(Aggregate.VarP(subAggregate, this));
        }

        public Aggregate Convert(Aggregate subAggregate)
        {
            return this.Aggregateds.Add(Aggregate.Convert(subAggregate, this));
        }

        #endregion

        #endregion

        #endregion

        // Downcast helper; returns null when this instance is not a QueryJoin.
        public QueryJoin AsQueryJoin()
        {
            return this as QueryJoin;
        }
    }
}
//
// Encog(tm) Core v3.2 - .Net Version
// http://www.heatonresearch.com/encog/
//
// Copyright 2008-2014 Heaton Research, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information on Heaton Research copyrights, licenses
// and trademarks visit:
// http://www.heatonresearch.com/copyright
//
using Encog.MathUtil.Error;
using Encog.MathUtil.LIBSVM;
using Encog.ML.Data;
using Encog.ML.Train;
using Encog.Neural.Networks.Training.Propagation;
using Encog.Util;
using Encog.Util.Logging;

namespace Encog.ML.SVM.Training
{
    /// <summary>
    /// Trainer for Support Vector Machine networks. A single call to
    /// <see cref="Iteration"/> either trains the underlying LIBSVM model
    /// (when <see cref="Fold"/> is 0 or 1) or performs k-fold cross
    /// validation to estimate the error for the current C/gamma pair.
    /// </summary>
    public class SVMTrain : BasicTraining
    {
        /// <summary>
        /// Default starting exponent for the C search.
        /// </summary>
        public const double DefaultConstBegin = -5;

        /// <summary>
        /// Default ending exponent for the C search.
        /// </summary>
        public const double DefaultConstEnd = 15;

        /// <summary>
        /// Default step for the C search.
        /// </summary>
        public const double DefaultConstStep = 2;

        /// <summary>
        /// Default starting exponent for the gamma search.
        /// </summary>
        public const double DefaultGammaBegin = -10;

        /// <summary>
        /// Default ending exponent for the gamma search.
        /// </summary>
        public const double DefaultGammaEnd = 10;

        /// <summary>
        /// Default step for the gamma search.
        /// </summary>
        public const double DefaultGammaStep = 1;

        /// <summary>
        /// The SVM being trained.
        /// </summary>
        private readonly SupportVectorMachine _network;

        /// <summary>
        /// The LIBSVM problem encoded from the training set.
        /// </summary>
        private readonly svm_problem _problem;

        /// <summary>
        /// Current value of the C constant.
        /// </summary>
        private double _c;

        /// <summary>
        /// Number of cross-validation folds; 0 or 1 means plain training.
        /// </summary>
        private int _fold;

        /// <summary>
        /// Current gamma value.
        /// </summary>
        private double _gamma;

        /// <summary>
        /// True once an iteration has completed.
        /// </summary>
        private bool _trainingDone;

        /// <summary>
        /// Construct a trainer for an SVM network.
        /// </summary>
        /// <param name="method">The SVM to train.</param>
        /// <param name="dataSet">The training data.</param>
        public SVMTrain(SupportVectorMachine method, IMLDataSet dataSet)
            : base(TrainingImplementationType.OnePass)
        {
            _network = method;
            Training = dataSet;
            _problem = EncodeSVMProblem.Encode(dataSet, 0);

            _fold = 0;
            _trainingDone = false;

            // Reasonable starting hyperparameters: gamma = 1/inputs, C = 1.
            _gamma = 1.0d/_network.InputCount;
            _c = 1.0d;
        }

        /// <inheritdoc/>
        public sealed override bool CanContinue
        {
            get { return false; }
        }

        /// <summary>
        /// The C constant. Must be a positive value above the framework's
        /// double-equality tolerance.
        /// </summary>
        public double C
        {
            get { return _c; }
            set
            {
                if (value <= 0 || value < EncogFramework.DefaultDoubleEqual)
                {
                    throw new EncogError("SVM training cannot use a c value less than zero.");
                }

                _c = value;
            }
        }

        /// <summary>
        /// The number of folds used for cross validation; values greater
        /// than 1 switch <see cref="Iteration"/> into cross-validation mode.
        /// </summary>
        public int Fold
        {
            get { return _fold; }
            set { _fold = value; }
        }

        /// <summary>
        /// The gamma value. Must be a positive value above the framework's
        /// double-equality tolerance.
        /// </summary>
        public double Gamma
        {
            get { return _gamma; }
            set
            {
                if (value <= 0 || value < EncogFramework.DefaultDoubleEqual)
                {
                    throw new EncogError("SVM training cannot use a gamma value less than zero.");
                }

                _gamma = value;
            }
        }

        /// <inheritdoc/>
        public override IMLMethod Method
        {
            get { return _network; }
        }

        /// <value>The encoded LIBSVM problem being trained.</value>
        public svm_problem Problem
        {
            get { return _problem; }
        }

        /// <value>True once training has completed.</value>
        public override bool TrainingDone
        {
            get { return _trainingDone; }
        }

        /// <summary>
        /// Score a set of cross-validation predictions: RMS-style error for
        /// regression SVM types, percent-correct for classification.
        /// </summary>
        /// <param name="param">The LIBSVM parameters (selects SVM type).</param>
        /// <param name="prob">The problem holding the ideal outputs.</param>
        /// <param name="target">Predicted outputs, one per problem row.</param>
        /// <returns>The calculated error.</returns>
        private static double Evaluate(svm_parameter param, svm_problem prob, double[] target)
        {
            var calc = new ErrorCalculation();

            bool isRegression = (param.svm_type == svm_parameter.EPSILON_SVR)
                                || (param.svm_type == svm_parameter.NU_SVR);

            if (isRegression)
            {
                // Regression: accumulate (actual, ideal) pairs into the
                // error calculation and report its aggregate error.
                for (int index = 0; index < prob.l; index++)
                {
                    calc.UpdateError(target[index], prob.y[index]);
                }

                return calc.Calculate();
            }

            // Classification: report the percentage of exact matches.
            int correct = 0;

            for (int index = 0; index < prob.l; index++)
            {
                if (target[index] == prob.y[index])
                {
                    correct++;
                }
            }

            return Format.HundredPercent*correct/prob.l;
        }

        /// <summary>
        /// Perform either a train or a cross validation. If the folds property is
        /// greater than 1 then cross validation will be done. Cross validation does
        /// not produce a usable model, but it does set the error.
        /// If you are cross validating try C and Gamma values until you have a good
        /// error rate. Then use those values to train, producing the final model.
        /// </summary>
        public sealed override void Iteration()
        {
            svm_parameter parameters = _network.Params;
            parameters.C = _c;
            parameters.gamma = _gamma;

            EncogLogging.Log(EncogLogging.LevelInfo,
                             "Training with parameters C = " + _c + ", gamma = " + _gamma);

            if (_fold > 1)
            {
                // Cross validation: estimates the error for the current
                // C/gamma pair but leaves no usable model behind.
                var predictions = new double[_problem.l];
                svm.svm_cross_validation(_problem, parameters, _fold, predictions);
                _network.Model = null;
                Error = Evaluate(parameters, _problem, predictions);
            }
            else
            {
                // Plain training: build the model, then measure its error
                // against the full training set.
                _network.Model = svm.svm_train(_problem, parameters);
                Error = _network.CalculateError(Training);
            }

            _trainingDone = true;
        }

        /// <inheritdoc/>
        public sealed override TrainingContinuation Pause()
        {
            return null;
        }

        /// <inheritdoc/>
        public override void Resume(TrainingContinuation state)
        {
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Reflection;
using System.Text;
using Microsoft.Xml;
using System.Collections.Generic;
using Microsoft.Xml.Serialization;
using System.Security;

#if !NET_NATIVE
using ExtensionDataObject = System.Object;
#endif

namespace System.Runtime.Serialization
{
    // Write-side serialization context for DataContractSerializer: tracks
    // object identity (IsReference), cyclic-reference detection, known-type
    // scopes, and provides the primitive/xsi:type writing helpers used by
    // generated serialization code.
#if USE_REFEMIT || NET_NATIVE
    public class XmlObjectSerializerWriteContext : XmlObjectSerializerContext
#else
    internal class XmlObjectSerializerWriteContext : XmlObjectSerializerContext
#endif
    {
        // Stack of by-value objects currently being serialized; used to detect cycles.
        private ObjectReferenceStack _byValObjectsInScope = new ObjectReferenceStack();
        // Lazily-created writer wrapper for IXmlSerializable payloads.
        private XmlSerializableWriter _xmlSerializableWriter;
        // Cycle checking only kicks in past this writer depth (cheap common case).
        private const int depthToCheckCyclicReference = 512;
        // Maps already-serialized objects to ids for IsReference z:Id/z:Ref emission.
        private ObjectToIdCache _serializedObjects;
        private bool _isGetOnlyCollection;
        private readonly bool _unsafeTypeForwardingEnabled;
        protected bool serializeReadOnlyTypes;
        protected bool preserveObjectReferences;

        // Factory: picks the "complex" context when object references must be
        // preserved or a surrogate provider is in play.
        internal static XmlObjectSerializerWriteContext CreateContext(DataContractSerializer serializer, DataContract rootTypeDataContract, DataContractResolver dataContractResolver)
        {
            return (serializer.PreserveObjectReferences || serializer.SerializationSurrogateProvider != null)
                ? new XmlObjectSerializerWriteContextComplex(serializer, rootTypeDataContract, dataContractResolver)
                : new XmlObjectSerializerWriteContext(serializer, rootTypeDataContract, dataContractResolver);
        }

        protected XmlObjectSerializerWriteContext(DataContractSerializer serializer, DataContract rootTypeDataContract, DataContractResolver resolver)
            : base(serializer, rootTypeDataContract, resolver)
        {
            this.serializeReadOnlyTypes = serializer.SerializeReadOnlyTypes;
            // Known types restricts the set of types that can be deserialized
            _unsafeTypeForwardingEnabled = true;
        }

        internal XmlObjectSerializerWriteContext(XmlObjectSerializer serializer, int maxItemsInObjectGraph, StreamingContext streamingContext, bool ignoreExtensionDataObject)
            : base(serializer, maxItemsInObjectGraph, streamingContext, ignoreExtensionDataObject)
        {
            // Known types restricts the set of types that can be deserialized
            _unsafeTypeForwardingEnabled = true;
        }

        // Lazily-initialized object-to-id cache used by OnHandleIsReference.
#if USE_REFEMIT || NET_NATIVE
        internal ObjectToIdCache SerializedObjects
#else
        protected ObjectToIdCache SerializedObjects
#endif
        {
            get
            {
                if (_serializedObjects == null)
                    _serializedObjects = new ObjectToIdCache();
                return _serializedObjects;
            }
        }

        internal override bool IsGetOnlyCollection
        {
            get { return _isGetOnlyCollection; }
            set { _isGetOnlyCollection = value; }
        }

        internal bool SerializeReadOnlyTypes
        {
            get { return this.serializeReadOnlyTypes; }
        }

        internal bool UnsafeTypeForwardingEnabled
        {
            get { return _unsafeTypeForwardingEnabled; }
        }

#if USE_REFEMIT
        public void StoreIsGetOnlyCollection()
#else
        internal void StoreIsGetOnlyCollection()
#endif
        {
            _isGetOnlyCollection = true;
        }

        // Serialize with cyclic-reference bookkeeping around the actual write.
#if USE_REFEMIT
        public void InternalSerializeReference(XmlWriterDelegator xmlWriter, object obj, bool isDeclaredType, bool writeXsiType, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle)
#else
        internal void InternalSerializeReference(XmlWriterDelegator xmlWriter, object obj, bool isDeclaredType, bool writeXsiType, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle)
#endif
        {
            if (!OnHandleReference(xmlWriter, obj, true /*canContainCyclicReference*/))
                InternalSerialize(xmlWriter, obj, isDeclaredType, writeXsiType, declaredTypeID, declaredTypeHandle);
            OnEndHandleReference(xmlWriter, obj, true /*canContainCyclicReference*/);
        }

        // Core dispatch: decides whether the value can be written with the
        // declared contract or needs an xsi:type attribute (runtime type
        // differs from the declared type, or the caller forced writeXsiType).
#if USE_REFEMIT
        public virtual void InternalSerialize(XmlWriterDelegator xmlWriter, object obj, bool isDeclaredType, bool writeXsiType, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle)
#else
        internal virtual void InternalSerialize(XmlWriterDelegator xmlWriter, object obj, bool isDeclaredType, bool writeXsiType, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle)
#endif
        {
            if (writeXsiType)
            {
                Type declaredType = Globals.TypeOfObject;
                SerializeWithXsiType(xmlWriter, obj, obj.GetType().TypeHandle, null/*type*/, -1, declaredType.TypeHandle, declaredType);
            }
            else if (isDeclaredType)
            {
                DataContract contract = GetDataContract(declaredTypeID, declaredTypeHandle);
                SerializeWithoutXsiType(contract, xmlWriter, obj, declaredTypeHandle);
            }
            else
            {
                RuntimeTypeHandle objTypeHandle = obj.GetType().TypeHandle;
                if (declaredTypeHandle.GetHashCode() == objTypeHandle.GetHashCode()) // semantically the same as Value == Value; Value is not available in SL
                {
                    DataContract dataContract = (declaredTypeID >= 0)
                        ? GetDataContract(declaredTypeID, declaredTypeHandle)
                        : GetDataContract(declaredTypeHandle, null /*type*/);
                    SerializeWithoutXsiType(dataContract, xmlWriter, obj, declaredTypeHandle);
                }
                else
                {
                    SerializeWithXsiType(xmlWriter, obj, objTypeHandle, null /*type*/, declaredTypeID, declaredTypeHandle, Type.GetTypeFromHandle(declaredTypeHandle));
                }
            }
        }

        // Writes a value whose runtime type matches its declared contract;
        // pushes the contract's known types for the duration of the write.
        internal void SerializeWithoutXsiType(DataContract dataContract, XmlWriterDelegator xmlWriter, object obj, RuntimeTypeHandle declaredTypeHandle)
        {
            if (OnHandleIsReference(xmlWriter, dataContract, obj))
                return;
            if (dataContract.KnownDataContracts != null)
            {
                scopedKnownTypes.Push(dataContract.KnownDataContracts);
                WriteDataContractValue(dataContract, xmlWriter, obj, declaredTypeHandle);
                scopedKnownTypes.Pop();
            }
            else
            {
                WriteDataContractValue(dataContract, xmlWriter, obj, declaredTypeHandle);
            }
        }

        // Top-level variant: the declared type comes from the root contract.
        internal virtual void SerializeWithXsiTypeAtTopLevel(DataContract dataContract, XmlWriterDelegator xmlWriter, object obj, RuntimeTypeHandle originalDeclaredTypeHandle, Type graphType)
        {
            bool verifyKnownType = false;
            Type declaredType = rootTypeDataContract.UnderlyingType;

            if (declaredType.GetTypeInfo().IsInterface && CollectionDataContract.IsCollectionInterface(declaredType))
            {
                if (DataContractResolver != null)
                {
                    WriteResolvedTypeInfo(xmlWriter, graphType, declaredType);
                }
            }
            else if (!declaredType.IsArray) //Array covariance is not supported in XSD. If declared type is array do not write xsi:type. Instead write xsi:type for each item
            {
                verifyKnownType = WriteTypeInfo(xmlWriter, dataContract, rootTypeDataContract);
            }
            SerializeAndVerifyType(dataContract, xmlWriter, obj, verifyKnownType, originalDeclaredTypeHandle, declaredType);
        }

        // Writes a value whose runtime type differs from the declared type,
        // emitting xsi:type (or resolver-provided) type information.
        // NOTE: braces below deliberately open inside one #if arm and close
        // after #endif; both preprocessor variants produce a balanced body.
        protected virtual void SerializeWithXsiType(XmlWriterDelegator xmlWriter, object obj, RuntimeTypeHandle objectTypeHandle, Type objectType, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle, Type declaredType)
        {
            bool verifyKnownType = false;
#if !NET_NATIVE
            DataContract dataContract;
            if (declaredType.GetTypeInfo().IsInterface && CollectionDataContract.IsCollectionInterface(declaredType))
            {
                dataContract = GetDataContractSkipValidation(DataContract.GetId(objectTypeHandle), objectTypeHandle, objectType);
                if (OnHandleIsReference(xmlWriter, dataContract, obj))
                    return;
                dataContract = GetDataContract(declaredTypeHandle, declaredType);
#else
            DataContract dataContract = DataContract.GetDataContractFromGeneratedAssembly(declaredType);
            if (dataContract.TypeIsInterface && dataContract.TypeIsCollectionInterface)
            {
                if (OnHandleIsReference(xmlWriter, dataContract, obj))
                    return;
                if (this.Mode == SerializationMode.SharedType && dataContract.IsValidContract(this.Mode))
                    dataContract = dataContract.GetValidContract(this.Mode);
                else
                    dataContract = GetDataContract(declaredTypeHandle, declaredType);
#endif
                if (!WriteClrTypeInfo(xmlWriter, dataContract) && DataContractResolver != null)
                {
                    if (objectType == null)
                    {
                        objectType = Type.GetTypeFromHandle(objectTypeHandle);
                    }
                    WriteResolvedTypeInfo(xmlWriter, objectType, declaredType);
                }
            }
            else if (declaredType.IsArray)//Array covariance is not supported in XSD. If declared type is array do not write xsi:type. Instead write xsi:type for each item
            {
                // A call to OnHandleIsReference is not necessary here -- arrays cannot be IsReference
                dataContract = GetDataContract(objectTypeHandle, objectType);
                WriteClrTypeInfo(xmlWriter, dataContract);
                dataContract = GetDataContract(declaredTypeHandle, declaredType);
            }
            else
            {
                dataContract = GetDataContract(objectTypeHandle, objectType);
                if (OnHandleIsReference(xmlWriter, dataContract, obj))
                    return;
                if (!WriteClrTypeInfo(xmlWriter, dataContract))
                {
                    DataContract declaredTypeContract = (declaredTypeID >= 0)
                        ? GetDataContract(declaredTypeID, declaredTypeHandle)
                        : GetDataContract(declaredTypeHandle, declaredType);
                    verifyKnownType = WriteTypeInfo(xmlWriter, dataContract, declaredTypeContract);
                }
            }
            SerializeAndVerifyType(dataContract, xmlWriter, obj, verifyKnownType, declaredTypeHandle, declaredType);
        }

        // IsReference handling: first sighting of an object writes z:Id and
        // returns false (serialize the body); a repeat writes z:Ref and
        // returns true (caller must skip the body).
        internal bool OnHandleIsReference(XmlWriterDelegator xmlWriter, DataContract contract, object obj)
        {
            if (!contract.IsReference || _isGetOnlyCollection)
            {
                return false;
            }

            bool isNew = true;
            int objectId = SerializedObjects.GetId(obj, ref isNew);
            _byValObjectsInScope.EnsureSetAsIsReference(obj);
            if (isNew)
            {
                xmlWriter.WriteAttributeString(Globals.SerPrefix, DictionaryGlobals.IdLocalName, DictionaryGlobals.SerializationNamespace, string.Format(CultureInfo.InvariantCulture, "{0}{1}", "i", objectId));
                return false;
            }
            else
            {
                xmlWriter.WriteAttributeString(Globals.SerPrefix, DictionaryGlobals.RefLocalName, DictionaryGlobals.SerializationNamespace, string.Format(CultureInfo.InvariantCulture, "{0}{1}", "i", objectId));
                return true;
            }
        }

        // Writes the value after (optionally) verifying that the runtime
        // type is a known type of the declared type.
        protected void SerializeAndVerifyType(DataContract dataContract, XmlWriterDelegator xmlWriter, object obj, bool verifyKnownType, RuntimeTypeHandle declaredTypeHandle, Type declaredType)
        {
            bool knownTypesAddedInCurrentScope = false;
            if (dataContract.KnownDataContracts != null)
            {
                scopedKnownTypes.Push(dataContract.KnownDataContracts);
                knownTypesAddedInCurrentScope = true;
            }
#if !NET_NATIVE
            if (verifyKnownType)
            {
                if (!IsKnownType(dataContract, declaredType))
                {
                    // Fall back to resolving by stable name before rejecting the type.
                    DataContract knownContract = ResolveDataContractFromKnownTypes(dataContract.StableName.Name, dataContract.StableName.Namespace, null /*memberTypeContract*/);
                    if (knownContract == null || knownContract.UnderlyingType != dataContract.UnderlyingType)
                    {
                        throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(string.Format(SRSerialization.DcTypeNotFoundOnSerialize, DataContract.GetClrTypeFullName(dataContract.UnderlyingType), dataContract.StableName.Name, dataContract.StableName.Namespace)));
                    }
                }
            }
#endif
            WriteDataContractValue(dataContract, xmlWriter, obj, declaredTypeHandle);
            if (knownTypesAddedInCurrentScope)
            {
                scopedKnownTypes.Pop();
            }
        }

        // CLR type info hooks: no-ops here; overridden by contexts that emit
        // CLR type/assembly information (e.g. NetDataContractSerializer-style).
        internal virtual bool WriteClrTypeInfo(XmlWriterDelegator xmlWriter, DataContract dataContract)
        {
            return false;
        }

        internal virtual bool WriteClrTypeInfo(XmlWriterDelegator xmlWriter, string clrTypeName, string clrAssemblyName)
        {
            return false;
        }

        internal virtual bool WriteClrTypeInfo(XmlWriterDelegator xmlWriter, Type dataContractType, string clrTypeName, string clrAssemblyName)
        {
            return false;
        }

        // --- Primitive write helpers; the (name, ns) overloads also write the
        // --- enclosing element and emit a nil element for null values.

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteAnyType(XmlWriterDelegator xmlWriter, object value)
#else
        internal virtual void WriteAnyType(XmlWriterDelegator xmlWriter, object value)
#endif
        {
            xmlWriter.WriteAnyType(value);
        }

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteString(XmlWriterDelegator xmlWriter, string value)
#else
        internal virtual void WriteString(XmlWriterDelegator xmlWriter, string value)
#endif
        {
            xmlWriter.WriteString(value);
        }

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteString(XmlWriterDelegator xmlWriter, string value, XmlDictionaryString name, XmlDictionaryString ns)
#else
        internal virtual void WriteString(XmlWriterDelegator xmlWriter, string value, XmlDictionaryString name, XmlDictionaryString ns)
#endif
        {
            if (value == null)
                WriteNull(xmlWriter, typeof(string), true/*isMemberTypeSerializable*/, name, ns);
            else
            {
                xmlWriter.WriteStartElementPrimitive(name, ns);
                xmlWriter.WriteString(value);
                xmlWriter.WriteEndElementPrimitive();
            }
        }

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteBase64(XmlWriterDelegator xmlWriter, byte[] value)
#else
        internal virtual void WriteBase64(XmlWriterDelegator xmlWriter, byte[] value)
#endif
        {
            xmlWriter.WriteBase64(value);
        }

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteBase64(XmlWriterDelegator xmlWriter, byte[] value, XmlDictionaryString name, XmlDictionaryString ns)
#else
        internal virtual void WriteBase64(XmlWriterDelegator xmlWriter, byte[] value, XmlDictionaryString name, XmlDictionaryString ns)
#endif
        {
            if (value == null)
                WriteNull(xmlWriter, typeof(byte[]), true/*isMemberTypeSerializable*/, name, ns);
            else
            {
                xmlWriter.WriteStartElementPrimitive(name, ns);
                xmlWriter.WriteBase64(value);
                xmlWriter.WriteEndElementPrimitive();
            }
        }

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteUri(XmlWriterDelegator xmlWriter, Uri value)
#else
        internal virtual void WriteUri(XmlWriterDelegator xmlWriter, Uri value)
#endif
        {
            xmlWriter.WriteUri(value);
        }

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteUri(XmlWriterDelegator xmlWriter, Uri value, XmlDictionaryString name, XmlDictionaryString ns)
#else
        internal virtual void WriteUri(XmlWriterDelegator xmlWriter, Uri value, XmlDictionaryString name, XmlDictionaryString ns)
#endif
        {
            if (value == null)
                WriteNull(xmlWriter, typeof(Uri), true/*isMemberTypeSerializable*/, name, ns);
            else
            {
                xmlWriter.WriteStartElementPrimitive(name, ns);
                xmlWriter.WriteUri(value);
                xmlWriter.WriteEndElementPrimitive();
            }
        }

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteQName(XmlWriterDelegator xmlWriter, XmlQualifiedName value)
#else
        internal virtual void WriteQName(XmlWriterDelegator xmlWriter, XmlQualifiedName value)
#endif
        {
            xmlWriter.WriteQName(value);
        }

#if USE_REFEMIT || NET_NATIVE
        public virtual void WriteQName(XmlWriterDelegator xmlWriter, XmlQualifiedName value, XmlDictionaryString name, XmlDictionaryString ns)
#else
        internal virtual void WriteQName(XmlWriterDelegator xmlWriter, XmlQualifiedName value, XmlDictionaryString name, XmlDictionaryString ns)
#endif
        {
            if (value == null)
                WriteNull(xmlWriter, typeof(XmlQualifiedName), true/*isMemberTypeSerializable*/, name, ns);
            else
            {
                // QNames need a prefixed element when a non-empty namespace is present.
                if (ns != null && ns.Value != null && ns.Value.Length > 0)
                    xmlWriter.WriteStartElement(Globals.ElementPrefix, name, ns);
                else
                    xmlWriter.WriteStartElement(name, ns);
                xmlWriter.WriteQName(value);
                xmlWriter.WriteEndElement();
            }
        }

        // Root-object setup: declares the xsi namespace and primes reference tracking.
        internal void HandleGraphAtTopLevel(XmlWriterDelegator writer, object obj, DataContract contract)
        {
            writer.WriteXmlnsAttribute(Globals.XsiPrefix, DictionaryGlobals.SchemaInstanceNamespace);
            OnHandleReference(writer, obj, true /*canContainReferences*/);
        }

        // Cycle detection: only active beyond depthToCheckCyclicReference.
        // Always returns false here ("not already serialized"); the complex
        // context overrides this for PreserveObjectReferences.
        internal virtual bool OnHandleReference(XmlWriterDelegator xmlWriter, object obj, bool canContainCyclicReference)
        {
            if (xmlWriter.depth < depthToCheckCyclicReference)
                return false;
            if (canContainCyclicReference)
            {
                if (_byValObjectsInScope.Contains(obj))
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(string.Format(SRSerialization.CannotSerializeObjectWithCycles, DataContract.GetClrTypeFullName(obj.GetType()))));
                _byValObjectsInScope.Push(obj);
            }
            return false;
        }

        // Mirror of OnHandleReference: pops the object from the cycle stack.
        internal virtual void OnEndHandleReference(XmlWriterDelegator xmlWriter, object obj, bool canContainCyclicReference)
        {
            if (xmlWriter.depth < depthToCheckCyclicReference)
                return;
            if (canContainCyclicReference)
            {
                _byValObjectsInScope.Pop(obj);
            }
        }

#if USE_REFEMIT
        public void WriteNull(XmlWriterDelegator xmlWriter, Type memberType, bool isMemberTypeSerializable)
#else
        internal void WriteNull(XmlWriterDelegator xmlWriter, Type memberType, bool isMemberTypeSerializable)
#endif
        {
            CheckIfTypeSerializable(memberType, isMemberTypeSerializable);
            WriteNull(xmlWriter);
        }

        internal void WriteNull(XmlWriterDelegator xmlWriter, Type memberType, bool isMemberTypeSerializable, XmlDictionaryString name, XmlDictionaryString ns)
        {
            xmlWriter.WriteStartElement(name, ns);
            WriteNull(xmlWriter, memberType, isMemberTypeSerializable);
            xmlWriter.WriteEndElement();
        }

        // --- Collection-count helpers: enforce MaxItemsInObjectGraph and let
        // --- derived contexts emit a size attribute (WriteArraySize).

#if USE_REFEMIT
        public void IncrementArrayCount(XmlWriterDelegator xmlWriter, Array array)
#else
        internal void IncrementArrayCount(XmlWriterDelegator xmlWriter, Array array)
#endif
        {
            IncrementCollectionCount(xmlWriter, array.GetLength(0));
        }

#if USE_REFEMIT
        public void IncrementCollectionCount(XmlWriterDelegator xmlWriter, ICollection collection)
#else
        internal void IncrementCollectionCount(XmlWriterDelegator xmlWriter, ICollection collection)
#endif
        {
            IncrementCollectionCount(xmlWriter, collection.Count);
        }

#if USE_REFEMIT
        public void IncrementCollectionCountGeneric<T>(XmlWriterDelegator xmlWriter, ICollection<T> collection)
#else
        internal void IncrementCollectionCountGeneric<T>(XmlWriterDelegator xmlWriter, ICollection<T> collection)
#endif
        {
            IncrementCollectionCount(xmlWriter, collection.Count);
        }

        private void IncrementCollectionCount(XmlWriterDelegator xmlWriter, int size)
        {
            IncrementItemCount(size);
            WriteArraySize(xmlWriter, size);
        }

        // No-op here; overridden where a size attribute must be written.
        internal virtual void WriteArraySize(XmlWriterDelegator xmlWriter, int size)
        {
        }

        // --- Small static helpers invoked from generated serialization code.

#if USE_REFEMIT
        public static bool IsMemberTypeSameAsMemberValue(object obj, Type memberType)
#else
        internal static bool IsMemberTypeSameAsMemberValue(object obj, Type memberType)
#endif
        {
            if (obj == null || memberType == null)
                return false;
            return obj.GetType().TypeHandle.Equals(memberType.TypeHandle);
        }

#if USE_REFEMIT
        public static T GetDefaultValue<T>()
#else
        internal static T GetDefaultValue<T>()
#endif
        {
            return default(T);
        }

#if USE_REFEMIT
        public static T GetNullableValue<T>(Nullable<T> value) where T : struct
#else
        internal static T GetNullableValue<T>(Nullable<T> value) where T : struct
#endif
        {
            // value.Value will throw if hasValue is false
            return value.Value;
        }

#if USE_REFEMIT
        public static void ThrowRequiredMemberMustBeEmitted(string memberName, Type type)
#else
        internal static void ThrowRequiredMemberMustBeEmitted(string memberName, Type type)
#endif
        {
            throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new SerializationException(string.Format(SRSerialization.RequiredMemberMustBeEmitted, memberName, type.FullName)));
        }

#if USE_REFEMIT
        public static bool GetHasValue<T>(Nullable<T> value) where T : struct
#else
        internal static bool GetHasValue<T>(Nullable<T> value) where T : struct
#endif
        {
            return value.HasValue;
        }

        // IXmlSerializable support: reuses a cached writer wrapper per context.
        internal void WriteIXmlSerializable(XmlWriterDelegator xmlWriter, object obj)
        {
            if (_xmlSerializableWriter == null)
                _xmlSerializableWriter = new XmlSerializableWriter();
            WriteIXmlSerializable(xmlWriter, obj, _xmlSerializableWriter);
        }

        internal static void WriteRootIXmlSerializable(XmlWriterDelegator xmlWriter, object obj)
        {
            WriteIXmlSerializable(xmlWriter, obj, new XmlSerializableWriter());
        }

        // Dispatches by runtime shape: IXmlSerializable, XmlElement, or
        // XmlNode[]; anything else is rejected as an unknown XML type.
        private static void WriteIXmlSerializable(XmlWriterDelegator xmlWriter, object obj, XmlSerializableWriter xmlSerializableWriter)
        {
            xmlSerializableWriter.BeginWrite(xmlWriter.Writer, obj);
            IXmlSerializable xmlSerializable = obj as IXmlSerializable;
            if (xmlSerializable != null)
                xmlSerializable.WriteXml(xmlSerializableWriter);
            else
            {
                XmlElement xmlElement = obj as XmlElement;
                if (xmlElement != null)
                    xmlElement.WriteTo(xmlSerializableWriter);
                else
                {
                    XmlNode[] xmlNodes = obj as XmlNode[];
                    if (xmlNodes != null)
                        foreach (XmlNode xmlNode in xmlNodes)
                            xmlNode.WriteTo(xmlSerializableWriter);
                    else
                        throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(string.Format(SRSerialization.UnknownXmlType, DataContract.GetClrTypeFullName(obj.GetType()))));
                }
            }
            xmlSerializableWriter.EndWrite();
        }

        protected virtual void WriteDataContractValue(DataContract dataContract, XmlWriterDelegator xmlWriter, object obj, RuntimeTypeHandle declaredTypeHandle)
        {
            dataContract.WriteXmlValue(xmlWriter, obj, this);
        }

        protected virtual void WriteNull(XmlWriterDelegator xmlWriter)
        {
            XmlObjectSerializer.WriteNull(xmlWriter);
        }

        // Asks the DataContractResolver for a (name, namespace) pair and
        // writes xsi:type with it; a (null, null) result means "write nothing".
        private void WriteResolvedTypeInfo(XmlWriterDelegator writer, Type objectType, Type declaredType)
        {
            XmlDictionaryString typeName, typeNamespace;
            if (ResolveType(objectType, declaredType, out typeName, out typeNamespace))
            {
                WriteTypeInfo(writer, typeName, typeNamespace);
            }
        }

        // Validates resolver output: both name and namespace must be non-null,
        // or both null (suppress type info); anything else is an error.
        private bool ResolveType(Type objectType, Type declaredType, out XmlDictionaryString typeName, out XmlDictionaryString typeNamespace)
        {
            if (!DataContractResolver.TryResolveType(objectType, declaredType, KnownTypeResolver, out typeName, out typeNamespace))
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(string.Format(SRSerialization.ResolveTypeReturnedFalse, DataContract.GetClrTypeFullName(DataContractResolver.GetType()), DataContract.GetClrTypeFullName(objectType))));
            }
            if (typeName == null)
            {
                if (typeNamespace == null)
                {
                    return false;
                }
                else
                {
                    throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(string.Format(SRSerialization.ResolveTypeReturnedNull, DataContract.GetClrTypeFullName(DataContractResolver.GetType()), DataContract.GetClrTypeFullName(objectType))));
                }
            }
            if (typeNamespace == null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(XmlObjectSerializer.CreateSerializationException(string.Format(SRSerialization.ResolveTypeReturnedNull, DataContract.GetClrTypeFullName(DataContractResolver.GetType()), DataContract.GetClrTypeFullName(objectType))));
            }
            return true;
        }

        // Writes xsi:type unless the contract is already the declared one.
        // Returns true when a resolver was used (caller must then verify the
        // type is known -- see SerializeAndVerifyType).
        protected virtual bool WriteTypeInfo(XmlWriterDelegator writer, DataContract contract, DataContract declaredContract)
        {
            if (XmlObjectSerializer.IsContractDeclared(contract, declaredContract))
            {
                return false;
            }
            bool hasResolver = DataContractResolver != null;
            if (hasResolver)
            {
                WriteResolvedTypeInfo(writer, contract.UnderlyingType, declaredContract.UnderlyingType);
            }
            else
            {
                WriteTypeInfo(writer, contract.Name, contract.Namespace);
            }
            return hasResolver;
        }

        protected virtual void WriteTypeInfo(XmlWriterDelegator writer, string dataContractName, string dataContractNamespace)
        {
            writer.WriteAttributeQualifiedName(Globals.XsiPrefix, DictionaryGlobals.XsiTypeLocalName, DictionaryGlobals.SchemaInstanceNamespace, dataContractName, dataContractNamespace);
        }

        protected virtual void WriteTypeInfo(XmlWriterDelegator writer, XmlDictionaryString dataContractName, XmlDictionaryString dataContractNamespace)
        {
            writer.WriteAttributeQualifiedName(Globals.XsiPrefix, DictionaryGlobals.XsiTypeLocalName, DictionaryGlobals.SchemaInstanceNamespace, dataContractName, dataContractNamespace);
        }

#if !NET_NATIVE
        public void WriteExtensionData(XmlWriterDelegator xmlWriter, ExtensionDataObject extensionData, int memberIndex)
        {
            // Needed by the code generator, but not called.
        }
#endif
    }
}
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//

// When defined, every CachedInfo lock records its owning thread so cross-thread misuse asserts.
#define CACHEINFO_ENABLE_TRACKING_THREADS
// When additionally defined, the stack trace at lock-acquisition time is captured for debugging.
//#define CACHEINFO_DEBUG_THREADING_ISSUES

namespace Microsoft.Zelig.CodeGeneration.IR
{
    using System;
    using System.Collections.Generic;

    using Microsoft.Zelig.Runtime.TypeSystem;

    //
    // Holds the intermediate-representation state of a method's control flow graph:
    // its basic blocks, variables, and a version counter that invalidates derived
    // (cached) analyses whenever the graph is mutated.
    //
    public abstract class ControlFlowGraphState
    {
        public static CompilationConstraints[] SharedEmptyCompilationConstraintsArray = new CompilationConstraints[0];

        //
        // Base class for analyses derived from the CFG (e.g. flow information).
        // A cached info is recomputed lazily when its recorded version falls behind
        // the owner CFG's version; while locked it must not be invalidated.
        //
        public abstract class CachedInfo : IDisposable
        {
            public static CachedInfo[] SharedEmptyArray = new CachedInfo[0];

            //
            // State
            //

            protected ControlFlowGraphState m_owner;       // CFG this cache belongs to.
            private   int                   m_version;     // Owner version this cache was last refreshed against.
            private   int                   m_lockCount;   // Reentrant lock depth; non-zero forbids refresh.
#if CACHEINFO_ENABLE_TRACKING_THREADS
            private   System.Threading.Thread m_lockOwner; // Thread currently holding the lock, or null.
#if CACHEINFO_DEBUG_THREADING_ISSUES
            protected System.Diagnostics.StackTrace m_lockTrace; // Stack captured at first acquisition (debug aid).
#endif
#endif

            //
            // Constructor Methods
            //

            protected CachedInfo()
            {
            }

            //
            // Helper Methods
            //

            // IDisposable support so a lock can be scoped with 'using': disposing releases one level.
            public void Dispose()
            {
                Unlock();
            }

            // Acquires (reentrantly) the cache lock. With thread tracking enabled,
            // an interlocked compare-exchange claims ownership and asserts that no
            // other thread already holds it.
            public void Lock()
            {
#if CACHEINFO_ENABLE_TRACKING_THREADS
                System.Threading.Thread thisThread   = System.Threading.Thread.CurrentThread;
                System.Threading.Thread activeThread = System.Threading.Interlocked.CompareExchange( ref m_lockOwner, thisThread, null );

                CHECKS.ASSERT( activeThread == null || activeThread == thisThread, "Lock on {0} for method '{1}' already claimed", this.GetType().FullName, m_owner.Method.ToShortString() );

#if CACHEINFO_DEBUG_THREADING_ISSUES
                if(m_lockCount == 0)
                {
                    m_lockTrace = new System.Diagnostics.StackTrace();
                }
#endif
#endif

                m_lockCount++;
            }

            // Releases one level of the reentrant lock; when the count reaches zero,
            // thread ownership (and the debug trace) is cleared.
            public void Unlock()
            {
#if CACHEINFO_ENABLE_TRACKING_THREADS
                CHECKS.ASSERT( System.Threading.Thread.CurrentThread == m_lockOwner, "Lock on {0} not owned by current thread", this.GetType().FullName );
#endif

                CHECKS.ASSERT( m_lockCount > 0, "Underflow for lock on {0}", this.GetType().FullName );

                m_lockCount--;

#if CACHEINFO_ENABLE_TRACKING_THREADS
                if(m_lockCount == 0)
                {
                    m_lockOwner = null;
#if CACHEINFO_DEBUG_THREADING_ISSUES
                    m_lockTrace = null;
#endif
                }
#endif
            }

            // Recomputes the cached data (via Update) if the owner CFG has changed
            // since the last refresh. Throws if the cache is locked, since a refresh
            // while locked would mean the CFG was mutated under an active consumer.
            public void RefreshIfNeeded()
            {
                ThreadLockInfo.Assert( m_owner.Method );

                if(m_version != m_owner.m_version)
                {
                    if(m_lockCount != 0)
                    {
                        throw TypeConsistencyErrorException.Create( "Detected attempt to modify state of Control Flow Graph for {0}", this.GetType().FullName );
                    }

                    Update();

                    m_version = m_owner.m_version;
                }
            }

            // Subclasses recompute their derived data here.
            protected abstract void Update();

            //
            // Access Methods
            //

            internal ControlFlowGraphState Owner
            {
                set
                {
                    m_owner = value;
                }
            }
        }

        //--//

        //
        // Per-thread guard (via [ThreadStatic] fields) restricting the current thread
        // to touching the CFG of a single method, with an optional extra "exception"
        // method that is also permitted. Disposing lifts the restriction.
        //
        class ThreadLockInfo : IDisposable
        {
            // Scoped registration of a second method the locked thread may access.
            internal class ExceptionInfo : IDisposable
            {
                //
                // Constructor Methods
                //

                internal ExceptionInfo( MethodRepresentation md )
                {
                    s_lockException = md;
                }

                //
                // Helper Methods
                //

                public void Dispose()
                {
                    s_lockException = null;
                }
            }

            //
            // State
            //

            [ThreadStatic] private static MethodRepresentation s_lock;          // Method this thread is locked to, or null.
            [ThreadStatic] private static MethodRepresentation s_lockException; // Extra method also allowed while locked.

            //
            // Constructor Methods
            //

            internal ThreadLockInfo( MethodRepresentation md )
            {
                Assert( md ); // Fail early if this thread is already locked to a different method.

                s_lock = md;
            }

            internal ThreadLockInfo( MethodRepresentation md  ,
                                     MethodRepresentation md2 )
            {
                Assert( md );

                s_lock          = md;
                s_lockException = md2;
            }

            //
            // Helper Methods
            //

            public void Dispose()
            {
                s_lock          = null;
                s_lockException = null;
            }

            // Throws unless the current thread is unlocked or 'md' is one of the permitted methods.
            internal static void Assert( MethodRepresentation md )
            {
                if(s_lock == null || md == s_lock || md == s_lockException)
                {
                    return;
                }

                throw TypeConsistencyErrorException.Create( "Detected attempt to access state of Control Flow Graph for '{0}' while thread is locked to access only '{1}'", md.ToShortString(), s_lock.ToShortString() );
            }
        }

        //
        // Aggregates several IDisposable locks so they can be released as one unit.
        //
        class GroupLockInfo : IDisposable
        {
            //
            // State
            //

            private readonly IDisposable[] m_locks;

            //
            // Constructor Methods
            //

            internal GroupLockInfo( IDisposable[] locks )
            {
                m_locks = locks;
            }

            //
            // Helper Methods
            //

            public void Dispose()
            {
                //
                // Release in opposite order.
                //
                for(int i = m_locks.Length; --i >= 0; )
                {
                    m_locks[i].Dispose();
                }
            }
        }

        //--//

        //
        // Cached analysis that recomputes the predecessor/successor flow information
        // of every basic block in the graph.
        //
        class CachedInfo_FlowInformation : CachedInfo
        {
            //
            // Helper Methods
            //

            protected override void Update()
            {
                using(new PerformanceCounters.ContextualTiming( m_owner, "FlowInformation" ))
                {
                    // Two passes: clear all flow data first so stale links from the
                    // previous version cannot survive, then rebuild.
                    foreach(BasicBlock bb in m_owner.m_basicBlocks)
                    {
                        bb.ResetFlowInformation();
                    }

                    foreach(BasicBlock bb in m_owner.m_basicBlocks)
                    {
                        bb.UpdateFlowInformation();
                    }
                }
            }
        }

        //--//

        //
        // State
        //

        protected MethodRepresentation m_md;              // Method this CFG describes.
        protected int                  m_version;         // Bumped on every structural mutation; invalidates caches.
        protected EntryBasicBlock      m_entryBasicBlock;
        protected ExitBasicBlock       m_exitBasicBlock;
        protected BasicBlock[]         m_basicBlocks;     // This is the set of all the basic blocks in the CFG.
        protected VariableExpression   m_returnValue;
        protected VariableExpression[] m_arguments;
        protected VariableExpression[] m_variables;       // Grow-only storage; only the first m_variablesCount entries are valid.
        protected int                  m_variablesCount;

        protected CachedInfo[]         m_cache;           // Lazily-populated derived analyses.

        //
        // Constructor Methods
        //

        protected ControlFlowGraphState() // Default constructor required by TypeSystemSerializer.
        {
            m_cache = CachedInfo.SharedEmptyArray;
        }

        protected ControlFlowGraphState( MethodRepresentation md )
        {
            m_md          = md;
            m_version     = 1;
            m_basicBlocks = BasicBlock.SharedEmptyArray;
            m_variables   = VariableExpression.SharedEmptyArray;
            m_cache       = CachedInfo.SharedEmptyArray;
        }

        protected ControlFlowGraphState( ControlFlowGraphState source ) : this( source.m_md )
        {
        }

        //--//

        //
        // Helper Methods
        //

        // Clones the return value and argument variables of 'source' into this CFG,
        // registering each old->new mapping with the cloning context. The return
        // value becomes a fresh temporary; arguments keep their positional index.
        protected virtual void CloneVariables( CloningContext        context ,
                                               ControlFlowGraphState source  )
        {
            VariableExpression var;

            var = source.m_returnValue;
            if(var != null)
            {
                m_returnValue = AllocateTemporary( var.Type, var.DebugName );

                context.Register( var, m_returnValue );
            }

            //--//

            VariableExpression[] args    = source.m_arguments;
            int                  argsNum = args.Length;

            m_arguments = new VariableExpression[argsNum];

            for(int i = 0; i < argsNum; i++)
            {
                var = args[i];

                m_arguments[i] = new ArgumentVariableExpression( var.Type, var.DebugName, i );

                context.Register( var, m_arguments[i] );
            }
        }

        //--//

        // Appends a variable to m_variables, growing the backing array in chunks of 16.
        protected void TrackVariable( VariableExpression var )
        {
            if(m_variablesCount == m_variables.Length)
            {
                m_variables = ArrayUtility.EnsureSizeOfNotNullArray( m_variables, m_variablesCount + 16 );
            }

            m_variables[m_variablesCount++] = var;
        }

        // Creates and tracks a new local variable of type 'td'.
        public LocalVariableExpression AllocateLocal( TypeRepresentation           td        ,
                                                      VariableExpression.DebugInfo debugInfo )
        {
            LocalVariableExpression newLocal = new LocalVariableExpression( td, debugInfo );

            TrackVariable( newLocal );

            return newLocal;
        }

        // Creates and tracks a new temporary variable of type 'td'.
        public TemporaryVariableExpression AllocateTemporary( TypeRepresentation           td        ,
                                                              VariableExpression.DebugInfo debugInfo )
        {
            TemporaryVariableExpression newTmp = new TemporaryVariableExpression( td, debugInfo );

            TrackVariable( newTmp );

            return newTmp;
        }

        // Creates and tracks a new exception-object variable of type 'td'.
        public ExceptionObjectVariableExpression AllocateExceptionObjectVariable( TypeRepresentation td )
        {
            ExceptionObjectVariableExpression newEx = new ExceptionObjectVariableExpression( td, null );

            TrackVariable( newEx );

            return newEx;
        }

        //--//

        // Adds a basic block to the graph (bumping the version); entry/exit blocks
        // also update the dedicated fields, asserting uniqueness.
        internal void Register( BasicBlock basicBlock )
        {
            BumpVersion();

            if(basicBlock is EntryBasicBlock)
            {
                CHECKS.ASSERT( m_entryBasicBlock == null, "Entry Basic Block already exists" );

                m_entryBasicBlock = (EntryBasicBlock)basicBlock;
            }
            else if(basicBlock is ExitBasicBlock)
            {
                CHECKS.ASSERT( m_exitBasicBlock == null, "Exit Basic Block already exists" );

                m_exitBasicBlock = (ExitBasicBlock)basicBlock;
            }

            m_basicBlocks = ArrayUtility.AddUniqueToNotNullArray( m_basicBlocks, basicBlock );
        }

        // Removes a block's entry/exit designation and bumps the version.
        // NOTE(review): the block is not removed from m_basicBlocks here — presumably
        // handled elsewhere; confirm against callers.
        internal void Deregister( BasicBlock basicBlock )
        {
            BumpVersion();

            if(basicBlock == m_entryBasicBlock)
            {
                m_entryBasicBlock = null;
            }
            else if(basicBlock == m_exitBasicBlock)
            {
                m_exitBasicBlock = null;
            }
        }

        // Invalidates all cached analyses by advancing the version counter.
        internal void BumpVersion()
        {
            m_version++;
        }

        //--//

        // Locks the current thread so it may only touch the CFG of 'md'; dispose to release.
        public static IDisposable LockThreadToMethod( MethodRepresentation md )
        {
            return new ThreadLockInfo( md );
        }

        // Adds 'md' as an extra method the locked thread may access; dispose to revoke.
        public static IDisposable AddExceptionToThreadMethodLock( MethodRepresentation md )
        {
            return new ThreadLockInfo.ExceptionInfo( md );
        }

        // Bundles several locks into one disposable that releases them in reverse order.
        public IDisposable GroupLock( params IDisposable[] locks )
        {
            return new GroupLockInfo( locks );
        }

        // Returns the (refreshed) cached analysis of type T, creating and caching it on first use.
        protected T GetCachedInfo< T >() where T : CachedInfo, new()
        {
            foreach(CachedInfo ci in m_cache)
            {
                if(ci is T)
                {
                    ci.RefreshIfNeeded();

                    return (T)ci;
                }
            }

            T newCI = new T();

            newCI.Owner = this;

            m_cache = ArrayUtility.AppendToNotNullArray( m_cache, newCI );

            newCI.RefreshIfNeeded();

            return newCI;
        }

        // Ensures basic-block flow information is up to date (result accessed via the blocks themselves).
        public void UpdateFlowInformation()
        {
            GetCachedInfo< CachedInfo_FlowInformation >();
        }

        // Refreshes and locks the flow information; dispose the result to unlock.
        public IDisposable LockFlowInformation()
        {
            var ci = GetCachedInfo< CachedInfo_FlowInformation >();

            ci.Lock();

            return ci;
        }

        //--//

        // Applies an IR transformation to every piece of serialized CFG state.
        protected void InnerApplyTransformation( TransformationContextForIR context )
        {
            context.Transform( ref m_md              );
            context.Transform( ref m_version         );
            context.Transform( ref m_entryBasicBlock );
            context.Transform( ref m_exitBasicBlock  );
            context.Transform( ref m_basicBlocks     );
            context.Transform( ref m_returnValue     );
            context.Transform( ref m_arguments       );
            context.Transform( ref m_variables       );
            context.Transform( ref m_variablesCount  );
        }

        //--//

        // Invokes 'action' on every operator of every basic block in the graph.
        public void PerformActionOnOperators( Action<Operator> action )
        {
            foreach(BasicBlock bb in m_basicBlocks)
            {
                foreach(Operator op in bb.Operators)
                {
                    action( op );
                }
            }
        }

        //--//

        // First block in the graph; if the graph is still empty, a first normal
        // block is created between the normalized entry and exit.
        public BasicBlock FirstBasicBlock
        {
            get
            {
                if(m_basicBlocks != null && m_basicBlocks.Length > 0)
                {
                    return m_basicBlocks[0];
                }

                return CreateFirstNormalBasicBlock();
            }
        }

        // Creates a new normal block wired between the normalized entry and exit blocks.
        public NormalBasicBlock CreateFirstNormalBasicBlock()
        {
            //
            // Important: get 'NormalizedEntryBasicBlock' before allocating the new basic block, or it will be reclaimed!!
            //
            var bbPrev = this.NormalizedEntryBasicBlock;
            var bbNext = this.NormalizedExitBasicBlock;
            var bb     = new NormalBasicBlock( this );

            bbPrev.FlowControl = UnconditionalControlOperator.New( null, bb     );
            bb    .FlowControl = UnconditionalControlOperator.New( null, bbNext );

            return bb;
        }

        // Creates a new normal block that flows unconditionally into the normalized exit.
        public NormalBasicBlock CreateLastNormalBasicBlock()
        {
            //
            // Important: get 'NormalizedExitBasicBlock' before allocating the new basic block, or it will be reclaimed!!
            //
            var bbNext = this.NormalizedExitBasicBlock;
            var bb     = new NormalBasicBlock( this );

            bb.FlowControl = UnconditionalControlOperator.New( null, bbNext );

            return bb;
        }

        // Appends a return operator (carrying m_returnValue when present) to the exit block.
        public void AddReturnOperator()
        {
            //
            // Create proper flow control for exit basic block.
            //
            ControlOperator op;

            if(m_returnValue != null)
            {
                op = ReturnControlOperator.New( m_returnValue );
            }
            else
            {
                op = ReturnControlOperator.New();
            }

            m_exitBasicBlock.AddOperator( op );
        }

        // Convenience overload: initialize 'var' with its own declared type, by value.
        public Operator GenerateVariableInitialization( Debugging.DebugInfo debugInfo ,
                                                        VariableExpression  var       )
        {
            return GenerateVariableInitialization( debugInfo, var, var.Type, false );
        }

        public abstract Operator GenerateVariableInitialization( Debugging.DebugInfo debugInfo       ,
                                                                 Expression          var             ,
                                                                 TypeRepresentation  td              ,
                                                                 bool                fThroughPointer );

        public abstract BasicBlock GetInjectionPoint( BasicBlock.Qualifier qualifier );

        //--//

        // Assigns sequential numbers to variables, counting locals, temporaries,
        // and exception objects in separate sequences.
        public virtual void RenumberVariables()
        {
            int numLocal = 0;
            int numTmp   = 0;
            int numEx    = 0;

            foreach(VariableExpression var in m_variables)
            {
                if(var is LocalVariableExpression)
                {
                    var.Number = numLocal++;
                }
                else if(var is TemporaryVariableExpression)
                {
                    var.Number = numTmp++;
                }
                else if(var is ExceptionObjectVariableExpression)
                {
                    var.Number = numEx++;
                }
            }
        }

        //--//

        // Element-wise equality of two constraint arrays (both assumed sorted — see AddCompilationConstraint).
        public static bool SameCompilationConstraints( CompilationConstraints[] ccArray1 ,
                                                       CompilationConstraints[] ccArray2 )
        {
            int len1 = ccArray1.Length;
            int len2 = ccArray2.Length;

            if(len1 != len2)
            {
                return false;
            }

            for(int i = 0; i < len1; i++)
            {
                if(ccArray1[i] != ccArray2[i])
                {
                    return false;
                }
            }

            return true;
        }

        // Inserts 'cc' into the sorted array, returning the original array if already present.
        public static CompilationConstraints[] AddCompilationConstraint( CompilationConstraints[] ccArray ,
                                                                         CompilationConstraints   cc      )
        {
            int pos = 0;

            for(; pos < ccArray.Length; pos++)
            {
                CompilationConstraints cc2 = ccArray[pos];

                if(cc2 == cc)
                {
                    return ccArray;
                }

                if(cc2 > cc)
                {
                    break;
                }
            }

            return ArrayUtility.InsertAtPositionOfNotNullArray( ccArray, pos, cc );
        }

        // Removes 'cc' from the sorted array, returning the original array if absent.
        public static CompilationConstraints[] RemoveCompilationConstraint( CompilationConstraints[] ccArray ,
                                                                            CompilationConstraints   cc      )
        {
            for(int pos = 0; pos < ccArray.Length; pos++)
            {
                CompilationConstraints cc2 = ccArray[pos];

                if(cc2 == cc)
                {
                    return ArrayUtility.RemoveAtPositionFromNotNullArray( ccArray, pos );
                }

                if(cc2 > cc)
                {
                    break;
                }
            }

            return ccArray;
        }

        // True if 'cc' is present in the sorted array (early-out once past its sort position).
        public static bool HasCompilationConstraint( CompilationConstraints[] ccArray ,
                                                     CompilationConstraints   cc      )
        {
            for(int pos = 0; pos < ccArray.Length; pos++)
            {
                CompilationConstraints cc2 = ccArray[pos];

                if(cc2 == cc)
                {
                    return true;
                }

                if(cc2 > cc)
                {
                    break;
                }
            }

            return false;
        }

        // True if any constraint in 'ccFilter' is present in 'ccTarget'.
        public static bool HasAnyCompilationConstraint(        CompilationConstraints[] ccTarget ,
                                                        params CompilationConstraints[] ccFilter )
        {
            CompilationConstraints match;

            return HasAnyCompilationConstraint( ccTarget, out match, ccFilter );
        }

        // Same as above, additionally returning the first matching constraint via 'match'.
        public static bool HasAnyCompilationConstraint(            CompilationConstraints[] ccTarget ,
                                                        out        CompilationConstraints   match    ,
                                                        params     CompilationConstraints[] ccFilter )
        {
            foreach(CompilationConstraints cc in ccFilter)
            {
                if(HasCompilationConstraint( ccTarget, cc ))
                {
                    match = cc;

                    return true;
                }
            }

            match = default(CompilationConstraints);

            return false;
        }

        //--//

        // Returns the sole operator using 'var', or null if it has zero or multiple uses.
        public static Operator CheckSingleUse( Operator[][]       useChains ,
                                               VariableExpression var       )
        {
            Operator[] uses = useChains[var.SpanningTreeIndex];

            if(uses.Length == 1)
            {
                return uses[0];
            }

            return null;
        }

        // Returns the sole operator defining 'var', or null if it has zero or multiple definitions.
        public static Operator CheckSingleDefinition( Operator[][]       defChains ,
                                                      VariableExpression var       )
        {
            Operator[] defs = defChains[var.SpanningTreeIndex];

            if(defs.Length == 1)
            {
                return defs[0];
            }

            return null;
        }

        // Looks up the single definition of 'ex' in the table, or null if 'ex' is not
        // a variable or has no recorded definition.
        public static Operator CheckSingleDefinition( GrowOnlyHashTable< VariableExpression, Operator > defLookup ,
                                                      Expression                                        ex        )
        {
            VariableExpression var = ex as VariableExpression;

            if(var != null)
            {
                Operator def;

                if(defLookup.TryGetValue( var, out def ))
                {
                    return def;
                }
            }

            return null;
        }

        //--//

        //
        // Access Methods
        //

        public abstract TypeSystemForIR TypeSystemForIR
        {
            get;
        }

        // Translates the method's build-time attribute flags into a sorted array of
        // compilation constraints; for each feature, the ON flag wins over OFF.
        public CompilationConstraints[] CompilationConstraintsArray
        {
            get
            {
                CompilationConstraints[] res = SharedEmptyCompilationConstraintsArray;

                MethodRepresentation.BuildTimeAttributes bta = m_md.BuildTimeFlags;

                if     ((bta & MethodRepresentation.BuildTimeAttributes.CanAllocate            ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.Allocations_ON      );
                else if((bta & MethodRepresentation.BuildTimeAttributes.CannotAllocate         ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.Allocations_OFF     );

                if     ((bta & MethodRepresentation.BuildTimeAttributes.StackAvailable         ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.StackAccess_ON      );
                else if((bta & MethodRepresentation.BuildTimeAttributes.StackNotAvailable      ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.StackAccess_OFF     );

                if     ((bta & MethodRepresentation.BuildTimeAttributes.EnableBoundsChecks     ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.BoundsChecks_ON     );
                else if((bta & MethodRepresentation.BuildTimeAttributes.DisableBoundsChecks    ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.BoundsChecks_OFF    );
                else if((bta & MethodRepresentation.BuildTimeAttributes.DisableDeepBoundsChecks) != 0) res = AddCompilationConstraint( res, CompilationConstraints.BoundsChecks_OFF_DEEP );

                if     ((bta & MethodRepresentation.BuildTimeAttributes.EnableNullChecks       ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.NullChecks_ON       );
                else if((bta & MethodRepresentation.BuildTimeAttributes.DisableNullChecks      ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.NullChecks_OFF      );
                else if((bta & MethodRepresentation.BuildTimeAttributes.DisableDeepNullChecks  ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.NullChecks_OFF_DEEP );

                return res;
            }
        }

        public MethodRepresentation Method
        {
            get
            {
                return m_md;
            }
        }

        public EntryBasicBlock EntryBasicBlock
        {
            get
            {
                return m_entryBasicBlock;
            }
        }

        public ExitBasicBlock ExitBasicBlock
        {
            get
            {
                return m_exitBasicBlock;
            }
        }

        // Entry-side injection point (start of entry injection region).
        public BasicBlock NormalizedEntryBasicBlock
        {
            get
            {
                return this.GetInjectionPoint( BasicBlock.Qualifier.EntryInjectionStart );
            }
        }

        // Exit-side injection point (start of the epilogue).
        public BasicBlock NormalizedExitBasicBlock
        {
            get
            {
                return this.GetInjectionPoint( BasicBlock.Qualifier.EpilogueStart );
            }
        }

        public VariableExpression ReturnValue
        {
            get
            {
                return m_returnValue;
            }
        }

        public VariableExpression[] Arguments
        {
            get
            {
                return m_arguments;
            }
        }

        public int Version
        {
            get
            {
                return m_version;
            }
        }

        //--//

        //
        // Debug Methods
        //

        public override string ToString()
        {
            return string.Format( "FlowGraph({0})", m_md.ToShortString() );
        }

        public void Dump( IIntermediateRepresentationDumper dumper )
        {
            dumper.DumpGraph( this );
        }

        public abstract string ToPrettyString( Operator op );
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System; using System.Threading; using System.Diagnostics; using System.Collections.Generic; using System.Runtime.InteropServices; namespace System.Runtime.CompilerServices { // Marked [EagerStaticClassConstruction] because Cctor.GetCctor // uses _cctorGlobalLock [EagerOrderedStaticConstructor(EagerStaticConstructorOrder.CompilerServicesClassConstructorRunner)] internal static partial class ClassConstructorRunner { //============================================================================================================== // Ensures the class constructor for the given type has run. // // Called by the runtime when it finds a class whose static class constructor has probably not run // (probably because it checks in the initialized flag without thread synchronization). // // The context structure passed by reference lives in the image of one of the application's modules. // The contents are thus fixed (do not require pinning) and the address can be used as a unique // identifier for the context. // // This guarantee is violated in one specific case: where a class constructor cycle would cause a deadlock. If // so, per ECMA specs, this method returns without guaranteeing that the .cctor has run. // // No attempt is made to detect or break deadlocks due to other synchronization mechanisms. 
//============================================================================================================== #if !CORERT [RuntimeExport("CheckStaticClassConstruction")] public static unsafe void* CheckStaticClassConstruction(void* returnValue, StaticClassConstructionContext* pContext) { EnsureClassConstructorRun(pContext); return returnValue; } #else private unsafe static object CheckStaticClassConstructionReturnGCStaticBase(StaticClassConstructionContext* context, object gcStaticBase) { EnsureClassConstructorRun(context); return gcStaticBase; } private unsafe static IntPtr CheckStaticClassConstructionReturnNonGCStaticBase(StaticClassConstructionContext* context, IntPtr nonGcStaticBase) { EnsureClassConstructorRun(context); return nonGcStaticBase; } #endif public static unsafe void EnsureClassConstructorRun(StaticClassConstructionContext* pContext) { IntPtr pfnCctor = pContext->cctorMethodAddress; NoisyLog("EnsureClassConstructorRun, cctor={0}, thread={1}", pfnCctor, CurrentManagedThreadId); // If we were called from MRT, this check is redundant but harmless. This is in case someone within classlib // (cough, Reflection) needs to call this explicitly. if (pContext->initialized == 1) { NoisyLog("Cctor already run, cctor={0}, thread={1}", pfnCctor, CurrentManagedThreadId); return; } CctorHandle cctor = Cctor.GetCctor(pContext); Cctor[] cctors = cctor.Array; int cctorIndex = cctor.Index; try { Lock cctorLock = cctors[cctorIndex].Lock; if (DeadlockAwareAcquire(cctor, pfnCctor)) { int currentManagedThreadId = CurrentManagedThreadId; try { NoisyLog("Acquired cctor lock, cctor={0}, thread={1}", pfnCctor, currentManagedThreadId); cctors[cctorIndex].HoldingThread = currentManagedThreadId; if (pContext->initialized == 0) // Check again in case some thread raced us while we were acquiring the lock. 
{ TypeInitializationException priorException = cctors[cctorIndex].Exception; if (priorException != null) throw priorException; try { NoisyLog("Calling cctor, cctor={0}, thread={1}", pfnCctor, currentManagedThreadId); Call<int>(pfnCctor); // Insert a memory barrier here to order any writes executed as part of static class // construction above with respect to the initialized flag update we're about to make // below. This is important since the fast path for checking the cctor uses a normal read // and doesn't come here so without the barrier it could observe initialized == 1 but // still see uninitialized static fields on the class. Interlocked.MemoryBarrier(); NoisyLog("Set type inited, cctor={0}, thread={1}", pfnCctor, currentManagedThreadId); pContext->initialized = 1; } catch (Exception e) { TypeInitializationException wrappedException = new TypeInitializationException(null, SR.TypeInitialization_Type_NoTypeAvailable, e); cctors[cctorIndex].Exception = wrappedException; throw wrappedException; } } } finally { cctors[cctorIndex].HoldingThread = ManagedThreadIdNone; NoisyLog("Releasing cctor lock, cctor={0}, thread={1}", pfnCctor, currentManagedThreadId); cctorLock.Release(); } } else { // Cctor cycle resulted in a deadlock. We will break the guarantee and return without running the // .cctor. } } finally { Cctor.Release(cctor); } NoisyLog("EnsureClassConstructorRun complete, cctor={0}, thread={1}", pfnCctor, CurrentManagedThreadId); } //========================================================================================================= // Return value: // true - lock acquired. // false - deadlock detected. Lock not acquired. 
//========================================================================================================= private static bool DeadlockAwareAcquire(CctorHandle cctor, IntPtr pfnCctor) { const int WaitIntervalSeedInMS = 1; // seed with 1ms and double every time through the loop const int WaitIntervalLimitInMS = WaitIntervalSeedInMS << 7; // limit of 128ms int waitIntervalInMS = WaitIntervalSeedInMS; int cctorIndex = cctor.Index; Cctor[] cctors = cctor.Array; Lock lck = cctors[cctorIndex].Lock; if (lck.IsAcquired) return false; // Thread recursively triggered the same cctor. if (lck.TryAcquire(waitIntervalInMS)) return true; // We couldn't acquire the lock. See if this .cctor is involved in a cross-thread deadlock. If so, break // the deadlock by breaking the guarantee - we'll skip running the .cctor and let the caller take his chances. int currentManagedThreadId = CurrentManagedThreadId; int unmarkCookie = -1; try { // We'll spin in a forever-loop of checking for a deadlock state, then waiting a short time, then // checking for a deadlock state again, and so on. This is because the BlockedRecord info has a built-in // lag time - threads don't report themselves as blocking until they've been blocked for a non-trivial // amount of time. // // If the threads are deadlocked for any reason other a class constructor cycling, this loop will never // terminate - this is by design. If the user code inside the class constructors were to // deadlock themselves, then that's a bug in user code. for (;;) { using (LockHolder.Hold(s_cctorGlobalLock)) { // Ask the guy who holds the cctor lock we're trying to acquire who he's waiting for. Keep // walking down that chain until we either discover a cycle or reach a non-blocking state. Note // that reaching a non-blocking state is not proof that we've avoided a deadlock due to the // BlockingRecord reporting lag. 
CctorHandle cctorWalk = cctor; int chainStepCount = 0; for (; chainStepCount < Cctor.Count; chainStepCount++) { int cctorWalkIndex = cctorWalk.Index; Cctor[] cctorWalkArray = cctorWalk.Array; int holdingThread = cctorWalkArray[cctorWalkIndex].HoldingThread; if (holdingThread == currentManagedThreadId) { // Deadlock detected. We will break the guarantee and return without running the .cctor. DebugLog("A class constructor was skipped due to class constructor cycle. cctor={0}, thread={1}", pfnCctor, currentManagedThreadId); // We are maintaining an invariant that the BlockingRecords never show a cycle because, // before we add a record, we first check for a cycle. As a result, once we've said // we're waiting, we are committed to waiting and will not need to skip running this // .cctor. Debug.Assert(unmarkCookie == -1); return false; } if (holdingThread == ManagedThreadIdNone) { // No one appears to be holding this cctor lock. Give the current thread some more time // to acquire the lock. break; } cctorWalk = BlockingRecord.GetCctorThatThreadIsBlockedOn(holdingThread); if (cctorWalk.Array == null) { // The final thread in the chain appears to be blocked on nothing. Give the current // thread some more time to acquire the lock. break; } } // We don't allow cycles in the BlockingRecords, so we must always enumerate at most each entry, // but never more. Debug.Assert(chainStepCount < Cctor.Count); // We have not discovered a deadlock, so let's register the fact that we're waiting on another // thread and continue to wait. It is important that we only signal that we are blocked after // we check for a deadlock because, otherwise, we give all threads involved in the deadlock the // opportunity to break it themselves and that leads to "ping-ponging" between the cctors // involved in the cycle, allowing intermediate cctor results to be observed. // // The invariant here is that we never 'publish' a BlockingRecord that forms a cycle. 
So it is // important that the look-for-cycle-and-then-publish-wait-status operation be atomic with // respect to other updates to the BlockingRecords. if (unmarkCookie == -1) { NoisyLog("Mark thread blocked, cctor={0}, thread={1}", pfnCctor, currentManagedThreadId); unmarkCookie = BlockingRecord.MarkThreadAsBlocked(currentManagedThreadId, cctor); } } // _cctorGlobalLock scope if (waitIntervalInMS < WaitIntervalLimitInMS) waitIntervalInMS *= 2; // We didn't find a cycle yet, try to take the lock again. if (lck.TryAcquire(waitIntervalInMS)) return true; } // infinite loop } finally { if (unmarkCookie != -1) { NoisyLog("Unmark thread blocked, cctor={0}, thread={1}", pfnCctor, currentManagedThreadId); BlockingRecord.UnmarkThreadAsBlocked(unmarkCookie); } } } //============================================================================================================== // These structs are allocated on demand whenever the runtime tries to run a class constructor. Once the // the class constructor has been successfully initialized, we reclaim this structure. The structure is long- // lived only if the class constructor threw an exception. This must be marked [EagerStaticClassConstruction] to // avoid infinite mutual recursion in GetCctor. //============================================================================================================== [EagerOrderedStaticConstructor(EagerStaticConstructorOrder.CompilerServicesClassConstructorRunnerCctor)] private unsafe struct Cctor { public Lock Lock; public TypeInitializationException Exception; public int HoldingThread; private int _refCount; private StaticClassConstructionContext* _pContext; // Because Cctor's are mutable structs, we have to give our callers raw references to the underlying arrays // for this collection to be usable. This also means once we place a Cctor in an array, we can't grow or // reallocate the array. 
private static Cctor[][] s_cctorArrays = new Cctor[10][]; private static int s_cctorArraysCount = 0; private static int s_count; //========================================================================================================== // Gets the Cctor entry associated with a specific class constructor context (creating it if necessary.) //========================================================================================================== public static CctorHandle GetCctor(StaticClassConstructionContext* pContext) { #if DEBUG const int Grow = 2; #else const int Grow = 10; #endif using (LockHolder.Hold(s_cctorGlobalLock)) { Cctor[] resultArray = null; int resultIndex = -1; if (s_count != 0) { // Search for the cctor context in our existing arrays for (int cctorIndex = 0; cctorIndex < s_cctorArraysCount; ++cctorIndex) { Cctor[] segment = s_cctorArrays[cctorIndex]; for (int i = 0; i < segment.Length; i++) { if (segment[i]._pContext == pContext) { resultArray = segment; resultIndex = i; break; } } if (resultArray != null) break; } } if (resultArray == null) { // look for an empty entry in an existing array for (int cctorIndex = 0; cctorIndex < s_cctorArraysCount; ++cctorIndex) { Cctor[] segment = s_cctorArrays[cctorIndex]; for (int i = 0; i < segment.Length; i++) { if (segment[i]._pContext == default(StaticClassConstructionContext*)) { resultArray = segment; resultIndex = i; break; } } if (resultArray != null) break; } if (resultArray == null) { // allocate a new array resultArray = new Cctor[Grow]; if (s_cctorArraysCount == s_cctorArrays.Length) { // grow the container Array.Resize(ref s_cctorArrays, (s_cctorArrays.Length * 2) + 1); } // store the array in the container, this cctor gets index 0 s_cctorArrays[s_cctorArraysCount] = resultArray; s_cctorArraysCount++; resultIndex = 0; } Debug.Assert(resultArray[resultIndex]._pContext == default(StaticClassConstructionContext*)); resultArray[resultIndex]._pContext = pContext; resultArray[resultIndex].Lock = new 
Lock(); s_count++; } Interlocked.Increment(ref resultArray[resultIndex]._refCount); return new CctorHandle(resultArray, resultIndex); } } public static int Count { get { Debug.Assert(s_cctorGlobalLock.IsAcquired); return s_count; } } public static void Release(CctorHandle cctor) { using (LockHolder.Hold(s_cctorGlobalLock)) { Cctor[] cctors = cctor.Array; int cctorIndex = cctor.Index; if (0 == Interlocked.Decrement(ref cctors[cctorIndex]._refCount)) { if (cctors[cctorIndex].Exception == null) { cctors[cctorIndex] = new Cctor(); s_count--; } } } } } private struct CctorHandle { public CctorHandle(Cctor[] array, int index) { _array = array; _index = index; } public Cctor[] Array { get { return _array; } } public int Index { get { return _index; } } private Cctor[] _array; private int _index; } //============================================================================================================== // Keeps track of threads that are blocked on a cctor lock (alas, we don't have ThreadLocals here in // System.Private.CoreLib so we have to use a side table.) // // This is used for cross-thread deadlock detection. // // - Data is only entered here if a thread has been blocked past a certain timeout (otherwise, it's certainly // not participating of a deadlock.) // - Reads and writes to _blockingRecord are guarded by _cctorGlobalLock. // - BlockingRecords for individual threads are created on demand. Since this is a rare event, we won't attempt // to recycle them directly (however, // ManagedThreadId's are themselves recycled pretty quickly - and threads that inherit the managed id also // inherit the BlockingRecord.) 
//==============================================================================================================
private struct BlockingRecord
{
    public int ManagedThreadId;     // ManagedThreadId of the blocked thread
    public CctorHandle BlockedOn;   // Cctor the thread is waiting on; null-Array handle means "not blocked"

    // Records that the given thread is blocked waiting on the given cctor and returns the
    // index of its record (stable for the lifetime of the block; pass it to
    // UnmarkThreadAsBlocked when the wait ends). Records are keyed by managed thread id
    // and reused across blocks by the same thread.
    public static int MarkThreadAsBlocked(int managedThreadId, CctorHandle blockedOn)
    {
#if DEBUG
        // Small growth increment in DEBUG to exercise the resize path more often.
        const int Grow = 2;
#else
        const int Grow = 10;
#endif
        using (LockHolder.Hold(s_cctorGlobalLock))
        {
            if (s_blockingRecords == null)
                s_blockingRecords = new BlockingRecord[Grow];
            int found;
            for (found = 0; found < s_nextBlockingRecordIndex; found++)
            {
                if (s_blockingRecords[found].ManagedThreadId == managedThreadId)
                    break;
            }
            if (found == s_nextBlockingRecordIndex)
            {
                // No record for this thread yet: grow if full, then append a new one.
                if (s_nextBlockingRecordIndex == s_blockingRecords.Length)
                {
                    BlockingRecord[] newBlockingRecords = new BlockingRecord[s_blockingRecords.Length + Grow];
                    for (int i = 0; i < s_blockingRecords.Length; i++)
                    {
                        newBlockingRecords[i] = s_blockingRecords[i];
                    }
                    s_blockingRecords = newBlockingRecords;
                }
                s_blockingRecords[s_nextBlockingRecordIndex].ManagedThreadId = managedThreadId;
                s_nextBlockingRecordIndex++;
            }
            s_blockingRecords[found].BlockedOn = blockedOn;
            return found;
        }
    }

    // Clears the BlockedOn entry written by MarkThreadAsBlocked.
    public static void UnmarkThreadAsBlocked(int blockRecordIndex)
    {
        // This method must never throw
        // (hence explicit Acquire/Release rather than a using-block that could allocate).
        s_cctorGlobalLock.Acquire();
        s_blockingRecords[blockRecordIndex].BlockedOn = new CctorHandle(null, 0);
        s_cctorGlobalLock.Release();
    }

    // Returns the cctor the given thread is currently blocked on, or a null-Array handle
    // if it is not blocked. Caller must hold s_cctorGlobalLock.
    public static CctorHandle GetCctorThatThreadIsBlockedOn(int managedThreadId)
    {
        Debug.Assert(s_cctorGlobalLock.IsAcquired);
        for (int i = 0; i < s_nextBlockingRecordIndex; i++)
        {
            if (s_blockingRecords[i].ManagedThreadId == managedThreadId)
                return s_blockingRecords[i].BlockedOn;
        }
        return new CctorHandle(null, 0);
    }

    private static BlockingRecord[] s_blockingRecords;      // lazily allocated side table
    private static int s_nextBlockingRecordIndex;           // count of records in use
}

// Global lock guarding the Cctor tables and the BlockingRecord side table.
private static Lock s_cctorGlobalLock = new Lock();

// Verbose tracing of cctor execution; compiled out unless ENABLE_NOISY_CCTOR_LOG is defined.
[Conditional("ENABLE_NOISY_CCTOR_LOG")]
private static void NoisyLog(string format, IntPtr cctorMethod, int threadId)
{
    // We cannot utilize any of the typical number formatting code because it triggers globalization code to run
    // and this cctor code is layered below globalization.
#if DEBUG
    Debug.WriteLine(format, ToHexString(cctorMethod), ToHexString(threadId));
#endif // DEBUG
}

// Debug-build-only tracing of cctor execution.
[Conditional("DEBUG")]
private static void DebugLog(string format, IntPtr cctorMethod, int threadId)
{
    // We cannot utilize any of the typical number formatting code because it triggers globalization code to run
    // and this cctor code is layered below globalization.
#if DEBUG
    Debug.WriteLine(format, ToHexString(cctorMethod), ToHexString(threadId));
#endif
}

// We cannot utilize any of the typical number formatting code because it triggers globalization code to run
// and this cctor code is layered below globalization.
#if DEBUG
// Formats a 32-bit value as (up to) 8 lowercase hex digits without leading-zero padding.
static string ToHexString(int num)
{
    return ToHexStringUnsignedLong((ulong)num, false, 8);
}

// Formats a pointer-sized value as (up to) 16 lowercase hex digits without leading-zero padding.
static string ToHexString(IntPtr num)
{
    return ToHexStringUnsignedLong((ulong)num, false, 16);
}

// Maps a nibble (0..15) to its lowercase hex digit.
static char GetHexChar(uint u)
{
    if (u < 10)
        return unchecked((char)('0' + u));
    return unchecked((char)('a' + (u - 10)));
}

// Minimal hex formatter: fills `chars` from the least-significant digit backwards and
// stops early when the value is exhausted (unless zeroPrepad asks for full width).
static public unsafe string ToHexStringUnsignedLong(ulong u, bool zeroPrepad, int numChars)
{
    char[] chars = new char[numChars];
    int i = numChars - 1;
    for (; i >= 0; i--)
    {
        chars[i] = GetHexChar((uint)(u % 16));
        u = u / 16;
        if ((i == 0) || (!zeroPrepad && (u == 0)))
            break;
    }
    string str;
    fixed (char* p = &chars[i])
    {
        str = new String(p, 0, numChars - i);
    }
    return str;
}
#endif
}
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) Under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for Additional information regarding copyright ownership.
   The ASF licenses this file to You Under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed Under the License is distributed on an "AS Is" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations Under the License.
==================================================================== */

namespace NPOI.HSSF.Record
{
    using System;
    using System.Text;
    using NPOI.Util;
    using System.IO;
    using System.Collections.Generic;

    /**
     * The obj record is used to hold various graphic objects and controls.
     * An OBJ record is either a parsed list of sub-records (first is ftCmo, last is ftEnd)
     * or, for malformed records that Excel tolerates, the raw uninterpreted payload.
     *
     * @author Glen Stampoultzis (glens at apache.org)
     */
    public class ObjRecord : Record, ICloneable
    {
        private const int NORMAL_PAD_ALIGNMENT = 2;
        private const int MAX_PAD_ALIGNMENT = 4;

        public const short sid = 0x5D;

        // Parsed sub-records; null when the payload could not be interpreted.
        private List<SubRecord> subrecords;
        /** used when POI has no idea what is going on */
        private byte[] _uninterpretedData;
        /**
         * Excel seems to tolerate padding to quad or double byte length
         */
        private bool _isPaddedToQuadByteMultiple;

        //00000000 15 00 12 00 01 00 01 00 11 60 00 00 00 00 00 0D .........`......
        //00000010 26 01 00 00 00 00 00 00 00 00                   &.........

        public ObjRecord()
        {
            subrecords = new List<SubRecord>(2);
            // TODO - ensure 2 sub-records (ftCmo 15h, and ftEnd 00h) are always created
            _uninterpretedData = null;
        }

        /**
         * Constructs a OBJ record and Sets its fields appropriately.
         *
         * @param in the RecordInputstream to Read the record from
         */
        public ObjRecord(RecordInputStream in1)
        {
            // TODO - problems with OBJ sub-records stream
            // MS spec says first sub-record is always CommonObjectDataSubRecord, and last is
            // always EndSubRecord. OOO spec does not mention ObjRecord(0x005D).
            // Existing POI test data seems to violate that rule. Some test data seems to contain
            // garbage, and a crash is only averted by stopping at what looks like the 'EndSubRecord'

            // Check if this can be continued, if so then the following wont work properly
            byte[] subRecordData = in1.ReadRemainder();
            if (LittleEndian.GetUShort(subRecordData, 0) != CommonObjectDataSubRecord.sid)
            {
                // seems to occur in just one junit on "OddStyleRecord.xls" (file created by CrystalReports)
                // Excel tolerates the funny ObjRecord, and replaces it with a corrected version
                // The exact logic/reasoning is not yet understood.
                // Keep the raw bytes so the record round-trips unchanged.
                _uninterpretedData = subRecordData;
                subrecords = null;
                return;
            }

            // Note: subRecordData.Length is NOT validated as even here; some real-world files
            // carry odd-length payloads that Excel accepts.
            subrecords = new List<SubRecord>();
            using (MemoryStream bais = new MemoryStream(subRecordData))
            {
                LittleEndianInputStream subRecStream = new LittleEndianInputStream(bais);
                CommonObjectDataSubRecord cmo = (CommonObjectDataSubRecord)SubRecord.CreateSubRecord(subRecStream, 0);
                subrecords.Add(cmo);
                while (true)
                {
                    SubRecord subRecord = SubRecord.CreateSubRecord(subRecStream, cmo.ObjectType);
                    subrecords.Add(subRecord);
                    if (subRecord.IsTerminating)
                    {
                        break;
                    }
                }
                int nRemainingBytes = subRecStream.Available();
                if (nRemainingBytes > 0)
                {
                    // At present (Oct-2008), most unit test samples have (subRecordData.length % 2 == 0)
                    _isPaddedToQuadByteMultiple = subRecordData.Length % MAX_PAD_ALIGNMENT == 0;
                    if (nRemainingBytes >= (_isPaddedToQuadByteMultiple ? MAX_PAD_ALIGNMENT : NORMAL_PAD_ALIGNMENT))
                    {
                        if (!CanPaddingBeDiscarded(subRecordData, nRemainingBytes))
                        {
                            String msg = "Leftover " + nRemainingBytes
                                + " bytes in subrecord data " + HexDump.ToHex(subRecordData);
                            throw new RecordFormatException(msg);
                        }
                        _isPaddedToQuadByteMultiple = false;
                    }
                }
                else
                {
                    _isPaddedToQuadByteMultiple = false;
                }
                _uninterpretedData = null;
            }
        }

        /**
         * Some XLS files have ObjRecords with nearly 8Kb of excessive padding. These were probably
         * written by a version of POI (around 3.1) which incorrectly interpreted the second short of
         * the ftLbs subrecord (0x1FEE) as a length, and read that many bytes as padding (other bugs
         * helped allow this to occur).
         *
         * Excel reads files with this excessive padding OK, truncating the over-sized ObjRecord back
         * to the its proper size. POI does the same.
         */
        private static bool CanPaddingBeDiscarded(byte[] data, int nRemainingBytes)
        {
            // make sure none of the padding looks important
            for (int i = data.Length - nRemainingBytes; i < data.Length; i++)
            {
                if (data[i] != 0x00)
                {
                    return false;
                }
            }
            return true;
        }

        public override String ToString()
        {
            StringBuilder sb = new StringBuilder();
            sb.Append("[OBJ]\n");
            // Guard against the uninterpreted-data case, where subrecords is null
            // (the previous implementation threw NullReferenceException here).
            if (subrecords != null)
            {
                for (int i = 0; i < subrecords.Count; i++)
                {
                    SubRecord record = subrecords[i];
                    sb.Append("SUBRECORD: ").Append(record.ToString());
                }
            }
            sb.Append("[/OBJ]\n");
            return sb.ToString();
        }

        public override int Serialize(int offset, byte[] data)
        {
            int recSize = RecordSize;
            int dataSize = recSize - 4;

            LittleEndianByteArrayOutputStream out1 = new LittleEndianByteArrayOutputStream(data, offset, recSize);

            out1.WriteShort(sid);
            out1.WriteShort(dataSize);

            if (_uninterpretedData == null)
            {
                for (int i = 0; i < subrecords.Count; i++)
                {
                    SubRecord record = subrecords[i];
                    record.Serialize(out1);
                }
                int expectedEndIx = offset + dataSize;
                // padding to the alignment Excel expects
                while (out1.WriteIndex < expectedEndIx)
                {
                    out1.WriteByte(0);
                }
            }
            else
            {
                out1.Write(_uninterpretedData);
            }
            return recSize;
        }

        /**
         * Size of record (excluding 4 byte header)
         */
        public override int RecordSize
        {
            get
            {
                if (_uninterpretedData != null)
                {
                    return _uninterpretedData.Length + 4;
                }
                int size = 0;
                for (int i = subrecords.Count - 1; i >= 0; i--)
                {
                    SubRecord record = subrecords[i];
                    size += record.DataSize + 4;
                }
                // Round the payload up to the same alignment that was observed when reading.
                if (_isPaddedToQuadByteMultiple)
                {
                    while (size % MAX_PAD_ALIGNMENT != 0)
                    {
                        size++;
                    }
                }
                else
                {
                    while (size % NORMAL_PAD_ALIGNMENT != 0)
                    {
                        size++;
                    }
                }
                return size + 4;
            }
        }

        public override short Sid
        {
            get { return sid; }
        }

        public List<SubRecord> SubRecords
        {
            get { return subrecords; }
        }

        public void ClearSubRecords()
        {
            subrecords.Clear();
        }

        public void AddSubRecord(int index, SubRecord element)
        {
            subrecords.Insert(index, element);
        }

        public void AddSubRecord(SubRecord o)
        {
            subrecords.Add(o);
        }

        /**
         * Deep-copies this record.
         *
         * Fixes over the previous implementation:
         * - records holding uninterpreted data (subrecords == null) no longer throw
         *   NullReferenceException; the raw payload is copied instead, so the clone
         *   serializes identically to the original;
         * - _isPaddedToQuadByteMultiple is carried over, so the clone's RecordSize
         *   matches the original's padding behaviour.
         */
        public override Object Clone()
        {
            ObjRecord rec = new ObjRecord();
            if (_uninterpretedData != null)
            {
                rec._uninterpretedData = (byte[])_uninterpretedData.Clone();
                rec.subrecords = null;
                return rec;
            }
            rec._isPaddedToQuadByteMultiple = _isPaddedToQuadByteMultiple;
            for (int i = 0; i < subrecords.Count; i++)
            {
                SubRecord record = subrecords[i];
                rec.AddSubRecord((SubRecord)record.Clone());
            }
            return rec;
        }
    }
}
/*******************************************************************************
INTEL CORPORATION PROPRIETARY INFORMATION
This software is supplied under the terms of a license agreement or nondisclosure
agreement with Intel Corporation and may not be copied or disclosed except in
accordance with the terms of that agreement
Copyright(c) 2012-2014 Intel Corporation. All Rights Reserved.
*******************************************************************************/

using UnityEngine;
using System.Collections;
using RSUnityToolkit;

/// <summary>
/// Point cloud viewer. Builds a mesh each frame from the RealSense depth point cloud
/// (optionally textured via the UV map) and culls triangles beyond MaxDepthVal.
/// </summary>
public class PointCloudViewer : MonoBehaviour
{
    #region Public Fields

    /// <summary>
    /// The point cloud material
    /// </summary>
    public Material PointCloudMaterial = null;

    /// <summary>
    /// The max depth value.
    /// </summary>
    public float MaxDepthVal = 90f;

    /// <summary>
    /// When enabled UV Map will be used to show color image on the point cloud
    /// </summary>
    public bool UseUVMap = true;

    #endregion

    #region Private Fields

    // Mesh and buffers are allocated once and reused every frame to avoid per-frame GC pressure.
    private Mesh _mesh;
    private Vector3[] _vertices = null;
    private Vector2[] _uv = null;
    private Vector4[] _tangents = null;     // NOTE(review): allocated but never assigned to the mesh
    private int[] _triangles = null;
    private bool _removeBackTriangles = true;
    private bool _lastUseUVMAP = true;      // tracks UseUVMap so option changes can be detected in Update
    private DrawImages _drawImagesComponent;

    #endregion

    #region Private methods

    /// <summary>
    /// Sets the sense option according to the Stream field
    /// </summary>
    private void SetSenseOptions()
    {
        SenseToolkitManager.Instance.SetSenseOption(SenseOption.SenseOptionID.PointCloud);
        if (UseUVMap)
        {
            SenseToolkitManager.Instance.SetSenseOption(SenseOption.SenseOptionID.UVMap);
        }
    }

    /// <summary>
    /// Unsets the sense option according to the Stream field
    /// </summary>
    private void UnsetSenseOptions()
    {
        SenseToolkitManager.Instance.UnsetSenseOption(SenseOption.SenseOptionID.PointCloud);
        // Uses the *previous* UV setting: this is called while toggling, before _lastUseUVMAP is updated.
        if (_lastUseUVMAP)
        {
            SenseToolkitManager.Instance.UnsetSenseOption(SenseOption.SenseOptionID.UVMap);
        }
    }

    #endregion

    #region Unity's overridden methods

    // Use this for initialization.
    // Ensures a SenseManager exists, attaches MeshFilter/MeshRenderer, and wires up DrawImages.
    void Start ()
    {
        var senseManager = GameObject.FindObjectOfType(typeof(SenseToolkitManager));
        if (senseManager == null)
        {
            Debug.LogWarning("Sense Manager Object not found and was added automatically");
            senseManager = (GameObject)Instantiate(Resources.Load("SenseManager"));
            senseManager.name = "SenseManager";
        }

        SetSenseOptions();

        this.gameObject.AddComponent< MeshFilter > ();
        if (this.GetComponent<MeshRenderer>() == null)
        {
            this.gameObject.AddComponent< MeshRenderer > ();
        }

        if (PointCloudMaterial!=null)
        {
            this.gameObject.GetComponent<Renderer>().material = PointCloudMaterial;
        }

        _drawImagesComponent = this.gameObject.AddComponent<DrawImages>();
        if (UseUVMap)
        {
            _drawImagesComponent.enabled = true;
        }
        else
        {
            _drawImagesComponent.enabled = false;
        }
    }

    // Update is called once per frame.
    // Rebuilds vertices, UVs and triangle indices from the latest point cloud.
    void Update ()
    {
        // React to UseUVMap being toggled in the inspector at runtime.
        if (_lastUseUVMAP != UseUVMap)
        {
            UnsetSenseOptions();
            SetSenseOptions();
            if (UseUVMap)
            {
                _drawImagesComponent.enabled = true;
            }
            else
            {
                _drawImagesComponent.enabled = false;
            }
            _lastUseUVMAP = UseUVMap;
        }

        if (SenseToolkitManager.Instance.PointCloud != null)
        {
            if (_mesh == null)
            {
                // Retrieve a mesh instance
                _mesh = this.gameObject.GetComponent<MeshFilter> ().mesh;
            }

            // Subsample the depth image by _gridSize in both axes to keep the mesh small.
            int _gridSize = 3;
            int width = SenseToolkitManager.Instance.ImageDepthOutput.info.width/_gridSize;
            int height = SenseToolkitManager.Instance.ImageDepthOutput.info.height/_gridSize;

            // Build vertices and UVs (buffers allocated lazily, reused afterwards)
            if (_vertices == null)
            {
                _vertices = new Vector3[width * height];
            }
            if (_tangents == null)
            {
                _tangents = new Vector4[width * height];
            }
            if (_uv == null)
            {
                _uv = new Vector2[width * height];
            }

            int i = 0;
            for (int y=0; y < height; y++)
            {
                // NOTE(review): the inner loop stops at width-1, so the last column of
                // _vertices in each row is never written (stays at its previous/default
                // value) even though the triangle loop below indexes the full width.
                // Looks unintentional - confirm before changing.
                for (int x=0; x < width - 1; x++)
                {
                    // j maps the subsampled (x, y) back to the full-resolution point cloud index.
                    int j = y * width * _gridSize * _gridSize + x * _gridSize;
                    // Division by 10 scales camera units to scene units; Z is negated to
                    // flip into Unity's left-handed forward direction.
                    _vertices [i].x = SenseToolkitManager.Instance.PointCloud[j].x / 10;
                    _vertices [i].y = SenseToolkitManager.Instance.PointCloud[j].y / 10;
                    _vertices [i].z = -SenseToolkitManager.Instance.PointCloud[j].z / 10;

                    if (UseUVMap)
                    {
                        _uv[i].x = SenseToolkitManager.Instance.UvMap[j].x ;
                        _uv[i].y = SenseToolkitManager.Instance.UvMap[j].y ;
                    }
                    i++;
                }
            }

            // Assign them to the mesh
            _mesh.vertices = _vertices;
            _mesh.uv = _uv;

            // Build triangle indices: 3 indices into vertex array for each triangle
            if (_triangles == null)
            {
                _triangles = new int[(height - 1) * (width - 1) *6];
            }

            bool backGroundTriangles = false;
            int index =0;
            for ( int y = 0; y < height - 1 ; y++ )
            {
                for ( int x = 0; x < width - 1; x++ )
                {
                    if (_removeBackTriangles)
                    {
                        // Skip quads whose corners are beyond MaxDepthVal...
                        backGroundTriangles =  ( ( Mathf.Abs(_vertices[y * width + x].z) > MaxDepthVal ) ||
                                                 ( Mathf.Abs(_vertices[y * width + x + 1].z) > MaxDepthVal ) ||
                                                 ( Mathf.Abs(_vertices[(y + 1) * width + x].z )> MaxDepthVal ) ||
                                                 ( Mathf.Abs(_vertices[(y + 1) * width + x + 1].z) > MaxDepthVal ) );

                        // ...or with zero depth (invalid/no depth reading).
                        backGroundTriangles = backGroundTriangles ||
                                               ( ( Mathf.Abs(_vertices[y * width + x].z) == 0 ) ||
                                                 ( Mathf.Abs(_vertices[y * width + x + 1].z) == 0 ) ||
                                                 ( Mathf.Abs(_vertices[(y + 1) * width + x].z ) == 0 ) ||
                                                 ( Mathf.Abs(_vertices[(y + 1) * width + x + 1].z) == 0 ) );
                    }

                    if (!backGroundTriangles)
                    {
                        // For each grid cell output two triangles
                        _triangles [index++] = (y     * width) + x;
                        _triangles [index++] = ((y + 1) * width) + x;
                        _triangles [index++] = (y     * width) + x + 1;

                        _triangles [index++] = ((y + 1) * width) + x;
                        _triangles [index++] = ((y + 1) * width) + x + 1;
                        _triangles [index++] = (y     * width) + x + 1;
                    }
                }
            }
            // Fill the unused tail of the index buffer with degenerate (0,0,0) triangles
            // so stale indices from the previous frame are not rendered.
            for ( ; index < (height - 1) * (width - 1) * 6 ; index++)
            {
                _triangles[index] = 0;
            }

            _mesh.triangles = _triangles;

            // Auto-calculate vertex normals from the mesh
            // NOTE(review): Mesh.Optimize() is deprecated/removed in newer Unity versions - verify target version.
            _mesh.Optimize();
            _mesh.RecalculateNormals ();
        }
    }

    //On enable set sense options
    void OnEnable()
    {
        if (SenseToolkitManager.Instance == null)
        {
            return;
        }
        SetSenseOptions();
    }

    //On disable unset sense options
    void OnDisable()
    {
        UnsetSenseOptions();
    }

    #endregion
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;

namespace CentralConfig.Areas.HelpPage.ModelDescriptions
{
    /// <summary>
    /// Generates model descriptions for given types.
    /// Descriptions are cached by model name in <see cref="GeneratedModels"/>; the cache also
    /// breaks the recursion for self-referential complex types.
    /// </summary>
    public class ModelDescriptionGenerator
    {
        // Modify this to support more data annotation attributes.
        // Maps an annotation attribute type to a function producing its human-readable description.
        private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
        {
            { typeof(RequiredAttribute), a => "Required" },
            { typeof(RangeAttribute), a =>
                {
                    RangeAttribute range = (RangeAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
                }
            },
            { typeof(MaxLengthAttribute), a =>
                {
                    MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
                }
            },
            { typeof(MinLengthAttribute), a =>
                {
                    MinLengthAttribute minLength = (MinLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
                }
            },
            { typeof(StringLengthAttribute), a =>
                {
                    StringLengthAttribute strLength = (StringLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
                }
            },
            { typeof(DataTypeAttribute), a =>
                {
                    DataTypeAttribute dataType = (DataTypeAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
                }
            },
            { typeof(RegularExpressionAttribute), a =>
                {
                    RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
                }
            },
        };

        // Modify this to add more default documentations.
        // Fallback documentation text for common primitive/simple types.
        private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
        {
            { typeof(Int16), "integer" },
            { typeof(Int32), "integer" },
            { typeof(Int64), "integer" },
            { typeof(UInt16), "unsigned integer" },
            { typeof(UInt32), "unsigned integer" },
            { typeof(UInt64), "unsigned integer" },
            { typeof(Byte), "byte" },
            { typeof(Char), "character" },
            { typeof(SByte), "signed byte" },
            { typeof(Uri), "URI" },
            { typeof(Single), "decimal number" },
            { typeof(Double), "decimal number" },
            { typeof(Decimal), "decimal number" },
            { typeof(String), "string" },
            { typeof(Guid), "globally unique identifier" },
            { typeof(TimeSpan), "time interval" },
            { typeof(DateTime), "date" },
            { typeof(DateTimeOffset), "date" },
            { typeof(Boolean), "boolean" },
        };

        // Lazily resolved so the documentation provider is fetched only on first use.
        private Lazy<IModelDocumentationProvider> _documentationProvider;

        public ModelDescriptionGenerator(HttpConfiguration config)
        {
            if (config == null)
            {
                throw new ArgumentNullException("config");
            }

            _documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
            GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
        }

        // Cache of generated descriptions, keyed case-insensitively by model name.
        public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }

        private IModelDocumentationProvider DocumentationProvider
        {
            get
            {
                return _documentationProvider.Value;
            }
        }

        // Entry point: returns a cached description or dispatches to the appropriate
        // Generate* method based on the shape of the type (simple, enum, collection,
        // dictionary, key/value pair, or complex).
        public ModelDescription GetOrCreateModelDescription(Type modelType)
        {
            if (modelType == null)
            {
                throw new ArgumentNullException("modelType");
            }

            // Nullable<T> is described as its underlying type.
            Type underlyingType = Nullable.GetUnderlyingType(modelType);
            if (underlyingType != null)
            {
                modelType = underlyingType;
            }

            ModelDescription modelDescription;
            string modelName = ModelNameHelper.GetModelName(modelType);
            if (GeneratedModels.TryGetValue(modelName, out modelDescription))
            {
                if (modelType != modelDescription.ModelType)
                {
                    // Two distinct types mapped to the same model name - ambiguous for help pages.
                    throw new InvalidOperationException(
                        String.Format(
                            CultureInfo.CurrentCulture,
                            "A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
                            "Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
                            modelName,
                            modelDescription.ModelType.FullName,
                            modelType.FullName));
                }

                return modelDescription;
            }

            if (DefaultTypeDocumentation.ContainsKey(modelType))
            {
                return GenerateSimpleTypeModelDescription(modelType);
            }

            if (modelType.IsEnum)
            {
                return GenerateEnumTypeModelDescription(modelType);
            }

            if (modelType.IsGenericType)
            {
                Type[] genericArguments = modelType.GetGenericArguments();

                if (genericArguments.Length == 1)
                {
                    Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
                    if (enumerableType.IsAssignableFrom(modelType))
                    {
                        return GenerateCollectionModelDescription(modelType, genericArguments[0]);
                    }
                }
                if (genericArguments.Length == 2)
                {
                    // IDictionary<,> is checked before KeyValuePair<,> since it is the more specific shape.
                    Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
                    if (dictionaryType.IsAssignableFrom(modelType))
                    {
                        return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
                    }

                    Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
                    if (keyValuePairType.IsAssignableFrom(modelType))
                    {
                        return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
                    }
                }
            }

            if (modelType.IsArray)
            {
                Type elementType = modelType.GetElementType();
                return GenerateCollectionModelDescription(modelType, elementType);
            }

            if (modelType == typeof(NameValueCollection))
            {
                return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
            }

            if (typeof(IDictionary).IsAssignableFrom(modelType))
            {
                return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
            }

            if (typeof(IEnumerable).IsAssignableFrom(modelType))
            {
                return GenerateCollectionModelDescription(modelType, typeof(object));
            }

            return GenerateComplexTypeModelDescription(modelType);
        }

        // Change this to provide different name for the member.
        // Precedence: [JsonProperty] name, then [DataMember] name (only when the declaring
        // type has [DataContract]), then the CLR member name.
        private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
        {
            JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
            if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName))
            {
                return jsonProperty.PropertyName;
            }

            if (hasDataContractAttribute)
            {
                DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
                if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name))
                {
                    return dataMember.Name;
                }
            }

            return member.Name;
        }

        private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
        {
            JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>();
            XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>();
            IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>();
            NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>();
            ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();

            // Enum members are opted in via [EnumMember]; other members via [DataMember].
            bool hasMemberAttribute = member.DeclaringType.IsEnum ?
                member.GetCustomAttribute<EnumMemberAttribute>() != null :
                member.GetCustomAttribute<DataMemberAttribute>() != null;

            // Display member only if all the followings are true:
            // no JsonIgnoreAttribute
            // no XmlIgnoreAttribute
            // no IgnoreDataMemberAttribute
            // no NonSerializedAttribute
            // no ApiExplorerSettingsAttribute with IgnoreApi set to true
            // no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
            return jsonIgnore == null &&
                xmlIgnore == null &&
                ignoreDataMember == null &&
                nonSerialized == null &&
                (apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) &&
                (!hasDataContractAttribute || hasMemberAttribute);
        }

        // Prefers the fixed per-type text, falling back to the XML documentation provider.
        // May return null when neither source has documentation for the type.
        private string CreateDefaultDocumentation(Type type)
        {
            string documentation;
            if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
            {
                return documentation;
            }
            if (DocumentationProvider != null)
            {
                documentation = DocumentationProvider.GetDocumentation(type);
            }

            return documentation;
        }

        // Converts the member's recognized data-annotation attributes into ParameterAnnotations.
        private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
        {
            List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();

            IEnumerable<Attribute> attributes = property.GetCustomAttributes();
            foreach (Attribute attribute in attributes)
            {
                Func<object, string> textGenerator;
                if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
                {
                    annotations.Add(
                        new ParameterAnnotation
                        {
                            AnnotationAttribute = attribute,
                            Documentation = textGenerator(attribute)
                        });
                }
            }

            // Rearrange the annotations
            annotations.Sort((x, y) =>
            {
                // Special-case RequiredAttribute so that it shows up on top
                if (x.AnnotationAttribute is RequiredAttribute)
                {
                    return -1;
                }
                if (y.AnnotationAttribute is RequiredAttribute)
                {
                    return 1;
                }

                // Sort the rest based on alphabetic order of the documentation
                return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
            });

            foreach (ParameterAnnotation annotation in annotations)
            {
                propertyModel.Annotations.Add(annotation);
            }
        }

        private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
        {
            ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType);
            if (collectionModelDescription != null)
            {
                return new CollectionModelDescription
                {
                    Name = ModelNameHelper.GetModelName(modelType),
                    ModelType = modelType,
                    ElementDescription = collectionModelDescription
                };
            }

            return null;
        }

        private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
        {
            ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };

            // Register the (still-empty) description BEFORE recursing into member types,
            // so self-referential types terminate via the cache lookup.
            GeneratedModels.Add(complexModelDescription.Name, complexModelDescription);
            bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
            PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            foreach (PropertyInfo property in properties)
            {
                if (ShouldDisplayMember(property, hasDataContractAttribute))
                {
                    ParameterDescription propertyModel = new ParameterDescription
                    {
                        Name = GetMemberName(property, hasDataContractAttribute)
                    };

                    if (DocumentationProvider != null)
                    {
                        propertyModel.Documentation = DocumentationProvider.GetDocumentation(property);
                    }

                    GenerateAnnotations(property, propertyModel);
                    complexModelDescription.Properties.Add(propertyModel);
                    propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType);
                }
            }

            FieldInfo[] fields = modelType.GetFields(BindingFlags.Public | BindingFlags.Instance);
            foreach (FieldInfo field in fields)
            {
                if (ShouldDisplayMember(field, hasDataContractAttribute))
                {
                    ParameterDescription propertyModel = new ParameterDescription
                    {
                        Name = GetMemberName(field, hasDataContractAttribute)
                    };

                    if (DocumentationProvider != null)
                    {
                        propertyModel.Documentation = DocumentationProvider.GetDocumentation(field);
                    }

                    complexModelDescription.Properties.Add(propertyModel);
                    propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType);
                }
            }

            return complexModelDescription;
        }

        private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
        {
            ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
            ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
            return new DictionaryModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                KeyModelDescription = keyModelDescription,
                ValueModelDescription = valueModelDescription
            };
        }

        private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
        {
            EnumTypeModelDescription enumDescription = new EnumTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };
            bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
            foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
            {
                if (ShouldDisplayMember(field, hasDataContractAttribute))
                {
                    EnumValueDescription enumValue = new EnumValueDescription
                    {
                        Name = field.Name,
                        Value = field.GetRawConstantValue().ToString()
                    };
                    if (DocumentationProvider != null)
                    {
                        enumValue.Documentation = DocumentationProvider.GetDocumentation(field);
                    }
                    enumDescription.Values.Add(enumValue);
                }
            }
            GeneratedModels.Add(enumDescription.Name, enumDescription);

            return enumDescription;
        }

        private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
        {
            ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
            ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
            return new KeyValuePairModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                KeyModelDescription = keyModelDescription,
                ValueModelDescription = valueModelDescription
            };
        }

        private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
        {
            SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };
            GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription);

            return simpleModelDescription;
        }
    }
}
using System;
using System.Text;

namespace MbUnit.Framework.Tests.Asserts
{
    /// <summary>
    /// Exercises ArrayAssert.AreEqual across all supported element types,
    /// including the null-array cases.
    /// </summary>
    [TestFixture]
    public class ArrayAssert_Test
    {
        // Each test compares two five-element arrays whose first four slots are
        // populated identically; the fifth slot is left at the type's default value.

        [Test]
        public void AreEqualBool()
        {
            bool[] arr1 = { true, false, true, false, false };
            bool[] arr2 = { true, false, true, false, false };
            ArrayAssert.AreEqual(arr1, arr2);
        }

        [Test]
        public void AreEqualBoolNull()
        {
            bool[] arr1 = null;
            bool[] arr2 = null;
            ArrayAssert.AreEqual(arr1, arr2);
        }

        [Test]
        public void AreEqualChar()
        {
            Char[] arr1 = { char.MaxValue, char.MinValue, char.MaxValue, char.MinValue, '\0' };
            Char[] arr2 = { char.MaxValue, char.MinValue, char.MaxValue, char.MinValue, '\0' };
            ArrayAssert.AreEqual(arr1, arr2);
        }

        [Test]
        public void AreEqualCharNull()
        {
            Char[] arr1 = null;
            Char[] arr2 = null;
            ArrayAssert.AreEqual(arr1, arr2);
        }

        [Test]
        public void AreEqualByte()
        {
            byte[] arr1 = { byte.MaxValue, byte.MinValue, byte.MaxValue, byte.MinValue, 0 };
            byte[] arr2 = { byte.MaxValue, byte.MinValue, byte.MaxValue, byte.MinValue, 0 };
            ArrayAssert.AreEqual(arr1, arr2);
        }

        [Test]
        public void AreEqualByteNull()
        {
            byte[] arr1 = null;
            byte[] arr2 = null;
            ArrayAssert.AreEqual(arr1, arr2);
        }

        [Test]
        public void AreEqualInt()
        {
            int[] arr1 = { int.MaxValue, int.MinValue, int.MaxValue, int.MinValue, 0 };
            int[] arr2 = { int.MaxValue, int.MinValue, int.MaxValue, int.MinValue, 0 };
            ArrayAssert.AreEqual(arr1, arr2);
        }

        [Test]
        public void AreEqualIntNull()
        {
            int[] arr1 = null;
            int[] arr2 = null;
            ArrayAssert.AreEqual(arr1, arr2);
        }

        [Test]
        public void AreEqualLong()
        {
            long[] arr1 = { long.MaxValue, long.MinValue, long.MaxValue, long.MinValue, 0L };
            long[] arr2 = { long.MaxValue, long.MinValue, long.MaxValue, long.MinValue, 0L };
            ArrayAssert.AreEqual(arr1, arr2);
        }

        [Test]
        public void AreEqualLongNull()
        {
            long[] arr1 = null;
            long[] arr2 = null;
            ArrayAssert.AreEqual(arr1, arr2);
        }

        [Test]
        public void AreEqualFloat()
        {
            // Floating-point overloads take an explicit delta; zero means exact equality.
            float[] arr1 = { float.MaxValue, float.MinValue, float.MaxValue, float.MinValue, 0f };
            float[] arr2 = { float.MaxValue, float.MinValue, float.MaxValue, float.MinValue, 0f };
            ArrayAssert.AreEqual(arr1, arr2, 0);
        }

        [Test]
        public void AreEqualFloatNull()
        {
            float[] arr1 = null;
            float[] arr2 = null;
            ArrayAssert.AreEqual(arr1, arr2, 0);
        }

        [Test]
        public void AreEqualDouble()
        {
            double[] arr1 = { double.MaxValue, double.MinValue, double.MaxValue, double.MinValue, 0d };
            double[] arr2 = { double.MaxValue, double.MinValue, double.MaxValue, double.MinValue, 0d };
            ArrayAssert.AreEqual(arr1, arr2, 0);
        }

        [Test]
        public void AreEqualDoubleNull()
        {
            double[] arr1 = null;
            double[] arr2 = null;
            ArrayAssert.AreEqual(arr1, arr2, 0);
        }

        [Test]
        public void AreEqualObject()
        {
            object[] arr1 = { true, char.MaxValue, float.MaxValue, double.MinValue, null };
            object[] arr2 = { true, char.MaxValue, float.MaxValue, double.MinValue, null };
            ArrayAssert.AreEqual(arr1, arr2);
        }

        [Test]
        public void AreEqualObjectNull()
        {
            object[] arr1 = null;
            object[] arr2 = null;
            ArrayAssert.AreEqual(arr1, arr2);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections; using System.Collections.Concurrent; namespace Avro.Reflect { /// <summary> /// Class holds a cache of C# classes and their properties. The key for the cache is the schema full name. 
/// </summary>
public class ClassCache
{
    // Converters shared by every cache instance; ConcurrentBag because they are
    // added from arbitrary threads and only ever enumerated.
    private static ConcurrentBag<IAvroFieldConverter> _defaultConverters = new ConcurrentBag<IAvroFieldConverter>();

    // Map from schema full name to the reflected class wrapper.
    private ConcurrentDictionary<string, DotnetClass> _nameClassMap = new ConcurrentDictionary<string, DotnetClass>();

    // Map from "helper" metadata name to an ArrayHelper-derived type.
    private ConcurrentDictionary<string, Type> _nameArrayMap = new ConcurrentDictionary<string, Type>();

    // Fields already visited by LoadClassCache, used to break recursion on
    // self-referential record schemas.
    private ConcurrentDictionary<string, Schema> _previousFields = new ConcurrentDictionary<string, Schema>();

    /// <summary>
    /// Register a mapping from a record schema to a C# class, if not already present.
    /// </summary>
    /// <param name="schema">Record schema to map; must not be null.</param>
    /// <param name="dotnetClass">Class to associate with the schema.</param>
    /// <exception cref="ArgumentNullException">If <paramref name="schema"/> is null.</exception>
    /// <exception cref="AvroException">If <paramref name="dotnetClass"/> is not a class.</exception>
    private void AddClassNameMapItem(RecordSchema schema, Type dotnetClass)
    {
        // FIX: the original guard (schema != null && GetClass(schema) != null)
        // let a null schema fall through and crash later with a
        // NullReferenceException at schema.Fullname. Fail fast instead.
        if (schema == null)
        {
            throw new ArgumentNullException(nameof(schema));
        }

        if (GetClass(schema) != null)
        {
            return;
        }

        if (!dotnetClass.IsClass)
        {
            throw new AvroException($"Type {dotnetClass.Name} is not a class");
        }

        // TryAdd: losing a race with another thread for the same name is benign.
        _nameClassMap.TryAdd(schema.Fullname, new DotnetClass(dotnetClass, schema, this));
    }

    /// <summary>
    /// Add a default field converter
    /// </summary>
    /// <param name="converter"></param>
    public static void AddDefaultConverter(IAvroFieldConverter converter)
    {
        _defaultConverters.Add(converter);
    }

    /// <summary>
    /// Add a converter defined using Func&lt;&gt;. The converter will be used whenever the source and target types
    /// match and a specific attribute is not defined.
    /// </summary>
    /// <param name="from">Conversion from the Avro type to the property type.</param>
    /// <param name="to">Conversion from the property type back to the Avro type.</param>
    /// <typeparam name="TAvro">Avro-serialized type.</typeparam>
    /// <typeparam name="TProperty">C# property type.</typeparam>
    public static void AddDefaultConverter<TAvro, TProperty>(Func<TAvro, Schema, TProperty> from, Func<TProperty, Schema, TAvro> to)
    {
        _defaultConverters.Add(new FuncFieldConverter<TAvro, TProperty>(from, to));
    }

    /// <summary>
    /// Find a default converter
    /// </summary>
    /// <param name="tag">Schema type tag to convert from.</param>
    /// <param name="propType">C# property type to convert to.</param>
    /// <returns>The first matching converter - null if there isnt one</returns>
    public IAvroFieldConverter GetDefaultConverter(Avro.Schema.Type tag, Type propType)
    {
        // Map the schema tag to the CLR type an Avro value of that tag arrives as.
        // Structured tags (record, array, map, union, enum) have no scalar CLR
        // equivalent, so no default converter applies.
        Type avroType;
        switch (tag)
        {
            case Avro.Schema.Type.Null:
                return null;
            case Avro.Schema.Type.Boolean:
                avroType = typeof(bool);
                break;
            case Avro.Schema.Type.Int:
                avroType = typeof(int);
                break;
            case Avro.Schema.Type.Long:
                avroType = typeof(long);
                break;
            case Avro.Schema.Type.Float:
                avroType = typeof(float);
                break;
            case Avro.Schema.Type.Double:
                avroType = typeof(double);
                break;
            case Avro.Schema.Type.Bytes:
                avroType = typeof(byte[]);
                break;
            case Avro.Schema.Type.String:
                avroType = typeof(string);
                break;
            case Avro.Schema.Type.Record:
                return null;
            case Avro.Schema.Type.Enumeration:
                return null;
            case Avro.Schema.Type.Array:
                return null;
            case Avro.Schema.Type.Map:
                return null;
            case Avro.Schema.Type.Union:
                return null;
            case Avro.Schema.Type.Fixed:
                avroType = typeof(byte[]);
                break;
            case Avro.Schema.Type.Error:
                return null;
            default:
                return null;
        }

        foreach (var c in _defaultConverters)
        {
            if (c.GetAvroType() == avroType && c.GetPropertyType() == propType)
            {
                return c;
            }
        }

        return null;
    }

    /// <summary>
    /// Add an array helper. Array helpers are used for collections that are not generic lists.
    /// </summary>
    /// <param name="name">Name of the helper. Corresponds to metadata "helper" field in the schema.</param>
    /// <param name="helperType">Type of helper. Inherited from ArrayHelper</param>
    public void AddArrayHelper(string name, Type helperType)
    {
        if (!typeof(ArrayHelper).IsAssignableFrom(helperType))
        {
            throw new AvroException($"{helperType.Name} is not an ArrayHelper");
        }

        _nameArrayMap.TryAdd(name, helperType);
    }

    /// <summary>
    /// Find an array helper for an array schema node.
    /// </summary>
    /// <param name="schema">Schema</param>
    /// <param name="enumerable">The array object. If it is null then Add(), Count() and Clear methods will throw exceptions.</param>
    /// <returns>A registered helper matching the schema's "helper" metadata, or a plain ArrayHelper.</returns>
    public ArrayHelper GetArrayHelper(ArraySchema schema, IEnumerable enumerable)
    {
        Type h;
        // note ArraySchema is unnamed and doesnt have a Fullname, use "helper" metadata
        // metadata is json string, strip quotes
        string s = null;
        s = schema.GetHelper();
        if (s != null && _nameArrayMap.TryGetValue(s, out h))
        {
            return (ArrayHelper)Activator.CreateInstance(h, enumerable);
        }

        return (ArrayHelper)Activator.CreateInstance(typeof(ArrayHelper), enumerable);
    }

    /// <summary>
    /// Find a class that matches the schema full name.
    /// </summary>
    /// <param name="schema"></param>
    /// <returns>The registered class wrapper, or null if none was registered.</returns>
    public DotnetClass GetClass(RecordSchema schema)
    {
        DotnetClass c;
        if (!_nameClassMap.TryGetValue(schema.Fullname, out c))
        {
            return null;
        }

        return c;
    }

    /// <summary>
    /// Add an entry to the class cache.
    /// </summary>
    /// <param name="objType">Type of the C# class</param>
    /// <param name="s">Schema</param>
    public void LoadClassCache(Type objType, Schema s)
    {
        switch (s)
        {
            case RecordSchema rs:
                if (!objType.IsClass)
                {
                    throw new AvroException($"Cant map scalar type {objType.Name} to record {rs.Fullname}");
                }

                if (typeof(byte[]).IsAssignableFrom(objType)
                    || typeof(string).IsAssignableFrom(objType)
                    || typeof(IEnumerable).IsAssignableFrom(objType)
                    || typeof(IDictionary).IsAssignableFrom(objType))
                {
                    throw new AvroException($"Cant map type {objType.Name} to record {rs.Fullname}");
                }

                AddClassNameMapItem(rs, objType);
                var c = GetClass(rs);
                foreach (var f in rs.Fields)
                {
                    /*
                    //.StackOverflowException
                    var t = c.GetPropertyType(f);
                    LoadClassCache(t, f.Schema);
                    */
                    // Recurse into each field only once; TryAdd returning false
                    // means the field name was already visited (guards against
                    // infinite recursion on self-referential records).
                    // NOTE(review): keyed by field name only, not schema fullname —
                    // fields with the same name on different records share one slot;
                    // confirm this is intended.
                    if (_previousFields.TryAdd(f.Name, f.Schema))
                    {
                        var t = c.GetPropertyType(f);
                        LoadClassCache(t, f.Schema);
                    }
                }

                break;
            case ArraySchema ars:
                if (!typeof(IEnumerable).IsAssignableFrom(objType))
                {
                    throw new AvroException($"Cant map type {objType.Name} to array {ars.Name}");
                }

                if (!objType.IsGenericType)
                {
                    throw new AvroException($"{objType.Name} needs to be a generic type");
                }

                LoadClassCache(objType.GenericTypeArguments[0], ars.ItemSchema);
                break;
            case MapSchema ms:
                if (!typeof(IDictionary).IsAssignableFrom(objType))
                {
                    throw new AvroException($"Cant map type {objType.Name} to map {ms.Name}");
                }

                if (!objType.IsGenericType)
                {
                    throw new AvroException($"Cant map non-generic type {objType.Name} to map {ms.Name}");
                }

                if (!typeof(string).IsAssignableFrom(objType.GenericTypeArguments[0]))
                {
                    throw new AvroException($"First type parameter of {objType.Name} must be assignable to string");
                }

                LoadClassCache(objType.GenericTypeArguments[1], ms.ValueSchema);
                break;
            case NamedSchema ns:
                EnumCache.AddEnumNameMapItem(ns, objType);
                break;
            case UnionSchema us:
                if (us.Schemas.Count == 2
                    && (us.Schemas[0].Tag == Schema.Type.Null || us.Schemas[1].Tag == Schema.Type.Null)
                    && objType.IsClass)
                {
                    // in this case objType will match the non null type in the union
                    foreach (var o in us.Schemas)
                    {
                        if (o.Tag != Schema.Type.Null)
                        {
                            LoadClassCache(objType, o);
                        }
                    }
                }
                else
                {
                    // check the schema types are registered
                    foreach (var o in us.Schemas)
                    {
                        if (o.Tag == Schema.Type.Record && GetClass(o as RecordSchema) == null)
                        {
                            throw new AvroException($"Class for union record type {o.Fullname} is not registered. Create a ClassCache object and call LoadClassCache");
                        }
                    }
                }

                break;
        }
    }
}
}
/* * Copyright (c) Contributors, http://aurora-sim.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Aurora-Sim Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections.Generic;
using System.Reflection;
using System.Threading;
using Nini.Config;
using OpenMetaverse;
using Aurora.Framework;
using OpenSim.Region.Framework.Interfaces;

namespace Aurora.BotManager
{
    /// <summary>
    /// Region module that creates, tags, steers, and removes scripted bot
    /// avatars. Bots are keyed by their agent UUID in m_bots; every mutating
    /// operation is gated by CheckPermission against the bot's creator.
    /// NOTE(review): m_bots and m_botTags are plain Dictionaries with no
    /// locking — presumably all calls arrive on one scene thread; confirm.
    /// </summary>
    public class BotManager : ISharedRegionModule, IBotManager
    {
        // All live bots, keyed by the bot avatar's (or character prim's) UUID.
        private readonly Dictionary<UUID, Bot> m_bots = new Dictionary<UUID, Bot>();

        #region ISharedRegionModule Members

        public void Initialise(IConfigSource source)
        {
        }

        public void AddRegion(IScene scene)
        {
            // Registered twice: once under IBotManager, once under the concrete type.
            scene.RegisterModuleInterface<IBotManager>(this);
            scene.RegisterModuleInterface(this);
        }

        public void RemoveRegion(IScene scene)
        {
        }

        public void RegionLoaded(IScene scene)
        {
        }

        public void PostInitialise()
        {
        }

        public void Close()
        {
            // Drops bookkeeping only; does not close or de-spawn the bots themselves.
            m_bots.Clear();
        }

        public Type ReplaceableInterface
        {
            get { return null; }
        }

        public string Name
        {
            get { return GetType().AssemblyQualifiedName; }
        }

        #endregion

        #region IBotManager

        /// <summary>
        /// Creates a new bot inworld
        /// </summary>
        /// <param name = "FirstName"></param>
        /// <param name = "LastName"></param>
        /// <param name = "cloneAppearanceFrom">UUID of the avatar whos appearance will be copied to give this bot an appearance</param>
        /// <returns>ID of the bot, or UUID.Zero if the presence never appeared in the scene</returns>
        public UUID CreateAvatar(string FirstName, string LastName, IScene scene, UUID cloneAppearanceFrom,
                                 UUID creatorID, Vector3 startPos)
        {
            AgentCircuitData m_aCircuitData = new AgentCircuitData
                                                  {
                                                      child = false,
                                                      circuitcode = (uint) Util.RandomClass.Next(),
                                                      Appearance = GetAppearance(cloneAppearanceFrom, scene)
                                                  };
            //Add the circuit data so they can login
            //Sets up appearance
            if (m_aCircuitData.Appearance == null)
            {
                // Source avatar had no retrievable appearance; fall back to defaults.
                m_aCircuitData.Appearance = new AvatarAppearance {Wearables = AvatarWearable.DefaultWearables};
            }
            //Create the new bot data
            BotClientAPI m_character = new BotClientAPI(scene, m_aCircuitData)
                                           {FirstName = FirstName, LastName = LastName};

            m_aCircuitData.AgentID = m_character.AgentId;
            m_aCircuitData.Appearance.Owner = m_character.AgentId;
            // Re-home every cloned attachment: copy the inventory item to the bot,
            // then point the appearance slot at the new item ID.
            List<AvatarAttachment> attachments = m_aCircuitData.Appearance.GetAttachments();
            m_aCircuitData.Appearance.ClearAttachments();
            foreach (AvatarAttachment t in attachments)
            {
                InventoryItemBase item = scene.InventoryService.GetItem(new InventoryItemBase(t.ItemID));
                if (item != null)
                {
                    item.ID = UUID.Random();
                    item.Owner = m_character.AgentId;
                    item.Folder = UUID.Zero;
                    scene.InventoryService.AddItemAsync(item, null);
                    //Now fix the ItemID
                    m_aCircuitData.Appearance.SetAttachment(t.AttachPoint, item.ID, t.AssetID);
                }
            }

            scene.AuthenticateHandler.AgentCircuits.Add(m_character.CircuitCode, m_aCircuitData);
            //This adds them to the scene and sets them inworld
            AddAndWaitUntilAgentIsAdded(scene, m_character);

            IScenePresence SP = scene.GetScenePresence(m_character.AgentId);
            if (SP == null)
                return UUID.Zero; //Failed!

            Bot bot = new Bot();
            bot.Initialize(SP, creatorID);

            SP.MakeRootAgent(startPos, false, true);
            //Move them
            SP.Teleport(startPos);

            // Force a full presence update to every viewer so the bot appears.
            foreach (var presence in scene.GetScenePresences())
                presence.SceneViewer.QueuePresenceForUpdate(SP, PrimUpdateFlags.ForcedFullUpdate);
            IAttachmentsModule attModule = SP.Scene.RequestModuleInterface<IAttachmentsModule>();
            if (attModule != null)
                foreach (AvatarAttachment att in attachments)
                    attModule.RezSingleAttachmentFromInventory(SP.ControllingClient, att.ItemID, att.AssetID, 0, true);

            // NOTE(review): unlike attModule above, this interface is not
            // null-checked before use — confirm it is always registered.
            IAvatarAppearanceModule appearance = SP.RequestModuleInterface<IAvatarAppearanceModule>();
            appearance.InitialHasWearablesBeenSent = true;

            //Save them in the bots list
            m_bots.Add(m_character.AgentId, bot);
            AddTagToBot(m_character.AgentId, "AllBots", bot.AvatarCreatorID);

            MainConsole.Instance.Info("[RexBotManager]: Added bot " + m_character.Name + " to scene.");
            //Return their UUID
            return m_character.AgentId;
        }

        /// <summary>
        /// Blocks until the scene's AddNewClient completion callback fires.
        /// NOTE(review): busy-waits in 3 ms sleeps with no timeout — hangs
        /// forever if the callback never runs.
        /// </summary>
        private static void AddAndWaitUntilAgentIsAdded(IScene scene, BotClientAPI m_character)
        {
            bool done = false;
            scene.AddNewClient(m_character, delegate { done = true; });
            while (!done)
                Thread.Sleep(3);
        }

        /// <summary>
        /// Removes a bot (avatar bot or character-prim bot) from the scene,
        /// clearing its tags and closing its agent. Silently no-ops if the
        /// entity is missing or the caller lacks permission.
        /// </summary>
        public void RemoveAvatar(UUID avatarID, IScene scene, UUID userAttempting)
        {
            IEntity sp = scene.GetScenePresence(avatarID);
            if (sp == null)
            {
                // Not a presence — maybe a character prim; resolve to its parent entity.
                sp = scene.GetSceneObjectPart(avatarID);
                if (sp == null)
                    return;
                sp = ((ISceneChildEntity) sp).ParentEntity;
            }
            if (!CheckPermission(sp, userAttempting))
                return;

            RemoveAllTagsFromBot(avatarID, userAttempting);

            if (!m_bots.Remove(avatarID))
                return;

            //Kill the agent
            IEntityTransferModule module = scene.RequestModuleInterface<IEntityTransferModule>();
            module.IncomingCloseAgent(scene, avatarID);
        }

        /// <summary>Pause the bot's movement controller (permission-checked).</summary>
        public void PauseMovement(UUID botID, UUID userAttempting)
        {
            Bot bot;
            //Find the bot
            if (m_bots.TryGetValue(botID, out bot))
            {
                if (!CheckPermission(bot, userAttempting))
                    return;
                bot.PauseMovement();
            }
        }

        /// <summary>Resume a previously paused bot (permission-checked).</summary>
        public void ResumeMovement(UUID botID, UUID userAttempting)
        {
            Bot bot;
            //Find the bot
            if (m_bots.TryGetValue(botID, out bot))
            {
                if (!CheckPermission(bot, userAttempting))
                    return;
                bot.ResumeMovement();
            }
        }

        /// <summary>
        /// Sets up where the bot should be walking
        /// </summary>
        /// <param name = "Bot">ID of the bot</param>
        /// <param name = "Positions">List of positions the bot will move to</param>
        /// <param name = "mode">List of what the bot should be doing inbetween the positions</param>
        public void SetBotMap(UUID Bot, List<Vector3> Positions, List<TravelMode> mode, int flags, UUID userAttempting)
        {
            Bot bot;
            //Find the bot
            if (m_bots.TryGetValue(Bot, out bot))
            {
                if (!CheckPermission(bot, userAttempting))
                    return;
                bot.SetPath(Positions, mode, flags);
            }
        }

        /// <summary>
        /// Speed up or slow down the bot
        /// </summary>
        /// <param name = "Bot"></param>
        /// <param name = "modifier"></param>
        public void SetMovementSpeedMod(UUID Bot, float modifier, UUID userAttempting)
        {
            Bot bot;
            if (m_bots.TryGetValue(Bot, out bot))
            {
                if (!CheckPermission(bot, userAttempting))
                    return;
                bot.SetMovementSpeedMod(modifier);
            }
        }

        /// <summary>Toggle flying vs. walking for the bot (permission-checked).</summary>
        public void SetBotShouldFly(UUID botID, bool shouldFly, UUID userAttempting)
        {
            Bot bot;
            if (m_bots.TryGetValue(botID, out bot))
            {
                if (!CheckPermission(bot, userAttempting))
                    return;
                if (shouldFly)
                    bot.DisableWalk();
                else
                    bot.EnableWalk();
            }
        }

        #region Tag/Remove bots

        // Reverse index: tag name -> list of bot UUIDs carrying that tag.
        private readonly Dictionary<string, List<UUID>> m_botTags = new Dictionary<string, List<UUID>>();

        /// <summary>
        /// Attach a tag to a bot. The permission check only applies when the
        /// UUID is a known bot; unknown UUIDs are tagged unconditionally.
        /// </summary>
        public void AddTagToBot(UUID Bot, string tag, UUID userAttempting)
        {
            Bot bot;
            if (m_bots.TryGetValue(Bot, out bot))
            {
                if (!CheckPermission(bot, userAttempting))
                    return;
            }
            if (!m_botTags.ContainsKey(tag))
                m_botTags.Add(tag, new List<UUID>());
            m_botTags[tag].Add(Bot);
        }

        /// <summary>Return a snapshot copy of the UUIDs carrying a tag (never null).</summary>
        public List<UUID> GetBotsWithTag(string tag)
        {
            if (!m_botTags.ContainsKey(tag))
                return new List<UUID>();
            return new List<UUID>(m_botTags[tag]);
        }

        /// <summary>
        /// Remove every bot carrying the given tag (permission-checked per bot).
        /// </summary>
        public void RemoveBots(string tag, UUID userAttempting)
        {
            List<UUID> bots = GetBotsWithTag(tag);
            foreach (UUID bot in bots)
            {
                Bot Bot;
                if (m_bots.TryGetValue(bot, out Bot))
                {
                    if (!CheckPermission(Bot, userAttempting))
                        continue;
                    RemoveTagFromBot(bot, tag, userAttempting);
                    RemoveAvatar(bot, Bot.Controller.GetScene(), userAttempting);
                }
            }
        }

        /// <summary>Remove a single tag from a bot (permission-checked if the bot is known).</summary>
        public void RemoveTagFromBot(UUID Bot, string tag, UUID userAttempting)
        {
            Bot bot;
            if (m_bots.TryGetValue(Bot, out bot))
            {
                if (!CheckPermission(bot, userAttempting))
                    return;
            }
            if (m_botTags.ContainsKey(tag))
                m_botTags[tag].Remove(Bot);
        }

        /// <summary>Strip every tag the given bot carries (permission-checked if known).</summary>
        public void RemoveAllTagsFromBot(UUID Bot, UUID userAttempting)
        {
            Bot bot;
            if (m_bots.TryGetValue(Bot, out bot))
            {
                if (!CheckPermission(bot, userAttempting))
                    return;
            }
            // Collect first, then remove, to avoid mutating m_botTags' lists
            // while enumerating the dictionary.
            List<string> tagsToRemove = new List<string>();
            foreach (KeyValuePair<string, List<UUID>> kvp in m_botTags)
            {
                if (kvp.Value.Contains(Bot))
                    tagsToRemove.Add(kvp.Key);
            }
            foreach (string tag in tagsToRemove)
                m_botTags[tag].Remove(Bot);
        }

        #endregion

        /// <summary>
        /// Finds the given users appearance
        /// </summary>
        /// <param name = "target"></param>
        /// <param name = "scene"></param>
        /// <returns>A copy of the in-scene appearance, or the stored avatar-service
        /// appearance if the user is not present; may be null.</returns>
        private AvatarAppearance GetAppearance(UUID target, IScene scene)
        {
            IScenePresence sp = scene.GetScenePresence(target);
            if (sp != null)
            {
                IAvatarAppearanceModule aa = sp.RequestModuleInterface<IAvatarAppearanceModule>();
                if (aa != null)
                    return new AvatarAppearance(aa.Appearance);
            }
            return scene.AvatarService.GetAppearance(target);
        }

        /// <summary>
        /// Entity overload: true only when the entity is a known bot created by
        /// userAttempting. Non-bot entities always fail.
        /// </summary>
        private bool CheckPermission(IEntity sp, UUID userAttempting)
        {
            foreach (Bot bot in m_bots.Values)
            {
                if (bot.Controller.UUID == sp.UUID)
                    return bot.AvatarCreatorID == userAttempting;
            }
            return false;
        }

        /// <summary>
        /// Bot overload: UUID.Zero acts as a forced override that always passes.
        /// </summary>
        private bool CheckPermission(Bot bot, UUID userAttempting)
        {
            if (userAttempting == UUID.Zero)
                return true; //Forced override
            if (bot != null)
                return bot.AvatarCreatorID == userAttempting;
            return false;
        }

        #endregion

        #region IBotManager

        /// <summary>
        /// Begins to follow the given user
        /// </summary>
        /// <param name = "Bot"></param>
        /// <param name = "modifier"></param>
        public void FollowAvatar(UUID botID, string avatarName, float startFollowDistance, float endFollowDistance,
                                 bool requireLOS, Vector3 offsetFromAvatar, UUID userAttempting)
        {
            Bot bot;
            if (m_bots.TryGetValue(botID, out bot))
            {
                if (!CheckPermission(bot, userAttempting))
                    return;
                bot.FollowAvatar(avatarName, startFollowDistance, endFollowDistance, offsetFromAvatar, requireLOS);
            }
        }

        /// <summary>
        /// Stops following the given user
        /// </summary>
        /// <param name = "Bot"></param>
        /// <param name = "modifier"></param>
        public void StopFollowAvatar(UUID botID, UUID userAttempting)
        {
            Bot bot;
            if (m_bots.TryGetValue(botID, out bot))
            {
                if (!CheckPermission(bot, userAttempting))
                    return;
                bot.StopFollowAvatar();
            }
        }

        /// <summary>
        /// Sends a chat message to all clients
        /// </summary>
        /// <param name = "Bot"></param>
        /// <param name = "modifier"></param>
        public void SendChatMessage(UUID botID, string message, int sayType, int channel, UUID userAttempting)
        {
            Bot bot;
            if (m_bots.TryGetValue(botID, out bot))
            {
                if (!CheckPermission(bot, userAttempting))
                    return;
                bot.SendChatMessage(sayType, message, channel);
            }
        }

        /// <summary>
        /// Sends a chat message to all clients
        /// </summary>
        /// <param name = "Bot"></param>
        /// <param name = "modifier"></param>
        public void SendIM(UUID botID, UUID toUser, string message, UUID userAttempting)
        {
            Bot bot;
            if (m_bots.TryGetValue(botID, out bot))
            {
                if (!CheckPermission(bot, userAttempting))
                    return;
                bot.SendInstantMessage(new GridInstantMessage()
                {
                    binaryBucket = new byte[0],
                    dialog = (byte) InstantMessageDialog.MessageFromAgent,
                    message = message,
                    fromAgentID = botID,
                    fromAgentName = bot.Controller.Name,
                    fromGroup = false,
                    imSessionID = UUID.Random(),
                    offline = 0,
                    ParentEstateID = 0,
                    RegionID = bot.Controller.GetScene().RegionInfo.RegionID,
                    timestamp = (uint) Util.UnixTimeSinceEpoch(),
                    toAgentID = toUser
                });
            }
        }

        #endregion

        #region Character Management

        /// <summary>
        /// Register a scene prim as a bot "character". Any existing character
        /// under the same prim ID is removed first.
        /// </summary>
        public void CreateCharacter(UUID primID, IScene scene)
        {
            RemoveCharacter(primID);
            ISceneEntity entity = scene.GetSceneObjectPart(primID).ParentEntity;
            Bot bot = new Bot();
            bot.Initialize(entity);

            m_bots.Add(primID, bot);
            AddTagToBot(primID, "AllBots", bot.AvatarCreatorID);
        }

        /// <summary>Look up the controller for a character prim; null if none matches.</summary>
        public IBotController GetCharacterManager(UUID primID)
        {
            foreach (Bot bot in m_bots.Values)
            {
                if (bot.Controller.UUID == primID)
                    return bot.Controller;
            }
            return null;
        }

        /// <summary>
        /// Close and unregister a character bot. UUID.Zero is passed to the tag
        /// removal, which acts as the forced-permission override.
        /// </summary>
        public void RemoveCharacter(UUID primID)
        {
            if (m_bots.ContainsKey(primID))
            {
                Bot b = m_bots[primID];
                b.Close(true);
                RemoveAllTagsFromBot(primID, UUID.Zero);
                m_bots.Remove(primID);
            }
        }

        #endregion
    }
}
// Lucene version compatibility level 4.8.1
using J2N;
using System.Globalization;

namespace YAF.Lucene.Net.Analysis.Miscellaneous
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    /// <summary>
    /// A BreakIterator-like API for iterating over subwords in text, according to <see cref="WordDelimiterFilter"/> rules.
    /// @lucene.internal
    /// </summary>
    public sealed class WordDelimiterIterator
    {
        /// <summary>
        /// Indicates the end of iteration </summary>
        public const int DONE = -1;

        // Per-character type table for code points 0..255; characters beyond
        // the table fall back to GetType(int).
        public static readonly byte[] DEFAULT_WORD_DELIM_TABLE = LoadDefaultWordDelimTable();

        internal char[] text;
        private int length;

        /// <summary>
        /// start position of text, excluding leading delimiters </summary>
        private int startBounds;

        /// <summary>
        /// end position of text, excluding trailing delimiters </summary>
        private int endBounds;

        /// <summary>
        /// Beginning of subword </summary>
        internal int current;

        /// <summary>
        /// End of subword </summary>
        internal int end;

        /// <summary>does this string end with a possessive such as 's</summary>
        private bool hasFinalPossessive = false;

        /// <summary>
        /// If false, causes case changes to be ignored (subwords will only be generated
        /// given SUBWORD_DELIM tokens). (Defaults to true)
        /// </summary>
        private readonly bool splitOnCaseChange;

        /// <summary>
        /// If false, causes numeric changes to be ignored (subwords will only be generated
        /// given SUBWORD_DELIM tokens). (Defaults to true)
        /// </summary>
        private readonly bool splitOnNumerics;

        /// <summary>
        /// If true, causes trailing "'s" to be removed for each subword. (Defaults to true)
        /// <p/>
        /// "O'Neil's" => "O", "Neil"
        /// </summary>
        private readonly bool stemEnglishPossessive;

        private readonly byte[] charTypeTable;

        /// <summary>
        /// if true, need to skip over a possessive found in the last call to next() </summary>
        private bool skipPossessive = false;

        // TODO: should there be a WORD_DELIM category for chars that only separate words (no catenation of subwords will be
        // done if separated by these chars?) "," would be an obvious candidate...
        private static byte[] LoadDefaultWordDelimTable() // LUCENENET: Avoid static constructors (see https://github.com/apache/lucenenet/pull/224#issuecomment-469284006)
        {
            var tab = new byte[256];
            for (int i = 0; i < 256; i++)
            {
                byte code = 0;
                if (Character.IsLower(i))
                {
                    code |= WordDelimiterFilter.LOWER;
                }
                else if (Character.IsUpper(i))
                {
                    code |= WordDelimiterFilter.UPPER;
                }
                else if (Character.IsDigit(i))
                {
                    code |= WordDelimiterFilter.DIGIT;
                }
                if (code == 0)
                {
                    // anything that is not a letter or digit acts as a delimiter
                    code = WordDelimiterFilter.SUBWORD_DELIM;
                }
                tab[i] = code;
            }
            return tab;
        }

        /// <summary>
        /// Create a new <see cref="WordDelimiterIterator"/> operating with the supplied rules.
        /// </summary>
        /// <param name="charTypeTable"> table containing character types </param>
        /// <param name="splitOnCaseChange"> if true, causes "PowerShot" to be two tokens; ("Power-Shot" remains two parts regards) </param>
        /// <param name="splitOnNumerics"> if true, causes "j2se" to be three tokens; "j" "2" "se" </param>
        /// <param name="stemEnglishPossessive"> if true, causes trailing "'s" to be removed for each subword: "O'Neil's" => "O", "Neil" </param>
        internal WordDelimiterIterator(byte[] charTypeTable, bool splitOnCaseChange, bool splitOnNumerics, bool stemEnglishPossessive)
        {
            this.charTypeTable = charTypeTable;
            this.splitOnCaseChange = splitOnCaseChange;
            this.splitOnNumerics = splitOnNumerics;
            this.stemEnglishPossessive = stemEnglishPossessive;
        }

        /// <summary>
        /// Advance to the next subword in the string.
        /// </summary>
        /// <returns> index of the next subword, or <see cref="DONE"/> if all subwords have been returned </returns>
        internal int Next()
        {
            // the previous subword's end becomes the scan start
            current = end;
            if (current == DONE)
            {
                return DONE;
            }

            if (skipPossessive)
            {
                // jump over the "'s" flagged by the previous call
                current += 2;
                skipPossessive = false;
            }

            int lastType = 0;

            // skip any leading delimiters before the subword
            while (current < endBounds && (WordDelimiterFilter.IsSubwordDelim(lastType = CharType(text[current]))))
            {
                current++;
            }

            if (current >= endBounds)
            {
                return end = DONE;
            }

            // extend the subword until a type transition marks a break
            for (end = current + 1; end < endBounds; end++)
            {
                int type = CharType(text[end]);
                if (IsBreak(lastType, type))
                {
                    break;
                }
                lastType = type;
            }

            if (end < endBounds - 1 && EndsWithPossessive(end + 2))
            {
                skipPossessive = true;
            }

            return end;
        }

        /// <summary>
        /// Return the type of the current subword.
        /// This currently uses the type of the first character in the subword.
        /// </summary>
        /// <returns> type of the current word </returns>
        internal int Type
        {
            get
            {
                if (end == DONE)
                {
                    return 0;
                }

                int type = CharType(text[current]);
                switch (type)
                {
                    // return ALPHA word type for both lower and upper
                    case WordDelimiterFilter.LOWER:
                    case WordDelimiterFilter.UPPER:
                        return WordDelimiterFilter.ALPHA;
                    default:
                        return type;
                }
            }
        }

        /// <summary>
        /// Reset the text to a new value, and reset all state
        /// </summary>
        /// <param name="text"> New text </param>
        /// <param name="length"> length of the text </param>
        internal void SetText(char[] text, int length)
        {
            this.text = text;
            this.length = this.endBounds = length;
            current = startBounds = end = 0;
            skipPossessive = hasFinalPossessive = false;
            // trims leading/trailing delimiters and records a final possessive
            SetBounds();
        }

        // ================================================= Helper Methods ================================================

        /// <summary>
        /// Determines whether the transition from lastType to type indicates a break
        /// </summary>
        /// <param name="lastType"> Last subword type </param>
        /// <param name="type"> Current subword type </param>
        /// <returns> <c>true</c> if the transition indicates a break, <c>false</c> otherwise </returns>
        private bool IsBreak(int lastType, int type)
        {
            // shared type bit => same category, never a break
            if ((type & lastType) != 0)
            {
                return false;
            }

            if (!splitOnCaseChange && WordDelimiterFilter.IsAlpha(lastType) && WordDelimiterFilter.IsAlpha(type))
            {
                // ALPHA->ALPHA: always ignore if case isn't considered.
                return false;
            }
            else if (WordDelimiterFilter.IsUpper(lastType) && WordDelimiterFilter.IsAlpha(type))
            {
                // UPPER->letter: Don't split
                return false;
            }
            else if (!splitOnNumerics && ((WordDelimiterFilter.IsAlpha(lastType) && WordDelimiterFilter.IsDigit(type)) || (WordDelimiterFilter.IsDigit(lastType) && WordDelimiterFilter.IsAlpha(type))))
            {
                // ALPHA->NUMERIC, NUMERIC->ALPHA :Don't split
                return false;
            }

            return true;
        }

        /// <summary>
        /// Determines if the current word contains only one subword. Note, it could be potentially surrounded by delimiters
        /// </summary>
        /// <returns> <c>true</c> if the current word contains only one subword, <c>false</c> otherwise </returns>
        internal bool IsSingleWord()
        {
            if (hasFinalPossessive)
            {
                // the trailing "'s" (2 chars) is not part of the subword
                return current == startBounds && end == endBounds - 2;
            }
            else
            {
                return current == startBounds && end == endBounds;
            }
        }

        /// <summary>
        /// Set the internal word bounds (remove leading and trailing delimiters). Note, if a possessive is found, don't remove
        /// it yet, simply note it.
        /// </summary>
        private void SetBounds()
        {
            while (startBounds < length && (WordDelimiterFilter.IsSubwordDelim(CharType(text[startBounds]))))
            {
                startBounds++;
            }

            while (endBounds > startBounds && (WordDelimiterFilter.IsSubwordDelim(CharType(text[endBounds - 1]))))
            {
                endBounds--;
            }
            if (EndsWithPossessive(endBounds))
            {
                hasFinalPossessive = true;
            }
            current = startBounds;
        }

        /// <summary>
        /// Determines if the text at the given position indicates an English possessive which should be removed
        /// </summary>
        /// <param name="pos"> Position in the text to check if it indicates an English possessive </param>
        /// <returns> <c>true</c> if the text at the position indicates an English posessive, <c>false</c> otherwise </returns>
        private bool EndsWithPossessive(int pos)
        {
            // "'s"/"'S" preceded by a letter and followed by a delimiter or end-of-bounds
            return (stemEnglishPossessive && pos > 2 && text[pos - 2] == '\'' && (text[pos - 1] == 's' || text[pos - 1] == 'S') && WordDelimiterFilter.IsAlpha(CharType(text[pos - 3])) && (pos == endBounds || WordDelimiterFilter.IsSubwordDelim(CharType(text[pos]))));
        }

        /// <summary>
        /// Determines the type of the given character
        /// </summary>
        /// <param name="ch"> Character whose type is to be determined </param>
        /// <returns> Type of the character </returns>
        private int CharType(int ch)
        {
            // fast path via the 256-entry table; fall back for higher code points
            if (ch < charTypeTable.Length)
            {
                return charTypeTable[ch];
            }
            return GetType(ch);
        }

        /// <summary>
        /// Computes the type of the given character
        /// </summary>
        /// <param name="ch"> Character whose type is to be determined </param>
        /// <returns> Type of the character </returns>
        public static byte GetType(int ch)
        {
            switch (Character.GetType(ch))
            {
                case UnicodeCategory.UppercaseLetter:
                    return WordDelimiterFilter.UPPER;
                case UnicodeCategory.LowercaseLetter:
                    return WordDelimiterFilter.LOWER;

                case UnicodeCategory.TitlecaseLetter:
                case UnicodeCategory.ModifierLetter:
                case UnicodeCategory.OtherLetter:
                case UnicodeCategory.NonSpacingMark:
                case UnicodeCategory.EnclosingMark: // depends what it encloses?
                case UnicodeCategory.SpacingCombiningMark:
                    return WordDelimiterFilter.ALPHA;

                case UnicodeCategory.DecimalDigitNumber:
                case UnicodeCategory.LetterNumber:
                case UnicodeCategory.OtherNumber:
                    return WordDelimiterFilter.DIGIT;

                // case Character.SPACE_SEPARATOR:
                // case Character.LINE_SEPARATOR:
                // case Character.PARAGRAPH_SEPARATOR:
                // case Character.CONTROL:
                // case Character.FORMAT:
                // case Character.PRIVATE_USE:

                case UnicodeCategory.Surrogate: // prevent splitting
                    return WordDelimiterFilter.ALPHA | WordDelimiterFilter.DIGIT;

                // case Character.DASH_PUNCTUATION:
                // case Character.START_PUNCTUATION:
                // case Character.END_PUNCTUATION:
                // case Character.CONNECTOR_PUNCTUATION:
                // case Character.OTHER_PUNCTUATION:
                // case Character.MATH_SYMBOL:
                // case Character.CURRENCY_SYMBOL:
                // case Character.MODIFIER_SYMBOL:
                // case Character.OTHER_SYMBOL:
                // case Character.INITIAL_QUOTE_PUNCTUATION:
                // case Character.FINAL_QUOTE_PUNCTUATION:

                default:
                    return WordDelimiterFilter.SUBWORD_DELIM;
            }
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 1.0.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Azure.Management.Compute
{
    using Azure;
    using Management;
    using Rest;
    using Rest.Azure;
    using Models;
    using System.Collections;
    using System.Collections.Generic;
    using System.Threading;
    using System.Threading.Tasks;

    /// <summary>
    /// ContainerServicesOperations operations.
    /// </summary>
    // NOTE(review): generated file — fix defects in the AutoRest template/swagger, not here,
    // or edits will be lost on regeneration.
    public partial interface IContainerServicesOperations
    {
        /// <summary>
        /// Gets a list of container services in the specified subscription.
        /// </summary>
        /// <remarks>
        /// Gets a list of container services in the specified subscription.
        /// The operation returns properties of each container service
        /// including state, orchestrator, number of masters and agents, and
        /// FQDNs of masters and agents.
        /// </remarks>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<IPage<ContainerService>>> ListWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Creates or updates a container service.
        /// </summary>
        /// <remarks>
        /// Creates or updates a container service with the specified
        /// configuration of orchestrator, masters, and agents.
        /// </remarks>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='containerServiceName'>
        /// The name of the container service in the specified subscription and
        /// resource group.
        /// </param>
        /// <param name='parameters'>
        /// Parameters supplied to the Create or Update a Container Service
        /// operation.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<ContainerService>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string containerServiceName, ContainerService parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets the properties of the specified container service.
        /// </summary>
        /// <remarks>
        /// Gets the properties of the specified container service in the
        /// specified subscription and resource group. The operation returns
        /// the properties including state, orchestrator, number of masters and
        /// agents, and FQDNs of masters and agents.
        /// </remarks>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='containerServiceName'>
        /// The name of the container service in the specified subscription and
        /// resource group.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<ContainerService>> GetWithHttpMessagesAsync(string resourceGroupName, string containerServiceName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Deletes the specified container service.
        /// </summary>
        /// <remarks>
        /// Deletes the specified container service in the specified
        /// subscription and resource group. The operation does not delete
        /// other resources created as part of creating a container service,
        /// including storage accounts, VMs, and availability sets. All the
        /// other resources created with the container service are part of the
        /// same resource group and can be deleted individually.
        /// </remarks>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='containerServiceName'>
        /// The name of the container service in the specified subscription and
        /// resource group.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string containerServiceName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets a list of container services in the specified resource group.
        /// </summary>
        /// <remarks>
        /// Gets a list of container services in the specified subscription and
        /// resource group. The operation returns properties of each container
        /// service including state, orchestrator, number of masters and
        /// agents, and FQDNs of masters and agents.
        /// </remarks>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<IPage<ContainerService>>> ListByResourceGroupWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        // NOTE(review): the Begin* members presumably start the long-running operation without
        // polling it to completion (standard AutoRest LRO convention) — confirm against the
        // generated ContainerServicesOperations implementation.
        /// <summary>
        /// Creates or updates a container service.
        /// </summary>
        /// <remarks>
        /// Creates or updates a container service with the specified
        /// configuration of orchestrator, masters, and agents.
        /// </remarks>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='containerServiceName'>
        /// The name of the container service in the specified subscription and
        /// resource group.
        /// </param>
        /// <param name='parameters'>
        /// Parameters supplied to the Create or Update a Container Service
        /// operation.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<ContainerService>> BeginCreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string containerServiceName, ContainerService parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Deletes the specified container service.
        /// </summary>
        /// <remarks>
        /// Deletes the specified container service in the specified
        /// subscription and resource group. The operation does not delete
        /// other resources created as part of creating a container service,
        /// including storage accounts, VMs, and availability sets. All the
        /// other resources created with the container service are part of the
        /// same resource group and can be deleted individually.
        /// </remarks>
        /// <param name='resourceGroupName'>
        /// The name of the resource group.
        /// </param>
        /// <param name='containerServiceName'>
        /// The name of the container service in the specified subscription and
        /// resource group.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse> BeginDeleteWithHttpMessagesAsync(string resourceGroupName, string containerServiceName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        // The *Next methods fetch a subsequent page of results using the NextLink
        // returned by a previous List call (see the nextPageLink parameter docs).
        /// <summary>
        /// Gets a list of container services in the specified subscription.
        /// </summary>
        /// <remarks>
        /// Gets a list of container services in the specified subscription.
        /// The operation returns properties of each container service
        /// including state, orchestrator, number of masters and agents, and
        /// FQDNs of masters and agents.
        /// </remarks>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<IPage<ContainerService>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));

        /// <summary>
        /// Gets a list of container services in the specified resource group.
        /// </summary>
        /// <remarks>
        /// Gets a list of container services in the specified subscription and
        /// resource group. The operation returns properties of each container
        /// service including state, orchestrator, number of masters and
        /// agents, and FQDNs of masters and agents.
        /// </remarks>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='customHeaders'>
        /// The headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="Microsoft.Rest.Azure.CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="Microsoft.Rest.SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="Microsoft.Rest.ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        Task<AzureOperationResponse<IPage<ContainerService>>> ListByResourceGroupNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using System.ComponentModel;
// Core Reference Libraries
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Windows;
using System.Windows.Media;
using System.Windows.Media.Imaging;
// WebSocket Server
using Fleck;
// Kinect API
using Microsoft.Kinect;
// JSON Serializer
using Newtonsoft.Json;

namespace Quetzalcoatl
{
    /// <summary>
    /// WebSocket bridge between a Kinect sensor and browser clients: tracks one body,
    /// derives hand states / gestures (swipe, push/pull, zoom, start/stop engagement),
    /// and broadcasts the results to every connected socket as JSON.
    /// </summary>
    class Program
    {
        // Maintain a list of all clients connected to the server
        static List<IWebSocketConnection> allSockets = new List<IWebSocketConnection>();
        static WebSocketServer server = new WebSocketServer("ws://localhost:1620");
        // Debugging switch. Set to true for verbose output.
        static Boolean debug = true;

        static void Main(string[] args)
        {
            // Start the KinectService process in a minimized window.
            // This is required for any Kinect application to run.
            Console.WriteLine("Starting KinectService.exe...");
            ProcessStartInfo KinectService = new ProcessStartInfo(@"C:\Windows\System32\KinectService.exe");
            KinectService.WindowStyle = ProcessWindowStyle.Minimized;
            Process KinectProcess = Process.Start(KinectService);

            server.Start(socket =>
            {
                socket.OnOpen = () =>
                {
                    Console.WriteLine("Opening a new socket...");
                    // Add the socket address to the master list
                    allSockets.Add(socket);
                    Console.WriteLine("Initializing Kinect...");
                    // Initialize a new Kinect object per connection
                    Kinect alpha = new Kinect();
                    alpha.InitializeKinect();
                };
                socket.OnClose = () =>
                {
                    Console.WriteLine("Closing all connections...");
                    // Remove the socket record from the master list
                    allSockets.Remove(socket);
                };
            });

            // Soft-quit the server when "exit" is entered at the CLI.
            var input = Console.ReadLine();
            if (input == "exit")
            {
                // Purge the KinectService process and close the console window
                KinectProcess.CloseMainWindow();
                KinectProcess.Close();
                System.Environment.Exit(0);
            }
            else
            {
                while (input != "exit")
                {
                    input = Console.ReadLine();
                }
            }
        }

        /// <summary>
        /// Wraps the Kinect sensor: reads body frames and maintains the per-gesture state
        /// machines. All thresholds (.102 m depth delta, 85 px swipe delta, 15/45/150 frame
        /// windows) come from the original implementation.
        /// </summary>
        public class Kinect
        {
            // Bodies the Kinect tracks this frame
            private Body[] bodies = null;
            // Handle for the Kinect itself
            private KinectSensor kinectSensor = null;
            // Maps camera-space (meters) joints into depth-space (pixels)
            private CoordinateMapper coordinateMapper = null;
            // Buffer for frames coming in from the Kinect
            private BodyFrameReader reader = null;

            // Frame counters used to periodically re-baseline push/pull depth references
            private int rframecount = 0;
            private int lframecount = 0;
            // Frame counters for the start/stop engagement gesture
            private int startcount = 0;
            private int endcount = 0;
            // Error/age counters. NOTE(review): zerrorcount and perrorcount are incremented
            // or passed around but never otherwise read — presumably vestigial; confirm.
            private int zerrorcount = 0;
            private int perrorcount = 0;
            private int serrorcount = 0;
            // Baseline hand depths (Z, meters) for push/pull detection
            private float zright = 0;
            private float zleft = 0;
            // Baseline hand depths for the start/stop gesture
            private float ssright = 0;
            private float ssleft = 0;
            // Distance pulled/pushed relative to baseline, sent to clients
            private float lpulldist = 0;
            private float rpulldist = 0;
            // Current push/pull flags per hand
            private bool rpush = false;
            private bool lpush = false;
            private bool rpull = false;
            private bool lpull = false;
            // Text form of each hand state for the JSON packet
            private String lHandState = "";
            private String rHandState = "";
            // Two-handed zoom gesture baseline and current scale/rotation
            private double zoominit = 0;
            private double zoomscale = 0;
            private double initX = 0;
            private double initY = 0;
            private double initZ = 0;
            private double theta = 0;
            private double phi = 0;
            private double rho = 0;
            // Whether "Awaiting start gesture..." has been printed already
            private bool startStatement = false;
            // Whether the user has performed the start gesture
            private bool engaged = false;
            // TrackingId of the body currently being followed (0 = none yet)
            private ulong mainBodyId = 0;
            // Rolling 6-frame history of each hand's position, newest at index 0
            private Point[] lpos = new Point[6];
            private Point[] rpos = new Point[6];
            private string swipe = "none";

            /// <summary>
            /// Opens the default sensor, allocates the body buffer and subscribes to
            /// body-frame arrivals. No-op if no sensor is available.
            /// </summary>
            public void InitializeKinect()
            {
                // Dev API supports only one Kinect.
                this.kinectSensor = KinectSensor.Default;
                if (this.kinectSensor != null)
                {
                    Console.WriteLine("Kinect Initialized. Broadcasting...");
                    // The Kinect depth sensor measures in millimeters;
                    // CoordinateMapper converts to pixels.
                    this.coordinateMapper = this.kinectSensor.CoordinateMapper;
                    // Initialize the depth sensor
                    this.kinectSensor.Open();
                    // Retrieve the bodies that the Kinect detects
                    this.bodies = new Body[this.kinectSensor.BodyFrameSource.BodyCount];
                    // Open the reader for the body frames
                    this.reader = this.kinectSensor.BodyFrameSource.OpenReader();
                    if (this.reader != null)
                    {
                        // Keep pulling frames from the sensor
                        this.reader.FrameArrived += this.Reader_FrameArrived;
                    }
                }
            }

            /// <summary>
            /// Per-frame handler: refreshes body data, picks/keeps the tracked body, derives
            /// gestures and hand states, serializes a Packet and broadcasts it to all sockets.
            /// Any failure to read the frame is reported and the frame is dropped.
            /// </summary>
            private void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
            {
                BodyFrameReference frameReference = e.FrameReference;
                try
                {
                    using (BodyFrame frame = frameReference.AcquireFrame())
                    {
                        if (frame != null)
                        {
                            // Get the data for all the bodies detected by the Kinect
                            frame.GetAndRefreshBodyData(this.bodies);
                            // Extract only the bodies that are tracked
                            var trackedBodies = this.bodies.Where(c => c.IsTracked).ToArray();
                            var bodyIdArray = new ulong[trackedBodies.Length];
                            for (int k = 0; k < trackedBodies.Length; k++)
                            {
                                bodyIdArray[k] = trackedBodies[k].TrackingId;
                            }
                            // Set the body to be tracked. If none was tracked before, or the
                            // previously tracked body left the viewport, take the first tracked
                            // body; otherwise keep following the original body.
                            // FIX: guard against an empty array — previously this indexed
                            // bodyIdArray[0] unconditionally and relied on the catch below.
                            if (bodyIdArray.Length > 0 && (mainBodyId == 0 || Array.IndexOf(bodyIdArray, mainBodyId) == -1))
                            {
                                mainBodyId = bodyIdArray[0];
                            }
                            foreach (Body body in this.bodies)
                            {
                                if (body.IsTracked && body.TrackingId == mainBodyId)
                                {
                                    // Store all the joint data for easy access later
                                    IReadOnlyDictionary<JointType, Joint> joints = body.Joints;
                                    // Convert joint coordinates from camera space to depth-space pixels
                                    Dictionary<JointType, Point> jointPoints = new Dictionary<JointType, Point>();
                                    foreach (JointType jointType in joints.Keys)
                                    {
                                        DepthSpacePoint depthSpacePoint = this.coordinateMapper.MapCameraPointToDepthSpace(joints[jointType].Position);
                                        jointPoints[jointType] = new Point(depthSpacePoint.X, depthSpacePoint.Y);
                                    }
                                    // Handles to the joints used below
                                    Point lHand = jointPoints[JointType.HandLeft];
                                    Point rHand = jointPoints[JointType.HandRight];
                                    // Hands in camera (depth) space, for Z-based gestures
                                    CameraSpacePoint lhanddepth = body.Joints[JointType.HandLeft].Position;
                                    CameraSpacePoint rhanddepth = body.Joints[JointType.HandRight].Position;
                                    Point lShoulder = jointPoints[JointType.ShoulderLeft];
                                    Point rShoulder = jointPoints[JointType.ShoulderRight];
                                    Point lWrist = jointPoints[JointType.WristLeft];
                                    Point rWrist = jointPoints[JointType.WristRight];
                                    CameraSpacePoint lwristd = body.Joints[JointType.WristLeft].Position;
                                    CameraSpacePoint rwristd = body.Joints[JointType.WristRight].Position;
                                    Point lElbow = jointPoints[JointType.ElbowLeft];
                                    Point rElbow = jointPoints[JointType.ElbowRight];
                                    Point bSpine = jointPoints[JointType.SpineBase];
                                    Point neck = jointPoints[JointType.Neck];
                                    Point head = jointPoints[JointType.Head];

                                    if (engaged == true)
                                    {
                                        startStatement = false;
                                        // Shift the 6-frame hand-position history and record the newest sample
                                        for (int i = 4; i >= 0; i--)
                                        {
                                            rpos[i + 1] = rpos[i];
                                            lpos[i + 1] = lpos[i];
                                        }
                                        rpos[0] = rHand;
                                        lpos[0] = lHand;
                                        // Swipe detection compares newest vs oldest history entry,
                                        // but only after a cool-down of 15 frames since the last swipe.
                                        if (rpos[5].X != 0 && rpos[5].Y != 0)
                                        {
                                            if (serrorcount > 15)
                                            {
                                                swipe = CheckSwipe(rpos[0], rpos[5], lpos[0], lpos[5]);
                                            }
                                        }
                                        if (rpos[5].X == 0 && lpos[5].X == 0)
                                        {
                                            swipe = "none";
                                        }
                                        if (!(swipe.Equals("none")))
                                        {
                                            // Swipe fired: reset cool-down and clear the history
                                            serrorcount = 0;
                                            Array.Clear(rpos, 0, rpos.Length);
                                            Array.Clear(lpos, 0, lpos.Length);
                                        }
                                        // Push/pull only applies while the fist is closed;
                                        // the depth baseline is refreshed every 15 frames.
                                        if (body.HandRightState == HandState.Closed)
                                        {
                                            if (this.rframecount >= 15) { rframecount = 0; }
                                            CheckPushPull(rhanddepth.Z, "right", rframecount, rpush, rpull, body.HandRightState);
                                        }
                                        else
                                        {
                                            rpush = false;
                                            rpull = false;
                                        }
                                        if (body.HandLeftState == HandState.Closed)
                                        {
                                            if (this.lframecount >= 15) { lframecount = 0; }
                                            CheckPushPull(lhanddepth.Z, "left", lframecount, lpush, lpull, body.HandLeftState);
                                        }
                                        else
                                        {
                                            lpush = false;
                                            lpull = false;
                                        }
                                        // Approximate wingspan: arm length (shoulder->elbow + half elbow->wrist)
                                        // doubled, plus the shoulder-to-shoulder distance.
                                        double width = (Math.Pow((Math.Pow(Math.Abs(rShoulder.X - rElbow.X), 2) + Math.Pow(Math.Abs(rShoulder.Y - rElbow.Y), 2)), 0.5) + (Math.Pow((Math.Pow(Math.Abs(rElbow.X - rWrist.X), 2) + Math.Pow(Math.Abs(rElbow.Y - rWrist.Y), 2)), 0.5) / 2)) * 2 + Math.Abs(rShoulder.X - lShoulder.X);
                                        // Distance from the base of the spine to the top of the head
                                        double height = Math.Abs(bSpine.Y - neck.Y) + 2 * (neck.Y - head.Y);
                                        // Infer hand positions by extrapolating along the forearm when
                                        // hand-tracking confidence is low.
                                        double wristdist = Math.Pow((Math.Pow(rWrist.X - rElbow.X, 2) + Math.Pow(rWrist.Y - rElbow.Y, 2)), 0.5);
                                        // FIX: the original computed Math.Atan(rWrist.Y - rElbow.Y / rWrist.X - rElbow.X),
                                        // which by operator precedence is Y - (E.Y/W.X) - E.X, not the forearm slope.
                                        double rangle = Math.Atan((rWrist.Y - rElbow.Y) / (rWrist.X - rElbow.X));
                                        double langle = Math.Atan((lWrist.Y - lElbow.Y) / (lWrist.X - lElbow.X));
                                        Point rpos2 = new Point(rWrist.X + wristdist * Math.Cos(rangle), rWrist.Y + wristdist * Math.Sin(rangle));
                                        // NOTE(review): lpos2 reuses the RIGHT forearm length (wristdist) for the left
                                        // hand — looks unintended but preserved; confirm before changing.
                                        Point lpos2 = new Point(lWrist.X + wristdist * Math.Cos(langle), lWrist.Y + wristdist * Math.Sin(langle));

                                        // Text-ify the left hand state
                                        if (lpull == true) { lHandState = "pull"; }
                                        else if (lpush == true) { lHandState = "push"; }
                                        else
                                        {
                                            switch (body.HandLeftState)
                                            {
                                                case HandState.Open: lHandState = "open"; break;
                                                case HandState.Closed: lHandState = "closed"; break;
                                                case HandState.Lasso: lHandState = "point"; break;
                                                default: break;
                                            }
                                        }
                                        // Text-ify the right hand state
                                        if (rpull == true) { rHandState = "pull"; }
                                        else if (rpush == true) { rHandState = "push"; }
                                        else
                                        {
                                            switch (body.HandRightState)
                                            {
                                                case HandState.Open: rHandState = "open"; break;
                                                case HandState.Closed: rHandState = "closed"; break;
                                                case HandState.Lasso: rHandState = "point"; break;
                                                default: break;
                                            }
                                        }

                                        // Build the JSON packet, substituting the extrapolated hand
                                        // position for any hand tracked with low confidence.
                                        string result = "";
                                        switch (body.HandRightConfidence)
                                        {
                                            case TrackingConfidence.High:
                                                switch (body.HandLeftConfidence)
                                                {
                                                    case TrackingConfidence.High:
                                                        CheckZoom(body.HandLeftState, body.HandRightState, lhanddepth, rhanddepth, zerrorcount);
                                                        result = MakeJson(rHandState, rHand, lHandState, lHand, bSpine, width, height, zoomscale, theta, phi, rho, swipe, lpulldist, rpulldist);
                                                        break;
                                                    case TrackingConfidence.Low:
                                                        CheckZoom(body.HandLeftState, body.HandRightState, lwristd, rhanddepth, zerrorcount);
                                                        result = MakeJson(rHandState, rHand, lHandState, lpos2, bSpine, width, height, zoomscale, theta, phi, rho, swipe, lpulldist, rpulldist);
                                                        break;
                                                }
                                                break;
                                            case TrackingConfidence.Low:
                                                switch (body.HandLeftConfidence)
                                                {
                                                    case TrackingConfidence.High:
                                                        CheckZoom(body.HandLeftState, body.HandRightState, lhanddepth, rwristd, zerrorcount);
                                                        result = MakeJson(rHandState, rpos2, lHandState, lHand, bSpine, width, height, zoomscale, theta, phi, rho, swipe, lpulldist, rpulldist);
                                                        break;
                                                    case TrackingConfidence.Low:
                                                        CheckZoom(body.HandLeftState, body.HandRightState, lwristd, rwristd, zerrorcount);
                                                        result = MakeJson(rHandState, rpos2, lHandState, lpos2, bSpine, width, height, zoomscale, theta, phi, rho, swipe, lpulldist, rpulldist);
                                                        break;
                                                }
                                                break;
                                        }

                                        // Send the data to every connected client
                                        allSockets.ToList().ForEach(s => s.Send(result));
                                        if (debug == true) { Console.WriteLine(result); }

                                        // Check if the user is in a start/stop position
                                        if (lHand.Y <= bSpine.Y && rHand.Y <= bSpine.Y) { endcount = 0; }
                                        this.CheckStartStop(body.Joints[JointType.HandRight].Position.Z, body.Joints[JointType.HandLeft].Position.Z, startcount, endcount, lHand, rHand, bSpine);

                                        // Increment the frame counts
                                        rframecount++;
                                        lframecount++;
                                        endcount++;
                                        perrorcount++;
                                        serrorcount++;
                                    }
                                    else
                                    {
                                        if (debug == true && startStatement == false)
                                        {
                                            Console.WriteLine("Awaiting start gesture...");
                                            startStatement = true;
                                        }
                                        // Check if the user is in a start position (both fists closed)
                                        if (body.HandLeftState == HandState.Closed && body.HandRightState == HandState.Closed)
                                        {
                                            if (startcount > 45) { startcount = 0; }
                                            this.CheckStartStop(body.Joints[JointType.HandRight].Position.Z, body.Joints[JointType.HandLeft].Position.Z, startcount, endcount, lHand, rHand, bSpine);
                                        }
                                        startcount++;
                                    }
                                }
                            }
                        }
                    }
                }
                catch (Exception)
                {
                    // Best-effort: drop the frame and keep streaming.
                    Console.WriteLine("Frame data unavailable...");
                }
            }

            /// <summary>
            /// Engagement state machine: engage when both hands move >= .102 m toward the
            /// sensor from their baselines; disengage after 150 frames with both hands
            /// below the spine base.
            /// </summary>
            private void CheckStartStop(float rightz, float leftz, int startframe, int endframe, Point lHand, Point rHand, Point bSpine)
            {
                if (startframe == 0)
                {
                    // First frame of the window: capture depth baselines
                    ssright = rightz;
                    ssleft = leftz;
                }
                else if (rightz >= ssright + .102 && leftz >= ssleft + .102)
                {
                    engaged = true;
                    // Park the baselines out of range so the gesture can't re-fire
                    ssleft = 6;
                    ssright = 6;
                }
                else if (lHand.Y >= bSpine.Y && rHand.Y >= bSpine.Y)
                {
                    if (endframe > 150) { engaged = false; }
                }
            }

            /// <summary>
            /// Detects a push (hand toward sensor) or pull (hand away) of >= .102 m from
            /// the depth captured at frame 0 of the window, and records the distance.
            /// </summary>
            private void CheckPushPull(float currentz, string parity, int framenum, bool currpush, bool currpull, HandState currstate)
            {
                if (parity == "right")
                {
                    if (currpull == true && currstate == HandState.Closed)
                    {
                        rpull = true;
                        rpulldist = currentz - zright;
                    }
                    if (currpush == true && currstate == HandState.Closed)
                    {
                        rpush = true;
                        rpulldist = currentz - zright;
                    }
                    // FIX: the original guard read "currpush==false && currpush==false"
                    // (currpush tested twice); clearly intended to test both flags so the
                    // baseline is not re-captured mid-gesture.
                    if (framenum == 0 && currpull == false && currpush == false)
                    {
                        zright = currentz;
                        rpulldist = 0;
                    }
                    else if (currentz >= zright + .102) { rpull = true; }
                    else if (currentz <= zright - .102) { rpush = true; }
                    else if (currpull == false && currpush == false) { rpulldist = 0; }
                }
                if (parity == "left")
                {
                    if (currpull == true && currstate == HandState.Closed)
                    {
                        lpull = true;
                        lpulldist = currentz - zleft;
                    }
                    if (currpush == true && currstate == HandState.Closed)
                    {
                        lpush = true;
                        lpulldist = currentz - zleft;
                    }
                    // NOTE(review): unlike the right hand, the left baseline is re-captured
                    // even mid-gesture — asymmetry preserved from the original; confirm intent.
                    if (framenum == 0) { zleft = currentz; }
                    else if (currentz >= zleft + .102) { lpull = true; }
                    else if (currentz <= zleft - .102) { lpush = true; }
                    else if (currpull == false && currpush == false) { lpulldist = 0; }
                }
            }

            /// <summary>
            /// Two-handed zoom/rotate: while at least one fist is closed (and the other is
            /// closed or untracked), measures the hand-to-hand distance relative to the
            /// distance captured when the gesture started (zoomscale) plus the rotation of
            /// the hand-to-hand vector in each plane (theta=XY, phi=YZ, rho=XZ, degrees).
            /// Releasing both hands resets all gesture state.
            /// </summary>
            private void CheckZoom(HandState lstate, HandState rstate, CameraSpacePoint lhd, CameraSpacePoint rhd, int errorcount)
            {
                // NOTE(review): errorcount is accepted but never used — preserved for
                // interface compatibility.
                if ((lstate == HandState.Closed && rstate == HandState.Closed)
                    || (lstate == HandState.Closed && (rstate == HandState.Unknown || rstate == HandState.NotTracked))
                    || ((lstate == HandState.Unknown || lstate == HandState.NotTracked) && rstate == HandState.Closed))
                {
                    if (zoominit == 0)
                    {
                        // Gesture start: capture the baseline hand-to-hand vector and length
                        initX = rhd.X - lhd.X;
                        initY = rhd.Y - lhd.Y;
                        initZ = rhd.Z - lhd.Z;
                        zoominit = Math.Pow((Math.Pow(Math.Abs(initX), 2) + Math.Pow(Math.Abs(initY), 2) + Math.Pow(Math.Abs(initZ), 2)), 0.5);
                    }
                    if (zoominit != 0)
                    {
                        double zoomcurr = Math.Pow((Math.Pow(Math.Abs(rhd.X - lhd.X), 2) + Math.Pow(Math.Abs(rhd.Y - lhd.Y), 2) + Math.Pow(Math.Abs(rhd.Z - lhd.Z), 2)), 0.5);
                        // Algebraically this is just zoomcurr / zoominit; kept in the
                        // original form to avoid any floating-point behavior change.
                        zoomscale = 1 - (1 - zoomcurr / zoominit);
                        double currX = rhd.X - lhd.X;
                        double currY = rhd.Y - lhd.Y;
                        double currZ = rhd.Z - lhd.Z;
                        // Angle between the current and initial vectors projected onto each plane
                        theta = 180 / Math.PI * Math.Acos((currX * initX + currY * initY) / (Math.Pow(Math.Pow(currX, 2) + Math.Pow(currY, 2), 0.5) * Math.Pow(Math.Pow(initX, 2) + Math.Pow(initY, 2), 0.5)));
                        phi = 180 / Math.PI * Math.Acos((currY * initY + currZ * initZ) / (Math.Pow(Math.Pow(currY, 2) + Math.Pow(currZ, 2), 0.5) * Math.Pow(Math.Pow(initY, 2) + Math.Pow(initZ, 2), 0.5)));
                        rho = 180 / Math.PI * Math.Acos((currX * initX + currZ * initZ) / (Math.Pow(Math.Pow(currX, 2) + Math.Pow(currZ, 2), 0.5) * Math.Pow(Math.Pow(initX, 2) + Math.Pow(initZ, 2), 0.5)));
                    }
                }
                else
                {
                    // Gesture released: reset everything
                    zoominit = 0;
                    zoomscale = 0;
                    theta = 0;
                    phi = 0;
                    rho = 0;
                }
            }

            /// <summary>
            /// Classifies a swipe from the oldest vs newest hand positions: either hand
            /// moving more than 85 px in a direction fires that swipe; otherwise "none".
            /// </summary>
            private string CheckSwipe(Point rfirst, Point rlast, Point lfirst, Point llast)
            {
                if (rfirst.X > rlast.X + 85 || lfirst.X > llast.X + 85) { return "right"; }
                else if (rfirst.X < rlast.X - 85 || lfirst.X < llast.X - 85) { return "left"; }
                else if (rfirst.Y < rlast.Y - 85 || lfirst.Y < llast.Y - 85) { return "up"; }
                else if (rfirst.Y > rlast.Y + 85 || lfirst.Y > llast.Y + 85) { return "down"; }
                else { return "none"; }
            }

            /// <summary>DTO serialized to JSON for each broadcast frame.</summary>
            private class Packet
            {
                // Right Hand Coordinates
                public double rx { get; set; }
                public double ry { get; set; }
                // Left Hand Coordinates
                public double lx { get; set; }
                public double ly { get; set; }
                // Left and Right Hand States
                public string rhandState { get; set; }
                public string lhandState { get; set; }
                // Left and Right Pull Distances
                public float lpull { get; set; }
                public float rpull { get; set; }
                // Spine Base Coordinates
                public double sx { get; set; }
                public double sy { get; set; }
                // User Viewport Dimensions
                public double screenw { get; set; }
                public double screenh { get; set; }
                // Zoom Scale
                public double scale { get; set; }
                // Rotation Angles
                public double XY { get; set; }
                public double YZ { get; set; }
                public double XZ { get; set; }
                // Swipe Value
                public string swipeval { get; set; }
            }

            /// <summary>
            /// Builds the indented-JSON packet from hand positions/states, body geometry
            /// and the current gesture values. Coordinates are rounded to whole pixels.
            /// </summary>
            public string MakeJson(String rightstate, Point rightpos, String leftstate, Point leftpos, Point spinebase, double width, double height, double zoom, double theta, double phi, double rho, string zoomdir, float leftpull, float rightpull)
            {
                Packet bodyData = new Packet
                {
                    rx = Math.Round(rightpos.X),
                    ry = Math.Round(rightpos.Y),
                    lx = Math.Round(leftpos.X),
                    ly = Math.Round(leftpos.Y),
                    rhandState = rightstate,
                    lhandState = leftstate,
                    lpull = leftpull,
                    rpull = rightpull,
                    sx = Math.Round(spinebase.X),
                    sy = Math.Round(spinebase.Y),
                    screenw = Math.Round(width),
                    screenh = Math.Round(height),
                    scale = zoom,
                    XY = theta,
                    YZ = phi,
                    XZ = rho,
                    swipeval = zoomdir
                };
                string json = JsonConvert.SerializeObject(bodyData, Formatting.Indented);
                return json;
            }
        }
    }
}
// Script.cs
// Script#/Libraries/CoreLib
// This source code is subject to terms and conditions of the Apache License, Version 2.0.
//

using System.Collections.Generic;
using System.Runtime.CompilerServices;

namespace System
{
    /// <summary>
    /// The Script class contains various methods that represent global
    /// methods present in the underlying script engine.
    /// </summary>
    [ScriptIgnoreNamespace]
    [ScriptImport]
    public static class Script
    {
        // NOTE: Per the class summary and the [ScriptImport]/[ScriptAlias] attributes,
        // these member bodies are compile-time placeholders — the Script# compiler maps
        // each member to the aliased construct in the underlying script engine, so the
        // C# bodies (return null/false/0) never execute.

        [ScriptField]
        [ScriptAlias("$global")]
        public static object Global
        {
            get
            {
                return null;
            }
        }

        [ScriptField]
        [ScriptAlias("ss.modules")]
        public static Dictionary<string, object> Modules
        {
            get
            {
                return null;
            }
        }

        [ScriptField]
        [ScriptAlias("undefined")]
        public static object Undefined
        {
            get
            {
                return null;
            }
        }

        /// <summary>
        /// Converts an object into a boolean.
        /// </summary>
        /// <param name="o">The object to convert.</param>
        /// <returns>true if the object is not null, zero, empty string or undefined.</returns>
        public static bool Boolean(object o)
        {
            return false;
        }

        // Timer cancellation — aliased to the host's clearInterval/clearTimeout globals.
        [ScriptAlias("clearInterval")]
        public static void ClearInterval(int intervalID)
        {
        }

        [ScriptAlias("clearTimeout")]
        public static void ClearTimeout(int timeoutID)
        {
        }

        public static object CreateInstance(Type type, params object[] arguments)
        {
            return null;
        }

        // Script reflection helpers: access members by name on an instance or a type.
        public static void DeleteField(object instance, string name)
        {
        }

        public static void DeleteField(Type type, string name)
        {
        }

        /// <summary>
        /// Enables you to evaluate (or execute) an arbitrary script
        /// literal. This includes JSON literals, where the return
        /// value is the deserialized object graph.
        /// </summary>
        /// <param name="s">The script to be evaluated.</param>
        /// <returns>The result of the evaluation.</returns>
        [ScriptAlias("eval")]
        public static object Eval(string s)
        {
            return null;
        }

        public static object GetField(object instance, string name)
        {
            return null;
        }

        public static T GetField<T>(object instance, string name)
        {
            return default(T);
        }

        public static object GetField(Type type, string name)
        {
            return null;
        }

        public static T GetField<T>(Type type, string name)
        {
            return default(T);
        }

        public static string GetScriptType(object instance)
        {
            return null;
        }

        public static bool HasField(object instance, string name)
        {
            return false;
        }

        public static bool HasField(Type type, string name)
        {
            return false;
        }

        public static bool HasMethod(object instance, string name)
        {
            return false;
        }

        public static bool HasMethod(Type type, string name)
        {
            return false;
        }

        public static object InvokeMethod(object instance, string name, params object[] args)
        {
            return null;
        }

        public static T InvokeMethod<T>(object instance, string name, params object[] args)
        {
            return default(T);
        }

        public static object InvokeMethod(Type type, string name, params object[] args)
        {
            return null;
        }

        public static T InvokeMethod<T>(Type type, string name, params object[] args)
        {
            return default(T);
        }

        /// <summary>
        /// Checks if the specified object has a falsey value, i.e. it is null or
        /// undefined or empty string or false or zero.
        /// </summary>
        /// <param name="o">The object to test.</param>
        /// <returns>true if the object represents a falsey value; false otherwise.</returns>
        public static bool IsFalsey(object o)
        {
            return false;
        }

        [ScriptAlias("isFinite")]
        public static bool IsFinite(object o)
        {
            return false;
        }

        [ScriptAlias("isNaN")]
        public static bool IsNaN(object o)
        {
            return false;
        }

        /// <summary>
        /// Checks if the specified object is null.
        /// </summary>
        /// <param name="o">The object to test against null.</param>
        /// <returns>true if the object is null; false otherwise.</returns>
        [ScriptAlias("ss.isNull")]
        public static bool IsNull(object o)
        {
            return false;
        }

        /// <summary>
        /// Checks if the specified object is null or undefined.
        /// The object passed in should be a local variable, and not
        /// a member of a class (to avoid potential script warnings).
        /// </summary>
        /// <param name="o">The object to test against null or undefined.</param>
        /// <returns>true if the object is null or undefined; false otherwise.</returns>
        [ScriptAlias("ss.isNullOrUndefined")]
        public static bool IsNullOrUndefined(object o)
        {
            return false;
        }

        /// <summary>
        /// Checks if the specified object is undefined.
        /// The object passed in should be a local variable, and not
        /// a member of a class (to avoid potential script warnings).
        /// </summary>
        /// <param name="o">The object to test against undefined.</param>
        /// <returns>true if the object is undefined; false otherwise.</returns>
        [ScriptAlias("ss.isUndefined")]
        public static bool IsUndefined(object o)
        {
            return false;
        }

        /// <summary>
        /// Checks if the specified object has a value, i.e. it is not
        /// null or undefined.
        /// </summary>
        /// <param name="o">The object to test.</param>
        /// <returns>true if the object represents a value; false otherwise.</returns>
        [ScriptAlias("ss.isValue")]
        public static bool IsValue(object o)
        {
            return false;
        }

        /// <summary>
        /// Checks if the specified object has a truthy value, i.e. it is not
        /// null or undefined or empty string or false or zero.
        /// </summary>
        /// <param name="o">The object to test.</param>
        /// <returns>true if the object represents a truthy value; false otherwise.</returns>
        public static bool IsTruthy(object o)
        {
            return false;
        }

        /// <summary>
        /// Enables you to generate an arbitrary (literal) script expression.
        /// The script can contain simple String.Format style tokens (such as
        /// {0}, {1}, ...) to be replaced with the specified arguments.
        /// </summary>
        /// <param name="script">The script expression to be evaluated.</param>
        /// <param name="args">Optional arguments matching tokens in the script.</param>
        /// <returns>The result of the script expression.</returns>
        public static object Literal(string script, params object[] args)
        {
            return null;
        }

        /// <summary>
        /// Gets the first truthy (true, non-null, non-undefined, non-empty, non-zero) value.
        /// </summary>
        /// <typeparam name="TValue">The type of the value.</typeparam>
        /// <param name="value">The value to check for validity.</param>
        /// <param name="alternateValue">The alternate value to use if the first is invalid.</param>
        /// <param name="alternateValues">Additional alternative values to use if the first is invalid.</param>
        /// <returns>The first valid value.</returns>
        public static TValue Or<TValue>(TValue value, TValue alternateValue, params TValue[] alternateValues)
        {
            return default(TValue);
        }

        public static void SetField(object instance, string name, object value)
        {
        }

        public static void SetField(Type type, string name, object value)
        {
        }

        // Timer scheduling — aliased to the host's setInterval/setTimeout globals;
        // overloads cover string code, typed callbacks and a raw delegate with args.
        [ScriptAlias("setInterval")]
        public static int SetInterval(string code, int milliseconds)
        {
            return 0;
        }

        [ScriptAlias("setInterval")]
        public static int SetInterval(Action callback, int milliseconds)
        {
            return 0;
        }

        [ScriptAlias("setInterval")]
        public static int SetInterval<T>(Action<T> callback, int milliseconds, T arg)
        {
            return 0;
        }

        [ScriptAlias("setInterval")]
        public static int SetInterval<T1, T2>(Action<T1, T2> callback, int milliseconds, T1 arg1, T2 arg2)
        {
            return 0;
        }

        [ScriptAlias("setInterval")]
        public static int SetInterval(Delegate d, int milliseconds, params object[] args)
        {
            return 0;
        }

        [ScriptAlias("setTimeout")]
        public static int SetTimeout(string code, int milliseconds)
        {
            return 0;
        }

        [ScriptAlias("setTimeout")]
        public static int SetTimeout(Action callback, int milliseconds)
        {
            return 0;
        }

        [ScriptAlias("setTimeout")]
        public static int SetTimeout<T>(Action<T> callback, int milliseconds, T arg)
        {
            return 0;
        }

        [ScriptAlias("setTimeout")]
        public static int SetTimeout<T1, T2>(Action<T1, T2> callback, int milliseconds, T1 arg1, T2 arg2)
        {
            return 0;
        }

        [ScriptAlias("setTimeout")]
        public static int SetTimeout(Delegate d, int milliseconds, params object[] args)
        {
            return 0;
        }

        /// <summary>
        /// Gets the first non-null and non-undefined value.
        /// </summary>
        /// <typeparam name="TValue">The type of the value.</typeparam>
        /// <param name="value">The value to check for validity.</param>
        /// <param name="alternateValue">The alternate value to use if the first is invalid.</param>
        /// <param name="alternateValues">Additional alternative values to use if the first is invalid.</param>
        /// <returns>The first valid value.</returns>
        [ScriptAlias("ss.value")]
        public static TValue Value<TValue>(TValue value, TValue alternateValue, params TValue[] alternateValues)
        {
            return default(TValue);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Xunit;

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract001.extract001
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract001.extract001;
    // <Title>Extract a dynamic element from a generic type</Title>
    // <Description>
    // Iterate a List<dynamic> with `var`: each element should bind dynamically,
    // dispatching Foo() to the runtime type (A then B).
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System.Collections.Generic;

    public class A
    {
        public void Foo()
        {
            Test.Status = 1;
        }
    }

    public class B
    {
        public void Foo()
        {
            Test.Status = 2;
        }
    }

    public class Test
    {
        public static int Status;
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            List<dynamic> myList = new List<dynamic>() { new A(), new B() };
            int i = 1;
            foreach (var item in myList)
            {
                // `item` is dynamic, so Foo() is late-bound to A/B in order.
                item.Foo();
                if (i++ != Test.Status)
                    return 1;
            }

            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract002.extract002
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract002.extract002;
    // <Title>Extract a dynamic element from a generic type</Title>
    // <Description>
    // Iterate a List<object> with an explicitly `dynamic` iteration variable;
    // Foo() must late-bind even though the list element type is object.
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System.Collections.Generic;

    public class A
    {
        public void Foo()
        {
            Test.Status = 1;
        }
    }

    public class B
    {
        public void Foo()
        {
            Test.Status = 2;
        }
    }

    public class Test
    {
        public static int Status;
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            List<object> myList = new List<object>() { new A(), new B() };
            int i = 1;
            foreach (dynamic item in myList)
            {
                item.Foo();
                if (i++ != Test.Status)
                    return 1;
            }

            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract003.extract003
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract003.extract003;
    // <Title>Extract a dynamic element from a generic type</Title>
    // <Description>
    // Assign a List<object> to a List<dynamic> variable (identity conversion
    // between object and dynamic in constructed types) and iterate with `var`.
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System.Collections.Generic;

    public class A
    {
        public void Foo()
        {
            Test.Status = 1;
        }
    }

    public class B
    {
        public void Foo()
        {
            Test.Status = 2;
        }
    }

    public class Test
    {
        public static int Status;
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            // Deliberate mismatch: the test verifies List<object> converts to
            // List<dynamic> via identity conversion.
            List<dynamic> myList = new List<object>() { new A(), new B() };
            int i = 1;
            foreach (var item in myList)
            {
                item.Foo();
                if (i++ != Test.Status)
                    return 1;
            }

            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract004.extract004
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract004.extract004;
    // <Title>Extract a dynamic element from a generic type</Title>
    // <Description>
    // The List<dynamic> comes from a method return value; iterating with `var`
    // should still produce dynamic elements.
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System.Collections.Generic;

    public class A
    {
        public void Foo()
        {
            Test.Status = 1;
        }
    }

    public class B
    {
        public void Foo()
        {
            Test.Status = 2;
        }
    }

    public class Foo
    {
        public List<dynamic> GetList()
        {
            return new List<dynamic>() { new A(), new B() };
        }
    }

    public class Test
    {
        public static int Status;
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            Foo f = new Foo();
            int i = 1;
            foreach (var item in f.GetList())
            {
                item.Foo();
                if (i++ != Test.Status)
                    return 1;
            }

            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract005.extract005
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract005.extract005;
    // <Title>Extract a dynamic element from a generic type</Title>
    // <Description>
    // Method declares List<dynamic> but constructs List<object>; the identity
    // conversion must hold at the return, and elements remain dynamic to callers.
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System.Collections.Generic;

    public class A
    {
        public void Foo()
        {
            Test.Status = 1;
        }
    }

    public class B
    {
        public void Foo()
        {
            Test.Status = 2;
        }
    }

    public class Foo
    {
        public List<dynamic> GetList()
        {
            // Deliberate: List<object> returned as List<dynamic>.
            return new List<object>() { new A(), new B() };
        }
    }

    public class Test
    {
        public static int Status;
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            Foo f = new Foo();
            int i = 1;
            foreach (var item in f.GetList())
            {
                item.Foo();
                if (i++ != Test.Status)
                    return 1;
            }

            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract006.extract006
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract006.extract006;
    // <Title>Extract a dynamic element from a generic type</Title>
    // <Description>
    // Same as extract004 but through the IEnumerable<dynamic> interface type.
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System.Collections.Generic;

    public class A
    {
        public void Foo()
        {
            Test.Status = 1;
        }
    }

    public class B
    {
        public void Foo()
        {
            Test.Status = 2;
        }
    }

    public class Foo
    {
        public IEnumerable<dynamic> GetList()
        {
            return new List<dynamic>() { new A(), new B() };
        }
    }

    public class Test
    {
        public static int Status;
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            Foo f = new Foo();
            int i = 1;
            foreach (var item in f.GetList())
            {
                item.Foo();
                if (i++ != Test.Status)
                    return 1;
            }

            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract007.extract007
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract007.extract007;
    // <Title>Extract a dynamic element from a generic type</Title>
    // <Description>
    // IEnumerable<dynamic> return type backed by a List<object> instance
    // (covariance of IEnumerable<out T> plus object/dynamic identity).
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System.Collections.Generic;

    public class A
    {
        public void Foo()
        {
            Test.Status = 1;
        }
    }

    public class B
    {
        public void Foo()
        {
            Test.Status = 2;
        }
    }

    public class Foo
    {
        public IEnumerable<dynamic> GetList()
        {
            return new List<object>() { new A(), new B() };
        }
    }

    public class Test
    {
        public static int Status;
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            Foo f = new Foo();
            int i = 1;
            foreach (var item in f.GetList())
            {
                item.Foo();
                if (i++ != Test.Status)
                    return 1;
            }

            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract008.extract008
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract008.extract008;
    // <Title>Extract a dynamic element from a generic type</Title>
    // <Description>
    // dynamic nested two generic levels deep: List<Dictionary<string, List<dynamic>>>;
    // indexing all the way down must still late-bind Foo().
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System.Collections.Generic;

    public class A
    {
        public void Foo()
        {
            Test.Status = 1;
        }
    }

    public class Foo
    {
        public List<Dictionary<string, List<dynamic>>> GetSomething()
        {
            var list = new List<Dictionary<string, List<dynamic>>>();
            var dict = new Dictionary<string, List<dynamic>>();
            dict.Add("Test", new List<dynamic>() { new A() });
            list.Add(dict);
            return list;
        }
    }

    public class Test
    {
        public static int Status;
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            Foo f = new Foo();
            var list = f.GetSomething();
            list[0]["Test"][0].Foo();
            if (Test.Status != 1)
                return 1;
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract009.extract009
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract009.extract009;
    // <Title>Extract a dynamic element from a generic type</Title>
    // <Description>
    // Same nesting as extract008, but the builder constructs the object-based
    // equivalent types; the deep identity conversion is exercised at the return.
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System.Collections.Generic;

    public class A
    {
        public void Foo()
        {
            Test.Status = 1;
        }
    }

    public class Foo
    {
        public List<Dictionary<string, List<dynamic>>> GetSomething()
        {
            // Deliberate: object in place of dynamic at every level.
            var list = new List<Dictionary<string, List<object>>>();
            var dict = new Dictionary<string, List<object>>();
            dict.Add("Test", new List<object>() { new A() });
            list.Add(dict);
            return list;
        }
    }

    public class Test
    {
        public static int Status;
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            Foo f = new Foo();
            var list = f.GetSomething();
            list[0]["Test"][0].Foo();
            if (Test.Status != 1)
                return 1;
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract010.extract010
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract010.extract010;
    // <Title>Extract a dynamic element from a generic type</Title>
    // <Description>
    // dynamic used both as a dictionary key type and as a deeply nested element
    // type; chained indexers must late-bind the final Foo() call.
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System.Collections.Generic;

    public class A
    {
        public void Foo()
        {
            Test.Status = 1;
        }
    }

    public class B
    {
        public void Foo()
        {
            Test.Status = 2;
        }
    }

    public class Foo
    {
        public List<Dictionary<dynamic, Dictionary<string, List<dynamic>>>> GetSomething()
        {
            var list = new List<Dictionary<dynamic, Dictionary<string, List<dynamic>>>>();
            var dict = new Dictionary<dynamic, Dictionary<string, List<dynamic>>>();
            var dict2 = new Dictionary<string, List<dynamic>>();
            list.Add(dict);
            dict.Add("bar", dict2);
            dict2.Add("foo", new List<dynamic>() { new A(), new B() });
            return list;
        }
    }

    public class Test
    {
        public static int Status;
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            Foo f = new Foo();
            var list = f.GetSomething();
            list[0]["bar"]["foo"][0].Foo();
            if (Test.Status != 1)
                return 1;
            return 0;
        }
    }
    // </Code>
}

namespace ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract011.extract011
{
    using ManagedTests.DynamicCSharp.Conformance.dynamic.dynamicType.generics.extractDynamic.extract011.extract011;
    // <Title>Extract a dynamic element from a generic type</Title>
    // <Description>
    // dynamic threaded through user-defined generic containers (C<T>, D<T,U>);
    // extraction via chained accessor methods must still late-bind Foo().
    // </Description>
    // <RelatedBugs></RelatedBugs>
    //<Expects Status=success></Expects>
    // <Code>
    using System.Collections.Generic;

    public class A
    {
        public void Foo()
        {
            Test.Status = 1;
        }
    }

    public class B
    {
        public void Foo()
        {
            Test.Status = 2;
        }
    }

    public class C<T>
    {
        public T t;
        public T getT()
        {
            return t;
        }

        public void Add(T tt)
        {
            t = tt;
        }
    }

    public class D<T, U>
    {
        public T t;
        public U u;
        public T getT()
        {
            return t;
        }

        public U getU()
        {
            return u;
        }

        public void Add(T tt, U uu)
        {
            t = tt;
            u = uu;
        }
    }

    public class Foo
    {
        public C<D<dynamic, D<string, C<dynamic>>>> Get()
        {
            var list = new C<D<dynamic, D<string, C<dynamic>>>>();
            var dict = new D<dynamic, D<string, C<dynamic>>>();
            var dict2 = new D<string, C<dynamic>>();
            var test = new C<dynamic>();
            test.Add(new A());
            list.Add(dict);
            dict.Add("bar", dict2);
            dict2.Add("foo", test);
            return list;
        }
    }

    public class Test
    {
        public static int Status;
        [Fact]
        public static void DynamicCSharpRunTest()
        {
            Assert.Equal(0, MainMethod(null));
        }

        public static int MainMethod(string[] args)
        {
            Foo f = new Foo();
            var list = f.Get();
            // getT() -> D<dynamic,...>, getU() -> D<string, C<dynamic>>,
            // getU() -> C<dynamic>, getT() -> dynamic (the A instance).
            list.getT().getU().getU().getT().Foo();
            if (Test.Status != 1)
                return 1;
            return 0;
        }
    }
    // </Code>
}
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.

using System;
using Avalonia.Controls.Primitives;
using Avalonia.Controls.Templates;
using Avalonia.Controls.Utils;
using Avalonia.Layout;
using Avalonia.LogicalTree;
using Avalonia.Media;
using Avalonia.Metadata;

namespace Avalonia.Controls.Presenters
{
    /// <summary>
    /// Presents a single item of data inside a <see cref="TemplatedControl"/> template.
    /// </summary>
    public class ContentPresenter : Control, IContentPresenter
    {
        /// <summary>
        /// Defines the <see cref="Background"/> property.
        /// </summary>
        public static readonly StyledProperty<IBrush> BackgroundProperty =
            Border.BackgroundProperty.AddOwner<ContentPresenter>();

        /// <summary>
        /// Defines the <see cref="BorderBrush"/> property.
        /// </summary>
        // NOTE(review): declared as AvaloniaProperty<IBrush> while the sibling
        // properties use StyledProperty<T> — confirm whether this is intentional.
        public static readonly AvaloniaProperty<IBrush> BorderBrushProperty =
            Border.BorderBrushProperty.AddOwner<ContentPresenter>();

        /// <summary>
        /// Defines the <see cref="BorderThickness"/> property.
        /// </summary>
        public static readonly StyledProperty<Thickness> BorderThicknessProperty =
            Border.BorderThicknessProperty.AddOwner<ContentPresenter>();

        /// <summary>
        /// Defines the <see cref="CornerRadius"/> property.
        /// </summary>
        public static readonly StyledProperty<CornerRadius> CornerRadiusProperty =
            Border.CornerRadiusProperty.AddOwner<ContentPresenter>();

        /// <summary>
        /// Defines the <see cref="Child"/> property.
        /// </summary>
        public static readonly DirectProperty<ContentPresenter, IControl> ChildProperty =
            AvaloniaProperty.RegisterDirect<ContentPresenter, IControl>(
                nameof(Child),
                o => o.Child);

        /// <summary>
        /// Defines the <see cref="Content"/> property.
        /// </summary>
        public static readonly StyledProperty<object> ContentProperty =
            ContentControl.ContentProperty.AddOwner<ContentPresenter>();

        /// <summary>
        /// Defines the <see cref="ContentTemplate"/> property.
        /// </summary>
        public static readonly StyledProperty<IDataTemplate> ContentTemplateProperty =
            ContentControl.ContentTemplateProperty.AddOwner<ContentPresenter>();

        /// <summary>
        /// Defines the <see cref="HorizontalContentAlignment"/> property.
        /// </summary>
        public static readonly StyledProperty<HorizontalAlignment> HorizontalContentAlignmentProperty =
            ContentControl.HorizontalContentAlignmentProperty.AddOwner<ContentPresenter>();

        /// <summary>
        /// Defines the <see cref="VerticalContentAlignment"/> property.
        /// </summary>
        public static readonly StyledProperty<VerticalAlignment> VerticalContentAlignmentProperty =
            ContentControl.VerticalContentAlignmentProperty.AddOwner<ContentPresenter>();

        /// <summary>
        /// Defines the <see cref="Padding"/> property.
        /// </summary>
        public static readonly StyledProperty<Thickness> PaddingProperty =
            Decorator.PaddingProperty.AddOwner<ContentPresenter>();

        // Backing field for Child (a DirectProperty).
        private IControl _child;

        // True once UpdateChild has run for the current Content/template.
        private bool _createdChild;

        // The template used to build the current child; compared against the
        // newly-resolved template in CreateChild to decide whether to recycle.
        private IDataTemplate _dataTemplate;

        private readonly BorderRenderHelper _borderRenderer = new BorderRenderHelper();

        /// <summary>
        /// Initializes static members of the <see cref="ContentPresenter"/> class.
        /// </summary>
        static ContentPresenter()
        {
            AffectsRender<ContentPresenter>(BackgroundProperty, BorderBrushProperty, BorderThicknessProperty, CornerRadiusProperty);
            AffectsMeasure<ContentPresenter>(BorderThicknessProperty, PaddingProperty);
            ContentProperty.Changed.AddClassHandler<ContentPresenter>(x => x.ContentChanged);
            ContentTemplateProperty.Changed.AddClassHandler<ContentPresenter>(x => x.ContentChanged);
            TemplatedParentProperty.Changed.AddClassHandler<ContentPresenter>(x => x.TemplatedParentChanged);
        }

        /// <summary>
        /// Gets or sets a brush with which to paint the background.
        /// </summary>
        public IBrush Background
        {
            get { return GetValue(BackgroundProperty); }
            set { SetValue(BackgroundProperty, value); }
        }

        /// <summary>
        /// Gets or sets a brush with which to paint the border.
        /// </summary>
        public IBrush BorderBrush
        {
            get { return GetValue(BorderBrushProperty); }
            set { SetValue(BorderBrushProperty, value); }
        }

        /// <summary>
        /// Gets or sets the thickness of the border.
        /// </summary>
        public Thickness BorderThickness
        {
            get { return GetValue(BorderThicknessProperty); }
            set { SetValue(BorderThicknessProperty, value); }
        }

        /// <summary>
        /// Gets or sets the radius of the border rounded corners.
        /// </summary>
        public CornerRadius CornerRadius
        {
            get { return GetValue(CornerRadiusProperty); }
            set { SetValue(CornerRadiusProperty, value); }
        }

        /// <summary>
        /// Gets the control displayed by the presenter.
        /// </summary>
        public IControl Child
        {
            get { return _child; }
            private set { SetAndRaise(ChildProperty, ref _child, value); }
        }

        /// <summary>
        /// Gets or sets the content to be displayed by the presenter.
        /// </summary>
        [DependsOn(nameof(ContentTemplate))]
        public object Content
        {
            get { return GetValue(ContentProperty); }
            set { SetValue(ContentProperty, value); }
        }

        /// <summary>
        /// Gets or sets the data template used to display the content of the control.
        /// </summary>
        public IDataTemplate ContentTemplate
        {
            get { return GetValue(ContentTemplateProperty); }
            set { SetValue(ContentTemplateProperty, value); }
        }

        /// <summary>
        /// Gets or sets the horizontal alignment of the content within the border the control.
        /// </summary>
        public HorizontalAlignment HorizontalContentAlignment
        {
            get { return GetValue(HorizontalContentAlignmentProperty); }
            set { SetValue(HorizontalContentAlignmentProperty, value); }
        }

        /// <summary>
        /// Gets or sets the vertical alignment of the content within the border of the control.
        /// </summary>
        public VerticalAlignment VerticalContentAlignment
        {
            get { return GetValue(VerticalContentAlignmentProperty); }
            set { SetValue(VerticalContentAlignmentProperty, value); }
        }

        /// <summary>
        /// Gets or sets the space between the border and the <see cref="Child"/> control.
        /// </summary>
        public Thickness Padding
        {
            get { return GetValue(PaddingProperty); }
            set { SetValue(PaddingProperty, value); }
        }

        /// <inheritdoc/>
        public sealed override void ApplyTemplate()
        {
            // Only build the child once per Content change, and only when data
            // templates can actually be resolved (i.e. attached to the tree).
            if (!_createdChild && ((ILogical)this).IsAttachedToLogicalTree)
            {
                UpdateChild();
            }
        }

        /// <summary>
        /// Updates the <see cref="Child"/> control based on the control's <see cref="Content"/>.
        /// </summary>
        /// <remarks>
        /// Usually the <see cref="Child"/> control is created automatically when
        /// <see cref="ApplyTemplate"/> is called; however for this to happen, the control needs to
        /// be attached to a logical tree (if the control is not attached to the logical tree, it
        /// is reasonable to expect that the DataTemplates needed for the child are not yet
        /// available). This method forces the <see cref="Child"/> control's creation at any point,
        /// and is particularly useful in unit tests.
        /// </remarks>
        public void UpdateChild()
        {
            var content = Content;
            var oldChild = Child;
            var newChild = CreateChild();

            // Remove the old child if we're not recycling it.
            if (oldChild != null && newChild != oldChild)
            {
                VisualChildren.Remove(oldChild);
            }

            // Set the DataContext if the data isn't a control.
            if (!(content is IControl))
            {
                DataContext = content;
            }
            else
            {
                // Content is itself a control: let DataContext inherit instead.
                ClearValue(DataContextProperty);
            }

            // Update the Child.
            if (newChild == null)
            {
                Child = null;
            }
            else if (newChild != oldChild)
            {
                ((ISetInheritanceParent)newChild).SetParent(this);

                Child = newChild;

                if (oldChild?.Parent == this)
                {
                    LogicalChildren.Remove(oldChild);
                }

                // Only adopt the child logically when nobody else owns it and we
                // are not inside a template (the templated parent owns it then).
                if (newChild.Parent == null && TemplatedParent == null)
                {
                    LogicalChildren.Add(newChild);
                }

                VisualChildren.Add(newChild);
            }

            _createdChild = true;
        }

        /// <inheritdoc/>
        protected override void OnAttachedToLogicalTree(LogicalTreeAttachmentEventArgs e)
        {
            base.OnAttachedToLogicalTree(e);
            // Templates may resolve differently in the new tree; force a rebuild.
            _dataTemplate = null;
            _createdChild = false;
            InvalidateMeasure();
        }

        /// <inheritdoc/>
        public override void Render(DrawingContext context)
        {
            _borderRenderer.Render(context, Bounds.Size, BorderThickness, CornerRadius, Background, BorderBrush);
        }

        /// <summary>
        /// Creates the child control.
        /// </summary>
        /// <returns>The child control or null.</returns>
        protected virtual IControl CreateChild()
        {
            var content = Content;
            var oldChild = Child;
            var newChild = content as IControl;

            if (content != null && newChild == null)
            {
                var dataTemplate = this.FindDataTemplate(content, ContentTemplate) ?? FuncDataTemplate.Default;

                // We have content and it isn't a control, so if the new data template is the same
                // as the old data template, try to recycle the existing child control to display
                // the new data.
                if (dataTemplate == _dataTemplate && dataTemplate.SupportsRecycling)
                {
                    newChild = oldChild;
                }
                else
                {
                    _dataTemplate = dataTemplate;
                    newChild = _dataTemplate.Build(content);

                    // Give the new control its own name scope.
                    if (newChild is Control controlResult)
                    {
                        NameScope.SetNameScope(controlResult, new NameScope());
                    }
                }
            }
            else
            {
                _dataTemplate = null;
            }

            return newChild;
        }

        /// <inheritdoc/>
        protected override Size MeasureOverride(Size availableSize)
        {
            return LayoutHelper.MeasureChild(Child, availableSize, Padding, BorderThickness);
        }

        /// <inheritdoc/>
        protected override Size ArrangeOverride(Size finalSize)
        {
            _borderRenderer.Update(finalSize, BorderThickness, CornerRadius);
            return ArrangeOverrideImpl(finalSize, new Vector());
        }

        // Arranges Child within finalSize honoring content alignment, padding,
        // border thickness and (optionally) layout rounding; `offset` shifts the
        // child's origin (used by derived presenters such as scrolling ones).
        internal Size ArrangeOverrideImpl(Size finalSize, Vector offset)
        {
            if (Child == null) return finalSize;

            var padding = Padding + BorderThickness;
            var horizontalContentAlignment = HorizontalContentAlignment;
            var verticalContentAlignment = VerticalContentAlignment;
            var useLayoutRounding = UseLayoutRounding;
            var availableSize = finalSize;
            var sizeForChild = availableSize;
            var scale = GetLayoutScale();
            var originX = offset.X;
            var originY = offset.Y;

            // When not stretching, the child only gets its desired size.
            if (horizontalContentAlignment != HorizontalAlignment.Stretch)
            {
                sizeForChild = sizeForChild.WithWidth(Math.Min(sizeForChild.Width, DesiredSize.Width));
            }

            if (verticalContentAlignment != VerticalAlignment.Stretch)
            {
                sizeForChild = sizeForChild.WithHeight(Math.Min(sizeForChild.Height, DesiredSize.Height));
            }

            // Snap sizes up to the nearest device pixel so edges stay crisp.
            if (useLayoutRounding)
            {
                sizeForChild = new Size(
                    Math.Ceiling(sizeForChild.Width * scale) / scale,
                    Math.Ceiling(sizeForChild.Height * scale) / scale);
                availableSize = new Size(
                    Math.Ceiling(availableSize.Width * scale) / scale,
                    Math.Ceiling(availableSize.Height * scale) / scale);
            }

            switch (horizontalContentAlignment)
            {
                case HorizontalAlignment.Center:
                    originX += (availableSize.Width - sizeForChild.Width) / 2;
                    break;
                case HorizontalAlignment.Right:
                    originX += availableSize.Width - sizeForChild.Width;
                    break;
            }

            switch (verticalContentAlignment)
            {
                case VerticalAlignment.Center:
                    originY += (availableSize.Height - sizeForChild.Height) / 2;
                    break;
                case VerticalAlignment.Bottom:
                    originY += availableSize.Height - sizeForChild.Height;
                    break;
            }

            // Snap origins down so the child never overflows the rounded bounds.
            if (useLayoutRounding)
            {
                originX = Math.Floor(originX * scale) / scale;
                originY = Math.Floor(originY * scale) / scale;
            }

            var boundsForChild =
                new Rect(originX, originY, sizeForChild.Width, sizeForChild.Height).Deflate(padding);

            Child.Arrange(boundsForChild);

            return finalSize;
        }

        /// <summary>
        /// Called when the <see cref="Content"/> property changes.
        /// </summary>
        /// <param name="e">The event args.</param>
        private void ContentChanged(AvaloniaPropertyChangedEventArgs e)
        {
            _createdChild = false;

            if (((ILogical)this).IsAttachedToLogicalTree)
            {
                UpdateChild();
            }
            else if (Child != null)
            {
                // Detached: drop the stale child now; it will be rebuilt when the
                // presenter is reattached (see OnAttachedToLogicalTree).
                VisualChildren.Remove(Child);
                LogicalChildren.Remove(Child);
                Child = null;
                _dataTemplate = null;
            }

            InvalidateMeasure();
        }

        // Returns the root's layout scaling factor, defaulting to 1.0 when there
        // is no layout root; rejects 0/NaN/Infinity, which would corrupt the
        // rounding arithmetic in ArrangeOverrideImpl.
        private double GetLayoutScale()
        {
            var result = (VisualRoot as ILayoutRoot)?.LayoutScaling ?? 1.0;

            if (result == 0 || double.IsNaN(result) || double.IsInfinity(result))
            {
                throw new Exception($"Invalid LayoutScaling returned from {VisualRoot.GetType()}");
            }

            return result;
        }

        // Lets the new templated parent (if it hosts content presenters) register
        // this presenter so it can hook up Content/ContentTemplate bindings.
        private void TemplatedParentChanged(AvaloniaPropertyChangedEventArgs e)
        {
            (e.NewValue as IContentPresenterHost)?.RegisterContentPresenter(this);
        }
    }
}
//////////////////////////////////////////////////////////////////////////////// // Gtk GLWidget Sharp - Gtk OpenGL Widget for CSharp using OpenTK //////////////////////////////////////////////////////////////////////////////// /* Usage: To render either override OnRenderFrame() or hook to the RenderFrame event. When GraphicsContext.ShareContexts == True (Default) To setup OpenGL state hook to the following events: GLWidget.GraphicsContextInitialized GLWidget.GraphicsContextShuttingDown When GraphicsContext.ShareContexts == False To setup OpenGL state hook to the following events: GLWidget.Initialized GLWidget.ShuttingDown */ //////////////////////////////////////////////////////////////////////////////// using System; using System.Collections.Generic; using System.ComponentModel; using System.Runtime.InteropServices; using System.Security; using System.Threading; using Eto.Drawing; using Eto.GtkSharp; using Gtk; using OpenTK; using OpenTK.Graphics; using OpenTK.Graphics.OpenGL; using OpenTK.Platform; namespace Eto.OpenTK.Gtk { [ToolboxItem (true)] public class GLDrawingArea : DrawingArea, IDisposable { IGraphicsContext graphicsContext; static int graphicsContextCount; const string macos_libgdk_name = "libgdk-quartz-2.0.0.dylib"; const string linux_libx11_name = "libX11.so.6"; const string linux_libgdk_x11_name = "libgdk-x11-2.0.so.0"; const string linux_libgl_name = "libGL.so.1"; const string libgdk_name = "libgdk-win32-2.0-0.dll"; const string libX11_name = "libX11"; /// <summary>Use a single buffer versus a double buffer.</summary> [Browsable (true)] public bool SingleBuffer { get; set; } /// <summary>Color Buffer Bits-Per-Pixel</summary> public int ColorBPP { get; set; } /// <summary>Accumulation Buffer Bits-Per-Pixel</summary> public int AccumulatorBPP { get; set; } /// <summary>Depth Buffer Bits-Per-Pixel</summary> public int DepthBPP { get; set; } /// <summary>Stencil Buffer Bits-Per-Pixel</summary> public int StencilBPP { get; set; } /// <summary>Number of 
samples</summary> public int Samples { get; set; } /// <summary>Indicates if steropic renderering is enabled</summary> public bool Stereo { get; set; } IWindowInfo windowInfo; /// <summary>The major version of OpenGL to use.</summary> public int GlVersionMajor { get; set; } /// <summary>The minor version of OpenGL to use.</summary> public int GlVersionMinor { get; set; } private Size size; /// <summary> /// Gets or sets the context size. /// </summary> /// <value>The width.</value> public virtual Size GLSize { get { return Visible ? Allocation.Size.ToEto () : size; } set { if (size != value) { size = value; var alloc = Allocation; alloc.Size = value.ToGdk (); SetSizeRequest (size.Width, size.Height); } } } bool initialized = false; public virtual bool IsInitialized { get { return initialized; } } public GraphicsContextFlags GraphicsContextFlags { get { return graphicsContextFlags; } set { graphicsContextFlags = value; } } GraphicsContextFlags graphicsContextFlags; /// <summary>Constructs a new GLWidget.</summary> public GLDrawingArea () : this (GraphicsMode.Default) { } /// <summary>Constructs a new GLWidget using a given GraphicsMode</summary> public GLDrawingArea (GraphicsMode graphicsMode) : this (graphicsMode, 3, 0, GraphicsContextFlags.Default) { } /// <summary>Constructs a new GLWidget</summary> public GLDrawingArea (GraphicsMode graphicsMode, int glVersionMajor, int glVersionMinor, GraphicsContextFlags graphicsContextFlags) { this.DoubleBuffered = false; CanFocus = true; SingleBuffer = graphicsMode.Buffers == 1; ColorBPP = graphicsMode.ColorFormat.BitsPerPixel; AccumulatorBPP = graphicsMode.AccumulatorFormat.BitsPerPixel; DepthBPP = graphicsMode.Depth; StencilBPP = graphicsMode.Stencil; Samples = graphicsMode.Samples; Stereo = graphicsMode.Stereo; GlVersionMajor = glVersionMajor; GlVersionMinor = glVersionMinor; GraphicsContextFlags = graphicsContextFlags; } ~GLDrawingArea () { Dispose (false); } public override void Dispose () { GC.SuppressFinalize (this); 
Dispose (true); base.Dispose (); } public virtual void Dispose (bool disposing) { if (disposing) { graphicsContext.MakeCurrent (windowInfo); OnShuttingDown (); if (GraphicsContext.ShareContexts && (Interlocked.Decrement (ref graphicsContextCount) == 0)) { OnGraphicsContextShuttingDown (); sharedContextInitialized = false; } graphicsContext.Dispose (); } } public virtual void MakeCurrent () { if (!initialized) { return; } graphicsContext.MakeCurrent (windowInfo); } public virtual void SwapBuffers () { if (!initialized) { return; } Display.Flush (); graphicsContext.SwapBuffers (); Display.Sync (); } // Called when the first GraphicsContext is created in the case of GraphicsContext.ShareContexts == True; public static event EventHandler GraphicsContextInitialized; static void OnGraphicsContextInitialized () { GraphicsContextInitialized?.Invoke (null, EventArgs.Empty); } // Called when the first GraphicsContext is being destroyed in the case of GraphicsContext.ShareContexts == True; public static event EventHandler GraphicsContextShuttingDown; static void OnGraphicsContextShuttingDown () { GraphicsContextShuttingDown?.Invoke (null, EventArgs.Empty); } // Called when this GLWidget has a valid GraphicsContext public event EventHandler Initialized; protected virtual void OnInitialized () { Initialized?.Invoke (this, EventArgs.Empty); } // Called when this GLWidget needs to render a frame public event EventHandler Resize; protected virtual void OnResize () { Resize?.Invoke (this, EventArgs.Empty); } // Called when this GLWidget is being Disposed public event EventHandler ShuttingDown; protected virtual void OnShuttingDown () { ShuttingDown?.Invoke (this, EventArgs.Empty); } static bool sharedContextInitialized = false; void InitializeContext() { Toolkit.Init(); // If this looks uninitialized... initialize. 
if (ColorBPP == 0) { ColorBPP = 24; if (DepthBPP == 0) DepthBPP = 16; } ColorFormat colorBufferColorFormat = new ColorFormat (ColorBPP); ColorFormat accumulationColorFormat = new ColorFormat (AccumulatorBPP); int buffers = 2; if (SingleBuffer) buffers--; var graphicsMode = new GraphicsMode (colorBufferColorFormat, DepthBPP, StencilBPP, Samples, accumulationColorFormat, buffers, Stereo); // IWindowInfo if (Configuration.RunningOnWindows) { IntPtr windowHandle = gdk_win32_drawable_get_handle (GdkWindow.Handle); windowInfo = Utilities.CreateWindowsWindowInfo (windowHandle); } else if (Configuration.RunningOnMacOS) { /* doesn't seem to work.. takes up whole window for now, but better than just crashing? if (!gdk_window_ensure_native(GdkWindow.Handle)) throw new InvalidOperationException("Couldn't create native NSView"); */ IntPtr windowHandle = gdk_quartz_window_get_nswindow(GdkWindow.Handle); IntPtr viewHandle = gdk_quartz_window_get_nsview (GdkWindow.Handle); windowInfo = Utilities.CreateMacOSWindowInfo(windowHandle, viewHandle); } else if (Configuration.RunningOnX11) { IntPtr display = gdk_x11_display_get_xdisplay (Display.Handle); int screen = Screen.Number; IntPtr windowHandle = gdk_x11_drawable_get_xid (GdkWindow.Handle); IntPtr rootWindow = gdk_x11_drawable_get_xid (RootWindow.Handle); IntPtr visualInfo; if (graphicsMode.Index.HasValue) { XVisualInfo info = new XVisualInfo (); info.VisualID = graphicsMode.Index.Value; int dummy; visualInfo = XGetVisualInfo (display, XVisualInfoMask.ID, ref info, out dummy); } else { visualInfo = GetVisualInfo (display); } windowInfo = Utilities.CreateX11WindowInfo (display, screen, windowHandle, rootWindow, visualInfo); XFree (visualInfo); } else throw new PlatformNotSupportedException (); // GraphicsContext graphicsContext = new GraphicsContext (graphicsMode, windowInfo, GlVersionMajor, GlVersionMinor, graphicsContextFlags); graphicsContext.MakeCurrent (windowInfo); if (GraphicsContext.ShareContexts) { Interlocked.Increment 
(ref graphicsContextCount); if (!sharedContextInitialized) { sharedContextInitialized = true; ((IGraphicsContextInternal)graphicsContext).LoadAll (); OnGraphicsContextInitialized (); } } else { ((IGraphicsContextInternal)graphicsContext).LoadAll (); OnGraphicsContextInitialized (); } initialized = true; OnInitialized (); //QueueDraw(); } // Called when the widget needs to be (fully or partially) redrawn. protected override bool OnExposeEvent(Gdk.EventExpose eventExpose) { if (!initialized) { // initializing during the expose event crashes in ubuntu 15.10 global::Gtk.Application.Invoke((sender, e) => InitializeContext()); return base.OnExposeEvent(eventExpose); } else { MakeCurrent(); } bool result = base.OnExposeEvent(eventExpose); if (Configuration.RunningOnMacOS) { //GL.Viewport(Allocation.Left, Allocation.Top, Allocation.Width, Allocation.Height); //GL.MatrixMode(MatrixMode.Projection); //GL.LoadIdentity(); //GL.Ortho(-1.0, 1.0, -1.0, 1.0, 0.0, 4.0); } OnResize(); eventExpose.Window.Display.Sync(); // Add Sync call to fix resize rendering problem (Jay L. T. Cornwall) - How does this affect VSync? 
    return result;
}

// Keeps the GL surface in sync when the widget is resized or moved.
protected override bool OnConfigureEvent (Gdk.EventConfigure evnt)
{
    bool result = base.OnConfigureEvent (evnt);
    graphicsContext?.Update (windowInfo);
    return result;
}

// X11 visual class values, mirroring the constants from X.h.
public enum XVisualClass : int
{
    StaticGray = 0,
    GrayScale = 1,
    StaticColor = 2,
    PseudoColor = 3,
    TrueColor = 4,
    DirectColor = 5,
}

// Managed mirror of the native XVisualInfo struct filled in by XGetVisualInfo.
[StructLayout (LayoutKind.Sequential)]
struct XVisualInfo
{
    public IntPtr Visual;
    public IntPtr VisualID;
    public int Screen;
    public int Depth;
    public XVisualClass Class;
    public long RedMask;
    public long GreenMask;
    public long blueMask;
    public int ColormapSize;
    public int BitsPerRgb;

    public override string ToString ()
    {
        return String.Format ("id ({0}), screen ({1}), depth ({2}), class ({3})", VisualID, Screen, Depth, Class);
    }
}

// Bit flags selecting which XVisualInfo fields XGetVisualInfo matches against.
[Flags]
internal enum XVisualInfoMask
{
    No = 0x0,
    ID = 0x1,
    Screen = 0x2,
    Depth = 0x4,
    Class = 0x8,
    Red = 0x10,
    Green = 0x20,
    Blue = 0x40,
    ColormapSize = 0x80,
    BitsPerRGB = 0x100,
    All = 0x1FF,
}

[DllImport (libX11_name, EntryPoint = "XGetVisualInfo")]
static extern IntPtr XGetVisualInfoInternal (IntPtr display, IntPtr vinfo_mask, ref XVisualInfo template, out int nitems);

// Typed convenience wrapper over the raw XGetVisualInfo import.
static IntPtr XGetVisualInfo (IntPtr display, XVisualInfoMask vinfo_mask, ref XVisualInfo template, out int nitems)
{
    return XGetVisualInfoInternal (display, (IntPtr)(int)vinfo_mask, ref template, out nitems);
}

[SuppressUnmanagedCodeSecurity, DllImport (libgdk_name, CallingConvention = CallingConvention.Cdecl)]
public static extern IntPtr gdk_win32_drawable_get_handle (IntPtr d);

[SuppressUnmanagedCodeSecurity, DllImport (linux_libx11_name)]
static extern void XFree (IntPtr handle);

/// <summary> Returns the X resource (window or pixmap) belonging to a GdkDrawable. </summary>
/// <remarks> XID gdk_x11_drawable_get_xid(GdkDrawable *drawable); </remarks>
/// <param name="gdkDisplay"> The GdkDrawable. </param>
/// <returns> The ID of drawable's X resource.
/// </returns>
[SuppressUnmanagedCodeSecurity, DllImport (linux_libgdk_x11_name)]
static extern IntPtr gdk_x11_drawable_get_xid (IntPtr gdkDisplay);

/// <summary> Returns the X display of a GdkDisplay. </summary>
/// <remarks> Display* gdk_x11_display_get_xdisplay(GdkDisplay *display); </remarks>
/// <param name="gdkDisplay"> The GdkDrawable. </param>
/// <returns> The X Display of the GdkDisplay. </returns>
[SuppressUnmanagedCodeSecurity, DllImport (linux_libgdk_x11_name)]
static extern IntPtr gdk_x11_display_get_xdisplay (IntPtr gdkDisplay);

[SuppressUnmanagedCodeSecurity, DllImport (linux_libgl_name)]
static extern IntPtr glXChooseVisual (IntPtr display, int screen, int[] attr);

[SuppressUnmanagedCodeSecurity, DllImport(macos_libgdk_name, CallingConvention = CallingConvention.Cdecl)]
static extern IntPtr gdk_quartz_window_get_nsview(IntPtr d);

[SuppressUnmanagedCodeSecurity, DllImport(macos_libgdk_name, CallingConvention = CallingConvention.Cdecl)]
static extern IntPtr gdk_quartz_window_get_nswindow(IntPtr d);

[SuppressUnmanagedCodeSecurity, DllImport(macos_libgdk_name, CallingConvention = CallingConvention.Cdecl)]
static extern bool gdk_window_ensure_native(IntPtr window);

// Asks glX to choose a visual matching AttributeList; translates missing-library
// and missing-entry-point failures into more descriptive exceptions.
IntPtr GetVisualInfo (IntPtr display)
{
    try {
        int[] attributes = AttributeList.ToArray ();
        return glXChooseVisual (display, Screen.Number, attributes);
    } catch (DllNotFoundException e) {
        throw new DllNotFoundException ("OpenGL dll not found!", e);
    } catch (EntryPointNotFoundException enf) {
        throw new EntryPointNotFoundException ("Glx entry point not found!", enf);
    }
}

// GLX attribute tokens (values as in GL/glx.h).
const int GLX_NONE = 0;
const int GLX_USE_GL = 1;
const int GLX_BUFFER_SIZE = 2;
const int GLX_LEVEL = 3;
const int GLX_RGBA = 4;
const int GLX_DOUBLEBUFFER = 5;
const int GLX_STEREO = 6;
const int GLX_AUX_BUFFERS = 7;
const int GLX_RED_SIZE = 8;
const int GLX_GREEN_SIZE = 9;
const int GLX_BLUE_SIZE = 10;
const int GLX_ALPHA_SIZE = 11;
const int GLX_DEPTH_SIZE = 12;
const int GLX_STENCIL_SIZE = 13;
const int GLX_ACCUM_RED_SIZE = 14;
const int GLX_ACCUM_GREEN_SIZE = 15;
const int GLX_ACCUM_BLUE_SIZE = 16;
const int GLX_ACCUM_ALPHA_SIZE = 17;

// Builds the attribute array handed to glXChooseVisual from the widget's
// requested buffer sizes. glX requires the list to be GLX_NONE-terminated.
List<int> AttributeList
{
    get {
        List<int> attributeList = new List<int> (24);

        attributeList.Add (GLX_RGBA);

        if (!SingleBuffer)
            attributeList.Add (GLX_DOUBLEBUFFER);

        if (Stereo)
            attributeList.Add (GLX_STEREO);

        attributeList.Add (GLX_RED_SIZE);
        attributeList.Add (ColorBPP / 4); // TODO support 16-bit

        attributeList.Add (GLX_GREEN_SIZE);
        attributeList.Add (ColorBPP / 4); // TODO support 16-bit

        attributeList.Add (GLX_BLUE_SIZE);
        attributeList.Add (ColorBPP / 4); // TODO support 16-bit

        attributeList.Add (GLX_ALPHA_SIZE);
        attributeList.Add (ColorBPP / 4); // TODO support 16-bit

        attributeList.Add (GLX_DEPTH_SIZE);
        attributeList.Add (DepthBPP);

        attributeList.Add (GLX_STENCIL_SIZE);
        attributeList.Add (StencilBPP);

        //attributeList.Add(GLX_AUX_BUFFERS);
        //attributeList.Add(Buffers);

        attributeList.Add (GLX_ACCUM_RED_SIZE);
        attributeList.Add (AccumulatorBPP / 4);// TODO support 16-bit

        attributeList.Add (GLX_ACCUM_GREEN_SIZE);
        attributeList.Add (AccumulatorBPP / 4);// TODO support 16-bit

        attributeList.Add (GLX_ACCUM_BLUE_SIZE);
        attributeList.Add (AccumulatorBPP / 4);// TODO support 16-bit

        attributeList.Add (GLX_ACCUM_ALPHA_SIZE);
        attributeList.Add (AccumulatorBPP / 4);// TODO support 16-bit

        attributeList.Add (GLX_NONE);

        return attributeList;
    }
}
}
}
using UnityEngine;
using System.Collections.Generic;

// Renders a jovian-style (gas giant) planet: a cubemap sampled over one or more
// meshes, shaded via lookup textures generated from the gradient fields below.
[ExecuteInEditMode]
[AddComponentMenu("Space Graphics Toolkit/SGT Jovian")]
public class SgtJovian : MonoBehaviour
{
    // Every enabled SgtJovian in the scene (maintained by OnEnable/OnDisable).
    public static List<SgtJovian> AllJovians = new List<SgtJovian>();

    // Lights and shadow casters written into the material each update.
    public List<Light> Lights = new List<Light>();

    public List<SgtShadow> Shadows = new List<SgtShadow>();

    // Radius of the jovian mesh; also the camera-inside/outside threshold.
    public float MeshRadius = 1.0f;

    // One model is created per mesh (see UpdateModels).
    public List<Mesh> Meshes = new List<Mesh>();

    public Color Color = Color.white;

    public float Brightness = 1.0f;

    public SgtRenderQueue RenderQueue = SgtRenderQueue.Transparent;

    public int RenderQueueOffset;

    public bool Smooth = true;

    public bool Scattering;

    [SgtRangeAttribute(0.0f, 5.0f)]
    public float MieSharpness = 2.0f;

    [SgtRangeAttribute(0.0f, 10.0f)]
    public float MieStrength = 1.0f;

    public bool LimitAlpha = true;

    // Cubemap sampled by the "Jovian" shader (_MainTex).
    public Cubemap MainTex;

    public float Power = 3.0f;

    public float Density = 10.0f;

    public SgtOutputMode DensityMode;

    // Gradients baked into the lighting/rim lookup textures.
    public Gradient LightingBrightness = new Gradient();

    public Gradient LightingColor = new Gradient();

    public Gradient RimColor = new Gradient();

    // When true, the lookup textures are regenerated on the next update.
    private bool lutDirty = true;

    private int lightCount;

    private int shadowCount;

    // Serialized so the gradient defaults are only applied on first Awake.
    [SerializeField]
    private bool awakeCalled;

    [System.NonSerialized]
    private Material material;

    [System.NonSerialized]
    private Texture2D lightingLut;

    [System.NonSerialized]
    private Texture2D rimLut;

    [SerializeField]
    private List<SgtJovianModel> models = new List<SgtJovianModel>();

    // Scratch list reused by SetCurrentCamera to avoid per-call allocations.
    private static List<string> keywords = new List<string>();

    private static GradientColorKey[] defaultLightingBrightness = new GradientColorKey[] { new GradientColorKey(Color.black, 0.4f), new GradientColorKey(Color.white, 0.6f) };

    private static GradientColorKey[] defaultLightingColor = new GradientColorKey[] { new GradientColorKey(Color.red, 0.25f), new GradientColorKey(Color.white, 0.5f) };

    private static GradientColorKey[] defaultRimColor = new GradientColorKey[] { new GradientColorKey(Color.blue, 0.0f), new GradientColorKey(Color.white, 0.5f) };

    // Requests a rebuild of the lookup textures on the next update.
    public void MarkLutAsDirty()
    {
#if UNITY_EDITOR
        if (lutDirty == false)
        {
            SgtHelper.SetDirty(this);
        }
#endif
        lutDirty = true;
    }

    // Regenerates LUTs (if dirty), refreshes the material, and syncs the models.
    public void UpdateState()
    {
        UpdateDirty();
        UpdateMaterial();
        UpdateModels();
    }

    // Enables the shader keywords appropriate for the given camera:
    // SGT_A = camera outside the mesh radius, SGT_B = Smooth,
    // SGT_C = logarithmic density, SGT_D = scattering, SGT_E = limit alpha.
    public void SetCurrentCamera(Camera c)
    {
        if (c != null)
        {
            UpdateMaterial();

            var cameraPosition      = c.transform.position;
            var localCameraPosition = transform.InverseTransformPoint(cameraPosition);
            var localDistance       = localCameraPosition.magnitude;

            if (localDistance > MeshRadius)
            {
                keywords.Add("SGT_A");
            }

            if (Smooth == true)
            {
                keywords.Add("SGT_B");
            }

            switch (DensityMode)
            {
                case SgtOutputMode.Linear: break;
                case SgtOutputMode.Logarithmic: keywords.Add("SGT_C"); break;
            }

            if (Scattering == true)
            {
                keywords.Add("SGT_D");

                SgtHelper.WriteMie(MieSharpness, MieStrength, material);

                if (LimitAlpha == true)
                {
                    keywords.Add("SGT_E");
                }
            }

            SgtHelper.WriteLightKeywords(Lights.Count > 0, lightCount, keywords);
            SgtHelper.WriteShadowKeywords(shadowCount, keywords);
            SgtHelper.SetKeywords(material, keywords);

            // The keyword list is a shared static scratch buffer; clear for reuse.
            keywords.Clear();
        }
    }

    // Creates a new jovian GameObject at the origin of the given parent.
    public static SgtJovian CreateJovian(Transform parent = null)
    {
        return CreateJovian(parent, Vector3.zero, Quaternion.identity, Vector3.one);
    }

    // Creates a new jovian GameObject with an explicit local transform.
    public static SgtJovian CreateJovian(Transform parent, Vector3 localPosition, Quaternion localRotation, Vector3 localScale)
    {
        var gameObject = SgtHelper.CreateGameObject("Jovian", parent, localPosition, localRotation, localScale);
        var jovian     = gameObject.AddComponent<SgtJovian>();

        return jovian;
    }

#if UNITY_EDITOR
    // Editor menu entry that creates and selects a new jovian.
    [UnityEditor.MenuItem(SgtHelper.GameObjectMenuPrefix + "Jovian", false, 10)]
    public static void CreateJovianMenuItem()
    {
        var jovian = CreateJovian(null);

        SgtHelper.SelectAndPing(jovian);
    }
#endif

    protected virtual void Awake()
    {
        // Drop stale/foreign model references left over from serialization.
        models.RemoveAll(m => m == null || m.Jovian != this);

        // Seed the gradients with defaults only once per component lifetime.
        if (awakeCalled == false)
        {
            awakeCalled = true;

            LightingBrightness.colorKeys = defaultLightingBrightness;
            LightingColor.colorKeys      = defaultLightingColor;
            RimColor.colorKeys           = defaultRimColor;
        }
    }

    protected virtual void OnEnable()
    {
#if UNITY_EDITOR
        if (AllJovians.Count == 0)
        {
            SgtHelper.RepaintAll();
        }
#endif
        AllJovians.Add(this);

        for (var i = models.Count - 1; i >= 0; i--)
        {
            var model = models[i];

            if (model != null)
            {
                model.gameObject.SetActive(true);
            }
        }
    }

    protected virtual void OnDisable()
    {
        AllJovians.Remove(this);

        for (var i = models.Count - 1; i >= 0; i--)
        {
            var model = models[i];

            if (model != null)
            {
                model.gameObject.SetActive(false);
            }
        }
    }

    protected virtual void OnDestroy()
    {
        // Release the temp material and mark all child models for destruction.
        SgtHelper.Destroy(material);

        for (var i = models.Count - 1; i >= 0; i--)
        {
            SgtJovianModel.MarkForDestruction(models[i]);
        }

        models.Clear();
    }

    protected virtual void Update()
    {
        UpdateState();
    }

#if UNITY_EDITOR
    // Draws the jovian bounds as a sphere gizmo in the scene view.
    protected virtual void OnDrawGizmosSelected()
    {
        if (SgtHelper.Enabled(this) == true)
        {
            var r0 = transform.lossyScale;

            SgtHelper.DrawSphere(transform.position, transform.right * r0.x, transform.up * r0.y, transform.forward * r0.z);
        }
    }
#endif

    // Regenerates both lookup textures when dirty or missing.
    private void UpdateDirty()
    {
        if (lightingLut == null || rimLut == null) lutDirty = true;

        if (lutDirty == true)
        {
            lutDirty = false;

            RegenerateRimLut();
            RegenerateLightingLut();
        }
    }

    // Bakes RimColor into a 1x64 clamped lookup texture.
    private void RegenerateRimLut()
    {
        if (rimLut == null || rimLut.width != 1 || rimLut.height != 64)
        {
            SgtHelper.Destroy(rimLut);

            rimLut = SgtHelper.CreateTempTeture2D(1, 64);
        }

        for (var y = 0; y < rimLut.height; y++)
        {
            var t = y / (float)rimLut.height;

            rimLut.SetPixel(0, y, RimColor.Evaluate(t));
        }

        rimLut.wrapMode = TextureWrapMode.Clamp;

        rimLut.Apply();
    }

    // Bakes LightingBrightness * LightingColor into a 1x64 clamped lookup texture.
    private void RegenerateLightingLut()
    {
        if (lightingLut == null || lightingLut.width != 1 || lightingLut.height != 64)
        {
            SgtHelper.Destroy(lightingLut);

            lightingLut = SgtHelper.CreateTempTeture2D(1, 64);
        }

        for (var y = 0; y < lightingLut.height; y++)
        {
            var t = y / (float)lightingLut.height;

            lightingLut.SetPixel(0, y, LightingBrightness.Evaluate(t) * LightingColor.Evaluate(t));
        }

        lightingLut.wrapMode = TextureWrapMode.Clamp;

        lightingLut.Apply();
    }

    // Pushes all current settings into the (lazily created) "Jovian" material.
    private void UpdateMaterial()
    {
        if (material == null) material = SgtHelper.CreateTempMaterial(SgtHelper.ShaderNamePrefix + "Jovian");

        var color        = SgtHelper.Brighten(Color, Brightness);
        var renderQueue  = (int)RenderQueue + RenderQueueOffset;
        var localToWorld = transform.localToWorldMatrix * SgtHelper.Scaling(MeshRadius * 2.0f); // Double mesh radius so the max thickness caps at 1.0

        material.renderQueue = renderQueue;

        material.SetTexture("_MainTex", MainTex);
        material.SetColor("_Color", color);
        material.SetFloat("_Power", Power);
        material.SetFloat("_Density", Density);
        material.SetMatrix("_WorldToLocal", localToWorld.inverse);
        material.SetMatrix("_LocalToWorld", localToWorld);
        material.SetTexture("_RimLut", rimLut);
        material.SetTexture("_LightingLut", lightingLut);

        lightCount  = SgtHelper.WriteLights(Lights, 2, transform.position, transform, null, material);
        shadowCount = SgtHelper.WriteShadows(Shadows, 2, material);
    }

    // Keeps one SgtJovianModel alive per mesh and pushes the mesh + material to it.
    private void UpdateModels()
    {
        models.RemoveAll(m => m == null);

        if (Meshes.Count != models.Count)
        {
            SgtHelper.ResizeArrayTo(ref models, Meshes.Count, i => SgtJovianModel.Create(this), m => SgtJovianModel.Pool(m));
        }

        for (var i = Meshes.Count - 1; i >= 0; i--)
        {
            models[i].ManualUpdate(Meshes[i], material);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*****************************************************************************************************
 Rules for Multiple Nested Parent, enforce following constraints

 1) At all times, only 1(ONE) FK can be NON-Null in a row.
 2) NULL FK values are not associated with PARENT(x), even if PK is NULL in Parent
 3) Enforce <rule 1> when
   a) Any FK value is changed
   b) A relation created that result in Multiple Nested Child

 WriteXml

 1) WriteXml will throw if <rule 1> is violated
 2) if NON-Null FK has parentRow (boolean check) print as Nested, else it will get written as normal row

 additional notes:
 We decided to enforce the rule 1 just if Xml being persisted
******************************************************************************************************/

using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using System.Data.Common;
using System.Collections.Generic;
using System.Threading;

namespace System.Data
{
    // Represents a parent/child relationship between two DataTables, expressed
    // as a pair of matched column keys.
    [DefaultProperty(nameof(RelationName))]
    [TypeConverter(typeof(RelationshipConverter))]
    public class DataRelation
    {
        // properties
        private DataSet _dataSet = null;
        internal PropertyCollection _extendedProperties = null;
        internal string _relationName = string.Empty;

        // state
        private DataKey _childKey;
        private DataKey _parentKey;
        private UniqueConstraint _parentKeyConstraint = null;
        private ForeignKeyConstraint _childKeyConstraint = null;

        // Design time serialization
        internal string[] _parentColumnNames = null;
        internal string[] _childColumnNames = null;
        internal string _parentTableName = null;
        internal string _childTableName = null;
        internal string _parentTableNamespace = null;
        internal string _childTableNamespace = null;

        /// <summary>
        /// This stores whether the child element appears beneath the parent in the XML persisted files.
        /// </summary>
        internal bool _nested = false;

        /// <summary>
        /// This stores whether the relationship should make sure that KeyConstraints and ForeignKeyConstraints
        /// exist when added to the ConstraintsCollections of the table.
        /// </summary>
        internal bool _createConstraints;

        private bool _checkMultipleNested = true;

        private static int s_objectTypeCount; // Bid counter
        // Unique per-instance id used only for diagnostic tracing.
        private readonly int _objectID = Interlocked.Increment(ref s_objectTypeCount);

        /// <summary>
        /// Initializes a new instance of the <see cref='System.Data.DataRelation'/> class using the specified name,
        /// parent, and child columns.
        /// </summary>
        public DataRelation(string relationName, DataColumn parentColumn, DataColumn childColumn) : this(relationName, parentColumn, childColumn, true)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref='System.Data.DataRelation'/> class using the specified name, parent, and child columns, and
        /// value to create constraints.
        /// </summary>
        public DataRelation(string relationName, DataColumn parentColumn, DataColumn childColumn, bool createConstraints)
        {
            DataCommonEventSource.Log.Trace("<ds.DataRelation.DataRelation|API> {0}, relationName='{1}', parentColumn={2}, childColumn={3}, createConstraints={4}", ObjectID, relationName, (parentColumn != null) ? parentColumn.ObjectID : 0, (childColumn != null) ? childColumn.ObjectID : 0, createConstraints);

            // Wrap the single columns in one-element arrays and defer to Create.
            DataColumn[] parentColumns = new DataColumn[1];
            parentColumns[0] = parentColumn;

            DataColumn[] childColumns = new DataColumn[1];
            childColumns[0] = childColumn;

            Create(relationName, parentColumns, childColumns, createConstraints);
        }

        /// <summary>
        /// Initializes a new instance of the <see cref='System.Data.DataRelation'/> class using the specified name
        /// and matched arrays of parent and child columns.
        /// </summary>
        public DataRelation(string relationName, DataColumn[] parentColumns, DataColumn[] childColumns) : this(relationName, parentColumns, childColumns, true)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref='System.Data.DataRelation'/> class using the specified name, matched arrays of parent
        /// and child columns, and value to create constraints.
        /// </summary>
        public DataRelation(string relationName, DataColumn[] parentColumns, DataColumn[] childColumns, bool createConstraints)
        {
            Create(relationName, parentColumns, childColumns, createConstraints);
        }

        // Design-time constructor: stores only names, resolved later by the designer.
        [Browsable(false)] // design-time ctor
        public DataRelation(string relationName, string parentTableName, string childTableName, string[] parentColumnNames, string[] childColumnNames, bool nested)
        {
            _relationName = relationName;
            _parentColumnNames = parentColumnNames;
            _childColumnNames = childColumnNames;
            _parentTableName = parentTableName;
            _childTableName = childTableName;
            _nested = nested;
        }

        // Design-time constructor variant that also captures table namespaces.
        [Browsable(false)] // design-time ctor
        public DataRelation(string relationName, string parentTableName, string parentTableNamespace, string childTableName, string childTableNamespace, string[] parentColumnNames, string[] childColumnNames, bool nested)
        {
            _relationName = relationName;
            _parentColumnNames = parentColumnNames;
            _childColumnNames = childColumnNames;
            _parentTableName = parentTableName;
            _childTableName = childTableName;
            _parentTableNamespace = parentTableNamespace;
            _childTableNamespace = childTableNamespace;
            _nested = nested;
        }

        /// <summary>
        /// Gets the child columns of this relation.
        /// </summary>
        public virtual DataColumn[] ChildColumns
        {
            get
            {
                CheckStateForProperty();
                return _childKey.ToArray();
            }
        }

        // Returns the key's backing column array without copying (internal use only).
        internal DataColumn[] ChildColumnsReference
        {
            get
            {
                CheckStateForProperty();
                return _childKey.ColumnsReference;
            }
        }

        /// <summary>
        /// The internal Key object for the child table.
        /// </summary>
        internal DataKey ChildKey
        {
            get
            {
                CheckStateForProperty();
                return _childKey;
            }
        }

        /// <summary>
        /// Gets the child table of this relation.
        /// </summary>
        public virtual DataTable ChildTable
        {
            get
            {
                CheckStateForProperty();
                return _childKey.Table;
            }
        }

        /// <summary>
        /// Gets the <see cref='System.Data.DataSet'/> to which the relations' collection belongs to.
        /// </summary>
        [DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden), Browsable(false)]
        public virtual DataSet DataSet
        {
            get
            {
                CheckStateForProperty();
                return _dataSet;
            }
        }

        internal string[] ParentColumnNames => _parentKey.GetColumnNames();

        internal string[] ChildColumnNames => _childKey.GetColumnNames();

        // Returns true when every key value is null; null keys never participate
        // in parent/child matching (see rule 2 in the file header).
        private static bool IsKeyNull(object[] values)
        {
            for (int i = 0; i < values.Length; i++)
            {
                if (!DataStorage.IsObjectNull(values[i]))
                {
                    return false;
                }
            }

            return true;
        }

        /// <summary>
        /// Gets the child rows for the parent row across the relation using the version given
        /// </summary>
        internal static DataRow[] GetChildRows(DataKey parentKey, DataKey childKey, DataRow parentRow, DataRowVersion version)
        {
            object[] values = parentRow.GetKeyValues(parentKey, version);
            if (IsKeyNull(values))
            {
                return childKey.Table.NewRowArray(0);
            }

            Index index = childKey.GetSortIndex((version == DataRowVersion.Original) ? DataViewRowState.OriginalRows : DataViewRowState.CurrentRows);
            return index.GetRows(values);
        }

        /// <summary>
        /// Gets the parent rows for the given child row across the relation using the version given
        /// </summary>
        internal static DataRow[] GetParentRows(DataKey parentKey, DataKey childKey, DataRow childRow, DataRowVersion version)
        {
            object[] values = childRow.GetKeyValues(childKey, version);
            if (IsKeyNull(values))
            {
                return parentKey.Table.NewRowArray(0);
            }

            Index index = parentKey.GetSortIndex((version == DataRowVersion.Original) ? DataViewRowState.OriginalRows : DataViewRowState.CurrentRows);
            return index.GetRows(values);
        }

        // Returns the single parent row of the given child row for the requested
        // version, null when the key is null or no match exists, and throws when
        // more than one parent matches.
        internal static DataRow GetParentRow(DataKey parentKey, DataKey childKey, DataRow childRow, DataRowVersion version)
        {
            if (!childRow.HasVersion((version == DataRowVersion.Original) ? DataRowVersion.Original : DataRowVersion.Current))
            {
                if (childRow._tempRecord == -1)
                {
                    return null;
                }
            }

            object[] values = childRow.GetKeyValues(childKey, version);
            if (IsKeyNull(values))
            {
                return null;
            }

            Index index = parentKey.GetSortIndex((version == DataRowVersion.Original) ? DataViewRowState.OriginalRows : DataViewRowState.CurrentRows);
            Range range = index.FindRecords(values);
            if (range.IsNull)
            {
                return null;
            }

            if (range.Count > 1)
            {
                throw ExceptionBuilder.MultipleParents();
            }
            return parentKey.Table._recordManager[index.GetRecord(range.Min)];
        }

        /// <summary>
        /// Internally sets the DataSet pointer.
        /// </summary>
        internal void SetDataSet(DataSet dataSet)
        {
            if (_dataSet != dataSet)
            {
                _dataSet = dataSet;
            }
        }

        // Copies the parent row's key values into every record version
        // (temp/new/old) of the child row so the FK matches the parent PK.
        internal void SetParentRowRecords(DataRow childRow, DataRow parentRow)
        {
            object[] parentKeyValues = parentRow.GetKeyValues(ParentKey);
            if (childRow._tempRecord != -1)
            {
                ChildTable._recordManager.SetKeyValues(childRow._tempRecord, ChildKey, parentKeyValues);
            }
            if (childRow._newRecord != -1)
            {
                ChildTable._recordManager.SetKeyValues(childRow._newRecord, ChildKey, parentKeyValues);
            }
            if (childRow._oldRecord != -1)
            {
                ChildTable._recordManager.SetKeyValues(childRow._oldRecord, ChildKey, parentKeyValues);
            }
        }

        /// <summary>
        /// Gets the parent columns of this relation.
        /// </summary>
        public virtual DataColumn[] ParentColumns
        {
            get
            {
                CheckStateForProperty();
                return _parentKey.ToArray();
            }
        }

        internal DataColumn[] ParentColumnsReference => _parentKey.ColumnsReference;

        /// <summary>
        /// The internal Key object for the parent table.
        /// </summary>
        internal DataKey ParentKey
        {
            get
            {
                CheckStateForProperty();
                return _parentKey;
            }
        }

        /// <summary>
        /// Gets the parent table of this relation.
        /// </summary>
        public virtual DataTable ParentTable
        {
            get
            {
                CheckStateForProperty();
                return _parentKey.Table;
            }
        }

        /// <summary>
        /// Gets or sets the name used to look up this relation in the parent
        /// data set's <see cref='System.Data.DataRelationCollection'/>.
        /// </summary>
        [DefaultValue("")]
        public virtual string RelationName
        {
            get
            {
                CheckStateForProperty();
                return _relationName;
            }
            set
            {
                long logScopeId = DataCommonEventSource.Log.EnterScope("<ds.DataRelation.set_RelationName|API> {0}, '{1}'", ObjectID, value);
                try
                {
                    if (value == null)
                    {
                        value = string.Empty;
                    }

                    CultureInfo locale = (_dataSet != null ? _dataSet.Locale : CultureInfo.CurrentCulture);
                    if (string.Compare(_relationName, value, true, locale) != 0)
                    {
                        // The name changed beyond casing: re-register it in the
                        // DataSet's name table. Register the new name BEFORE
                        // unregistering the old one so a conflicting name throws
                        // without losing the current registration.
                        if (_dataSet != null)
                        {
                            if (value.Length == 0)
                            {
                                throw ExceptionBuilder.NoRelationName();
                            }

                            _dataSet.Relations.RegisterName(value);
                            if (_relationName.Length != 0)
                            {
                                _dataSet.Relations.UnregisterName(_relationName);
                            }
                        }

                        _relationName = value;
                        ((DataRelationCollection.DataTableRelationCollection)(ParentTable.ChildRelations)).OnRelationPropertyChanged(new CollectionChangeEventArgs(CollectionChangeAction.Refresh, this));
                        ((DataRelationCollection.DataTableRelationCollection)(ChildTable.ParentRelations)).OnRelationPropertyChanged(new CollectionChangeEventArgs(CollectionChangeAction.Refresh, this));
                    }
                    else if (string.Compare(_relationName, value, false, locale) != 0)
                    {
                        // Case-only change: no re-registration needed, but still
                        // notify both relation collections.
                        _relationName = value;
                        ((DataRelationCollection.DataTableRelationCollection)(ParentTable.ChildRelations)).OnRelationPropertyChanged(new CollectionChangeEventArgs(CollectionChangeAction.Refresh, this));
                        ((DataRelationCollection.DataTableRelationCollection)(ChildTable.ParentRelations)).OnRelationPropertyChanged(new CollectionChangeEventArgs(CollectionChangeAction.Refresh, this));
                    }
                }
                finally
                {
                    DataCommonEventSource.Log.ExitScope(logScopeId);
                }
            }
        }

        // Verifies that every nested parent relation of the child table (and this
        // relation itself) has a parent table in the given namespace; throws otherwise.
        internal void CheckNamespaceValidityForNestedRelations(string ns)
        {
            foreach (DataRelation rel in ChildTable.ParentRelations)
            {
                if (rel == this || rel.Nested)
                {
                    if (rel.ParentTable.Namespace != ns)
                    {
                        throw ExceptionBuilder.InValidNestedRelation(ChildTable.TableName);
                    }
                }
            }
        }

        // Validates that turning this relation nested introduces no cycle in the
        // nested-relation graph; self-joins are allowed (with a name check).
        internal void CheckNestedRelations()
        {
            DataCommonEventSource.Log.Trace("<ds.DataRelation.CheckNestedRelations|INFO> {0}", ObjectID);
            Debug.Assert(DataSet == null || !_nested, "this relation supposed to be not in dataset or not nested");

            // 1. There is no other relation (R) that has this.ChildTable as R.ChildTable
            //  This is not valid for Whidbey anymore so the code has been removed

            // 2. There is no loop in nested relations
#if DEBUG
            int numTables = ParentTable.DataSet.Tables.Count;
#endif
            DataTable dt = ParentTable;

            if (ChildTable == ParentTable)
            {
                if (string.Compare(ChildTable.TableName, ChildTable.DataSet.DataSetName, true, ChildTable.DataSet.Locale) == 0)
                    throw ExceptionBuilder.SelfnestedDatasetConflictingName(ChildTable.TableName);
                return; //allow self join tables.
            }

            // Breadth-first walk up the nested-parent graph starting from the child;
            // reaching ChildTable again (other than by self-join) means a loop.
            List<DataTable> list = new List<DataTable>();
            list.Add(ChildTable);

            // We have already checked for nested relation UP
            for (int i = 0; i < list.Count; ++i)
            {
                DataRelation[] relations = list[i].NestedParentRelations;
                foreach (DataRelation rel in relations)
                {
                    if (rel.ParentTable == ChildTable && rel.ChildTable != ChildTable)
                    {
                        throw ExceptionBuilder.LoopInNestedRelations(ChildTable.TableName);
                    }
                    if (!list.Contains(rel.ParentTable))
                    {
                        // check for self nested
                        list.Add(rel.ParentTable);
                    }
                }
            }
        }

        /********************
          The Namespace of a table nested inside multiple parents can be
          1. Explicitly specified
          2. Inherited from Parent Table
          3. Empty (Form = unqualified case)
          However, Schema does not allow (3) to be a global element and multiple nested child has to be a global element.
          Therefore we'll reduce case (3) to (2) if all parents have same namespace else throw.
        ********************/

        /// <summary>
        /// Gets or sets a value indicating whether relations are nested.
        /// </summary>
        [DefaultValue(false)]
        public virtual bool Nested
        {
            get
            {
                CheckStateForProperty();
                return _nested;
            }
            set
            {
                long logScopeId = DataCommonEventSource.Log.EnterScope("<ds.DataRelation.set_Nested|API> {0}, {1}", ObjectID, value);
                try
                {
                    if (_nested != value)
                    {
                        if (_dataSet != null)
                        {
                            if (value)
                            {
                                // Becoming nested: validate namespaces, the FK
                                // constraint, and the multiple-nested-parent rules
                                // before mutating any state.
                                if (ChildTable.IsNamespaceInherited())
                                { // if not added to collection, don't do this check
                                    CheckNamespaceValidityForNestedRelations(ParentTable.Namespace);
                                }

                                Debug.Assert(ChildTable != null, "On a DataSet, but not on Table. Bad state");
                                ForeignKeyConstraint constraint = ChildTable.Constraints.FindForeignKeyConstraint(ChildKey.ColumnsReference, ParentKey.ColumnsReference);
                                if (constraint != null)
                                {
                                    constraint.CheckConstraint();
                                }
                                ValidateMultipleNestedRelations();
                            }
                        }

                        // Hidden parent columns require the relation to stay nested.
                        if (!value && (_parentKey.ColumnsReference[0].ColumnMapping == MappingType.Hidden))
                        {
                            throw ExceptionBuilder.RelationNestedReadOnly();
                        }

                        // A nested child reserves its table name as an element name
                        // in the parent table's column namespace.
                        if (value)
                        {
                            ParentTable.Columns.RegisterColumnName(ChildTable.TableName, null);
                        }
                        else
                        {
                            ParentTable.Columns.UnregisterName(ChildTable.TableName);
                        }
                        RaisePropertyChanging(nameof(Nested));

                        if (value)
                        {
                            CheckNestedRelations();
                            if (DataSet != null)
                                if (ParentTable == ChildTable)
                                {
                                    // Self-nested: check each row for loops and
                                    // guard against table/dataset name collision.
                                    foreach (DataRow row in ChildTable.Rows)
                                    {
                                        row.CheckForLoops(this);
                                    }

                                    if (ChildTable.DataSet != null && (string.Compare(ChildTable.TableName, ChildTable.DataSet.DataSetName, true, ChildTable.DataSet.Locale) == 0))
                                    {
                                        throw ExceptionBuilder.DatasetConflictingName(_dataSet.DataSetName);
                                    }
                                    ChildTable._fNestedInDataset = false;
                                }
                                else
                                {
                                    foreach (DataRow row in ChildTable.Rows)
                                    {
                                        row.GetParentRow(this);
                                    }
                                }

                            ParentTable.ElementColumnCount++;
                        }
                        else
                        {
                            ParentTable.ElementColumnCount--;
                        }

                        _nested = value;
                        ChildTable.CacheNestedParent();
                        if (value)
                        {
                            // See the namespace-reduction note above: an empty child
                            // namespace with multiple nested parents must either be
                            // inheritable from a single common parent namespace or throw.
                            if (string.IsNullOrEmpty(ChildTable.Namespace) && ((ChildTable.NestedParentsCount > 1) ||
                                ((ChildTable.NestedParentsCount > 0) && !(ChildTable.DataSet.Relations.Contains(RelationName)))))
                            {
                                string parentNs = null;
                                foreach (DataRelation rel in ChildTable.ParentRelations)
                                {
                                    if (rel.Nested)
                                    {
                                        if (null == parentNs)
                                        {
                                            parentNs = rel.ParentTable.Namespace;
                                        }
                                        else
                                        {
                                            if (!string.Equals(parentNs, rel.ParentTable.Namespace, StringComparison.Ordinal))
                                            {
                                                _nested = false;
                                                throw ExceptionBuilder.InvalidParentNamespaceinNestedRelation(ChildTable.TableName);
                                            }
                                        }
                                    }
                                }

                                // if not already in memory , form == unqualified
                                if (CheckMultipleNested && ChildTable._tableNamespace != null && ChildTable._tableNamespace.Length == 0)
                                {
                                    throw ExceptionBuilder.TableCantBeNestedInTwoTables(ChildTable.TableName);
                                }
                                ChildTable._tableNamespace = null; // if we dont throw, then let it inherit the Namespace
                            }
                        }
                    }
                }
                finally
                {
                    DataCommonEventSource.Log.ExitScope(logScopeId);
                }
            }
        }

        /// <summary>
        /// Gets the constraint which ensures values in a column are unique.
        /// </summary>
        public virtual UniqueConstraint ParentKeyConstraint
        {
            get
            {
                CheckStateForProperty();
                return _parentKeyConstraint;
            }
        }

        internal void SetParentKeyConstraint(UniqueConstraint value)
        {
            Debug.Assert(_parentKeyConstraint == null || value == null, "ParentKeyConstraint should not have been set already.");
            _parentKeyConstraint = value;
        }

        /// <summary>
        /// Gets the <see cref='System.Data.ForeignKeyConstraint'/> for the relation.
        /// </summary>
        public virtual ForeignKeyConstraint ChildKeyConstraint
        {
            get
            {
                CheckStateForProperty();
                return _childKeyConstraint;
            }
        }

        /// <summary>
        /// Gets the collection of custom user information.
        /// </summary>
        [Browsable(false)]
        public PropertyCollection ExtendedProperties => _extendedProperties ?? (_extendedProperties = new PropertyCollection());

        // Toggle for the multiple-nested-parent validation; Clone() disables it
        // temporarily while copying the Nested flag.
        internal bool CheckMultipleNested
        {
            get { return _checkMultipleNested; }
            set { _checkMultipleNested = value; }
        }

        internal void SetChildKeyConstraint(ForeignKeyConstraint value)
        {
            Debug.Assert(_childKeyConstraint == null || value == null, "ChildKeyConstraint should not have been set already.");
            _childKeyConstraint = value;
        }

        internal event PropertyChangedEventHandler PropertyChanging;

        // If we're not in a dataSet relations collection, we need to verify on every property get that we're
        // still a good relation object.
        internal void CheckState()
        {
            if (_dataSet == null)
            {
                _parentKey.CheckState();
                _childKey.CheckState();

                if (_parentKey.Table.DataSet != _childKey.Table.DataSet)
                {
                    throw ExceptionBuilder.RelationDataSetMismatch();
                }

                if (_childKey.ColumnsEqual(_parentKey))
                {
                    throw ExceptionBuilder.KeyColumnsIdentical();
                }

                // Parent/child column types must match pairwise; DateTime columns
                // additionally need compatible DateTimeMode settings.
                for (int i = 0; i < _parentKey.ColumnsReference.Length; i++)
                {
                    if ((_parentKey.ColumnsReference[i].DataType != _childKey.ColumnsReference[i].DataType) ||
                        ((_parentKey.ColumnsReference[i].DataType == typeof(DateTime)) &&
                        (_parentKey.ColumnsReference[i].DateTimeMode != _childKey.ColumnsReference[i].DateTimeMode) &&
                        ((_parentKey.ColumnsReference[i].DateTimeMode & _childKey.ColumnsReference[i].DateTimeMode) != DataSetDateTime.Unspecified)))
                    {
                        // allow unspecified and unspecifiedlocal
                        throw ExceptionBuilder.ColumnsTypeMismatch();
                    }
                }
            }
        }

        /// <summary>
        /// Checks to ensure the DataRelation is a valid object, even if it doesn't
        /// belong to a <see cref='System.Data.DataSet'/>.
/// </summary> protected void CheckStateForProperty() { try { CheckState(); } catch (Exception e) when (ADP.IsCatchableExceptionType(e)) { throw ExceptionBuilder.BadObjectPropertyAccess(e.Message); } } private void Create(string relationName, DataColumn[] parentColumns, DataColumn[] childColumns, bool createConstraints) { long logScopeId = DataCommonEventSource.Log.EnterScope("<ds.DataRelation.Create|INFO> {0}, relationName='{1}', createConstraints={2}", ObjectID, relationName, createConstraints); try { _parentKey = new DataKey(parentColumns, true); _childKey = new DataKey(childColumns, true); if (parentColumns.Length != childColumns.Length) { throw ExceptionBuilder.KeyLengthMismatch(); } for (int i = 0; i < parentColumns.Length; i++) { if ((parentColumns[i].Table.DataSet == null) || (childColumns[i].Table.DataSet == null)) { throw ExceptionBuilder.ParentOrChildColumnsDoNotHaveDataSet(); } } CheckState(); _relationName = (relationName == null ? "" : relationName); _createConstraints = createConstraints; } finally { DataCommonEventSource.Log.ExitScope(logScopeId); } } internal DataRelation Clone(DataSet destination) { DataCommonEventSource.Log.Trace("<ds.DataRelation.Clone|INFO> {0}, destination={1}", ObjectID, (destination != null) ? 
destination.ObjectID : 0); DataTable parent = destination.Tables[ParentTable.TableName, ParentTable.Namespace]; DataTable child = destination.Tables[ChildTable.TableName, ChildTable.Namespace]; int keyLength = _parentKey.ColumnsReference.Length; DataColumn[] parentColumns = new DataColumn[keyLength]; DataColumn[] childColumns = new DataColumn[keyLength]; for (int i = 0; i < keyLength; i++) { parentColumns[i] = parent.Columns[ParentKey.ColumnsReference[i].ColumnName]; childColumns[i] = child.Columns[ChildKey.ColumnsReference[i].ColumnName]; } DataRelation clone = new DataRelation(_relationName, parentColumns, childColumns, false); clone.CheckMultipleNested = false; // disable the check in clone as it is already created clone.Nested = Nested; clone.CheckMultipleNested = true; // enable the check // ...Extended Properties if (_extendedProperties != null) { foreach (object key in _extendedProperties.Keys) { clone.ExtendedProperties[key] = _extendedProperties[key]; } } return clone; } protected internal void OnPropertyChanging(PropertyChangedEventArgs pcevent) { if (PropertyChanging != null) { DataCommonEventSource.Log.Trace("<ds.DataRelation.OnPropertyChanging|INFO> {0}", ObjectID); PropertyChanging(this, pcevent); } } protected internal void RaisePropertyChanging(string name) { OnPropertyChanging(new PropertyChangedEventArgs(name)); } /// <summary> /// </summary> public override string ToString() => RelationName; internal void ValidateMultipleNestedRelations() { // find all nested relations that this child table has // if this relation is the only relation it has, then fine, // otherwise check if all relations are created from XSD, without using Key/KeyRef // check all keys to see autogenerated if (!Nested || !CheckMultipleNested) // no need for this verification { return; } if (0 < ChildTable.NestedParentRelations.Length) { DataColumn[] childCols = ChildColumns; if (childCols.Length != 1 || !IsAutoGenerated(childCols[0])) { throw 
ExceptionBuilder.TableCantBeNestedInTwoTables(ChildTable.TableName); } if (!XmlTreeGen.AutoGenerated(this)) { throw ExceptionBuilder.TableCantBeNestedInTwoTables(ChildTable.TableName); } foreach (Constraint cs in ChildTable.Constraints) { if (cs is ForeignKeyConstraint) { ForeignKeyConstraint fk = (ForeignKeyConstraint)cs; if (!XmlTreeGen.AutoGenerated(fk, true)) { throw ExceptionBuilder.TableCantBeNestedInTwoTables(ChildTable.TableName); } } else { UniqueConstraint unique = (UniqueConstraint)cs; if (!XmlTreeGen.AutoGenerated(unique)) { throw ExceptionBuilder.TableCantBeNestedInTwoTables(ChildTable.TableName); } } } } } private bool IsAutoGenerated(DataColumn col) { if (col.ColumnMapping != MappingType.Hidden) { return false; } if (col.DataType != typeof(int)) { return false; } string generatedname = col.Table.TableName + "_Id"; if ((col.ColumnName == generatedname) || (col.ColumnName == generatedname + "_0")) { return true; } generatedname = ParentColumnsReference[0].Table.TableName + "_Id"; if ((col.ColumnName == generatedname) || (col.ColumnName == generatedname + "_0")) { return true; } return false; } internal int ObjectID => _objectID; } }
using System.Collections.Generic;
using BEPUphysics;
using BEPUphysics.Entities.Prefabs;
using BEPUphysics.Vehicle;
using BEPUphysicsDrawer.Models;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Input;
using BEPUphysics.CollisionShapes.ConvexShapes;
using BEPUphysics.CollisionShapes;
using BEPUutilities;
using ConversionHelper;

namespace BEPUphysicsDemos.AlternateMovement
{
    /// <summary>
    /// Handles input and movement of a vehicle in the game.
    /// Acts as the 'front end' for the bookkeeping and math of the vehicle within the physics engine.
    /// </summary>
    public class TankInput
    {
        /// <summary>
        /// Wheels belonging to the left track of the tank.
        /// </summary>
        private readonly List<Wheel> leftTrack = new List<Wheel>();

        /// <summary>
        /// Wheels belonging to the right track of the tank.
        /// </summary>
        private readonly List<Wheel> rightTrack = new List<Wheel>();

        /// <summary>
        /// Speed that the vehicle tries to reach when moving backward.
        /// </summary>
        public float BackwardSpeed = -15;

        /// <summary>
        /// Default coefficient of sliding friction on an individual wheel in the tank track.
        /// </summary>
        public float BaseSlidingFriction;

        /// <summary>
        /// Speed that the vehicle tries to reach when moving forward.
        /// </summary>
        public float ForwardSpeed = 15;

        /// <summary>
        /// Whether or not to use the vehicle's input.
        /// </summary>
        public bool IsActive;

        /// <summary>
        /// Default maximum force that an individual wheel in a tank track can exert.
        /// </summary>
        public float MaximumDriveForce;

        /// <summary>
        /// Draws the body and wheels.
        /// </summary>
        public ModelDrawer ModelDrawer;

        /// <summary>
        /// Owning space of the vehicle.
        /// </summary>
        public Space Space;

        /// <summary>
        /// Physics representation of the vehicle.
        /// </summary>
        public Vehicle Vehicle;

        /// <summary>
        /// List of graphical representations of the wheels on the vehicle.
        /// </summary>
        public List<DisplayModel> WheelModels;

        float FrictionBlender(float wheelFriction, float supportFriction, bool isKineticFriction, Wheel wheel)
        {
            //The default friction blender is multiplicative.  This tank had its coefficients designed for averaged coefficients.
            //So, let's average the friction coefficients!
            //Try to fiddle with the configuration and this blender to see how you like other approaches.
            return (wheelFriction + supportFriction) / 2;
        }

        /// <summary>
        /// Gets the camera control scheme used by this input manager.
        /// </summary>
        public ChaseCameraControlScheme CameraControlScheme { get; private set; }

        /// <summary>
        /// Constructs the front end and the internal physics representation of the vehicle.
        /// </summary>
        /// <param name="position">Position of the tank.</param>
        /// <param name="owningSpace">Space to add the vehicle to.</param>
        /// <param name="camera">Camera to attach to the vehicle.</param>
        /// <param name="game">Running game.</param>
        /// <param name="drawer">Drawer used to draw the tank.</param>
        /// <param name="wheelModel">Model to use for the 'wheels' of the tank.</param>
        /// <param name="wheelTexture">Texture of the wheels on the tank.</param>
        public TankInput(Vector3 position, Space owningSpace, Camera camera, DemosGame game, ModelDrawer drawer, Model wheelModel, Texture2D wheelTexture)
        {
            var bodies = new List<CompoundShapeEntry>()
            {
                new CompoundShapeEntry(new BoxShape(4f, 1, 8), new Vector3(0, 0, 0), 500),
                new CompoundShapeEntry(new BoxShape(3, .7f, 4f), new Vector3(0, .5f + .35f, .5f), 1)
            };
            var body = new CompoundBody(bodies, 501);
            body.CollisionInformation.LocalPosition = new Vector3(0, .5f, 0);
            body.Position = position; //At first, just keep it out of the way.
            Vehicle = new Vehicle(body);

            #region RaycastWheelShapes

            //The wheel model used is not aligned initially with how a wheel would normally look, so rotate them.
            MaximumDriveForce = 1800;
            BaseSlidingFriction = 3;

            Matrix wheelGraphicRotation = Matrix.CreateFromAxisAngle(Vector3.Forward, MathHelper.PiOver2);
            for (int i = 0; i < 6; i++)
            {
                var toAdd = new Wheel(
                    new RaycastWheelShape(.375f, wheelGraphicRotation),
                    new WheelSuspension(2000, 300f, Vector3.Down, 1.3f, new Vector3(-1.9f, 0, -2.9f + i * 1.15f)),
                    new WheelDrivingMotor(10, MaximumDriveForce, MaximumDriveForce),
                    new WheelBrake(7, 7, 1.0f),
                    new WheelSlidingFriction(BaseSlidingFriction, BaseSlidingFriction));
                toAdd.DrivingMotor.GripFrictionBlender = FrictionBlender;
                toAdd.Brake.FrictionBlender = FrictionBlender;
                toAdd.SlidingFriction.FrictionBlender = FrictionBlender;
                Vehicle.AddWheel(toAdd);
                leftTrack.Add(toAdd);
            }
            for (int i = 0; i < 6; i++)
            {
                //FIX: the right track previously used hard-coded motor limits (2000/1000) while the
                //left track used MaximumDriveForce for both; use MaximumDriveForce on both tracks so
                //the tank is symmetric (Update() already resets every wheel to MaximumDriveForce each frame).
                var toAdd = new Wheel(
                    new RaycastWheelShape(.375f, wheelGraphicRotation),
                    new WheelSuspension(2000, 300f, Vector3.Down, 1.3f, new Vector3(1.9f, 0, -2.9f + i * 1.15f)),
                    new WheelDrivingMotor(10, MaximumDriveForce, MaximumDriveForce),
                    new WheelBrake(7, 7, 1.0f),
                    new WheelSlidingFriction(BaseSlidingFriction, BaseSlidingFriction));
                toAdd.DrivingMotor.GripFrictionBlender = FrictionBlender;
                toAdd.Brake.FrictionBlender = FrictionBlender;
                toAdd.SlidingFriction.FrictionBlender = FrictionBlender;
                Vehicle.AddWheel(toAdd);
                rightTrack.Add(toAdd);
            }

            #endregion

            foreach (Wheel wheel in Vehicle.Wheels)
            {
                //This is a cosmetic setting that makes it looks like the car doesn't have antilock brakes.
                wheel.Shape.FreezeWheelsWhileBraking = true;

                //By default, wheels use as many iterations as the space.  By lowering it,
                //performance can be improved at the cost of a little accuracy.
                wheel.Suspension.SolverSettings.MaximumIterationCount = 1;
                wheel.Brake.SolverSettings.MaximumIterationCount = 1;
                wheel.SlidingFriction.SolverSettings.MaximumIterationCount = 1;
                wheel.DrivingMotor.SolverSettings.MaximumIterationCount = 1;
            }

            Space = owningSpace;

            Space.Add(Vehicle);
            ModelDrawer = drawer;
            DisplayModel model;
            WheelModels = new List<DisplayModel>();
            for (int k = 0; k < Vehicle.Wheels.Count; k++)
            {
                //The detector shapes themselves should not be drawn; tag them so the drawer skips them.
                Vehicle.Wheels[k].Shape.Detector.Tag = "noDisplayObject";
                model = new DisplayModel(wheelModel, ModelDrawer);
                ModelDrawer.Add(model);
                WheelModels.Add(model);
                model.Texture = wheelTexture;
            }

            CameraControlScheme = new ChaseCameraControlScheme(Vehicle.Body, new Vector3(0, 0.6f, 0), true, 10, camera, game);
        }

        /// <summary>
        /// Gives the vehicle control over the camera and movement input.
        /// </summary>
        public void Activate(Vector3 position)
        {
            if (!IsActive)
            {
                IsActive = true;
                //Put the vehicle where the camera is.
                Vehicle.Body.Position = position;
                Vehicle.Body.LinearVelocity = Vector3.Zero;
                Vehicle.Body.AngularVelocity = Vector3.Zero;
                Vehicle.Body.Orientation = Quaternion.Identity;
            }
        }

        /// <summary>
        /// Returns input control to the camera.
        /// </summary>
        public void Deactivate()
        {
            if (IsActive)
            {
                IsActive = false;
            }
        }

        /// <summary>
        /// Handles the input and movement of the character.
        /// </summary>
        /// <param name="dt">Time since last frame in simulation seconds.</param>
        /// <param name="keyboardInput">Keyboard state.</param>
        /// <param name="gamePadInput">Gamepad state.</param>
        public void Update(float dt, KeyboardState keyboardInput, GamePadState gamePadInput)
        {
            //Update the wheel's graphics.
            for (int k = 0; k < WheelModels.Count; k++)
            {
                WheelModels[k].WorldTransform = Vehicle.Wheels[k].Shape.WorldTransform;
            }

            if (IsActive)
            {
                CameraControlScheme.Update(dt);
                //The reason for the more complicated handling of turning is that real tanks'
                //treads target a certain speed and will apply positive or negative forces
                //to reach it.
                //The normal Vehicle class is slightly different.  If you're rolling down a hill
                //with a target velocity of 30 and you're actually going 40, the vehicle doesn't
                //try to slow down.  It won't have to apply any force to reach its goal, and lets
                //itself coast faster.
                //To change direction while moving, a tank can slow down one track.  Friction will
                //force a pivot.  Slowing down one track on this vehicle doesn't do anything
                //because the wheels will happily roll as fast as the other track, even if not
                //applying any driving force.
                //To overcome this difference, the opposite track actually tries to drive backward.
                //This forces the vehicle wheels to actually do work to slow down the track.
                //Going at full speed and reversing a track's direction can be a little jarring, so
                //its maximum force is modified dynamically to make it feel more correct.
#if XBOX360
                float speed = gamePadInput.Triggers.Right * ForwardSpeed + gamePadInput.Triggers.Left * BackwardSpeed;
                foreach (Wheel wheel in Vehicle.Wheels)
                {
                    wheel.DrivingMotor.TargetSpeed = speed;
                    wheel.DrivingMotor.MaximumForwardForce = MaximumDriveForce;
                    wheel.DrivingMotor.MaximumBackwardForce = MaximumDriveForce;
                    wheel.SlidingFriction.KineticCoefficient = BaseSlidingFriction;
                    wheel.SlidingFriction.StaticCoefficient = BaseSlidingFriction;
                }

                //Thumbsticks can have small values even when left alone, so allow a little margin.
                const float stickMargin = .1f;
                //"Approximately stationary" is good enough to turn normally.  Pick a reasonable tolerance.
                const float fullTurnSpeedLimit = 1;
                if (speed > fullTurnSpeedLimit)
                {
                    if (gamePadInput.ThumbSticks.Left.X < -stickMargin)
                    {
                        foreach (Wheel wheel in leftTrack)
                        {
                            //Tell one of the tracks to reverse direction, but don't let it
                            //run at full force.  This helps prevent wild spinouts and encourages
                            //more 'tanky' movement.
                            wheel.DrivingMotor.TargetSpeed = -gamePadInput.ThumbSticks.Left.X * BackwardSpeed;
                            wheel.DrivingMotor.MaximumBackwardForce = MaximumDriveForce / 2;
                        }
                        //It's possible to configure the tank in such a way
                        //that you won't have to use separate sliding frictions while turning,
                        //but cheating is a lot easier.
                        ReduceSlidingFriction();
                    }
                    if (gamePadInput.ThumbSticks.Left.X > stickMargin)
                    {
                        foreach (Wheel wheel in rightTrack)
                        {
                            wheel.DrivingMotor.TargetSpeed = gamePadInput.ThumbSticks.Left.X * BackwardSpeed;
                            wheel.DrivingMotor.MaximumBackwardForce = MaximumDriveForce / 2;
                        }
                        ReduceSlidingFriction();
                    }
                }
                else if (speed < -fullTurnSpeedLimit)
                {
                    if (gamePadInput.ThumbSticks.Left.X > stickMargin)
                    {
                        foreach (Wheel wheel in leftTrack)
                        {
                            wheel.DrivingMotor.TargetSpeed = gamePadInput.ThumbSticks.Left.X * ForwardSpeed;
                            wheel.DrivingMotor.MaximumForwardForce = MaximumDriveForce / 2;
                        }
                        ReduceSlidingFriction();
                    }
                    if (gamePadInput.ThumbSticks.Left.X < -stickMargin)
                    {
                        foreach (Wheel wheel in rightTrack)
                        {
                            wheel.DrivingMotor.TargetSpeed = -gamePadInput.ThumbSticks.Left.X * ForwardSpeed;
                            wheel.DrivingMotor.MaximumForwardForce = MaximumDriveForce / 2;
                        }
                        ReduceSlidingFriction();
                    }
                }
                else
                {
                    if (gamePadInput.ThumbSticks.Left.X < 0)
                    {
                        //Turn left
                        foreach (Wheel wheel in leftTrack)
                        {
                            wheel.DrivingMotor.TargetSpeed = -gamePadInput.ThumbSticks.Left.X * BackwardSpeed / 5;
                        }
                        foreach (Wheel wheel in rightTrack)
                        {
                            wheel.DrivingMotor.TargetSpeed = -gamePadInput.ThumbSticks.Left.X * ForwardSpeed / 5;
                        }
                        ReduceSlidingFriction();
                    }
                    if (gamePadInput.ThumbSticks.Left.X > 0)
                    {
                        //Turn right
                        foreach (Wheel wheel in leftTrack)
                        {
                            wheel.DrivingMotor.TargetSpeed = gamePadInput.ThumbSticks.Left.X * ForwardSpeed / 5;
                        }
                        foreach (Wheel wheel in rightTrack)
                        {
                            wheel.DrivingMotor.TargetSpeed = gamePadInput.ThumbSticks.Left.X * BackwardSpeed / 5;
                        }
                        ReduceSlidingFriction();
                    }
                }

                if (gamePadInput.IsButtonDown(Buttons.LeftStick))
                    foreach (Wheel wheel in Vehicle.Wheels)
                    {
                        wheel.Brake.IsBraking = true;
                    }
                else
                    foreach (Wheel wheel in Vehicle.Wheels)
                    {
                        wheel.Brake.IsBraking = false;
                    }
#else
                //Reset properties to defaults.
                foreach (Wheel wheel in Vehicle.Wheels)
                {
                    wheel.Brake.IsBraking = false;
                    wheel.DrivingMotor.MaximumForwardForce = MaximumDriveForce;
                    wheel.DrivingMotor.MaximumBackwardForce = MaximumDriveForce;
                    wheel.SlidingFriction.KineticCoefficient = BaseSlidingFriction;
                    wheel.SlidingFriction.StaticCoefficient = BaseSlidingFriction;
                }

                if (keyboardInput.IsKeyDown(Keys.E))
                {
                    if (keyboardInput.IsKeyDown(Keys.S))
                    {
                        //Turn left while going forward
                        foreach (Wheel wheel in rightTrack)
                        {
                            wheel.DrivingMotor.TargetSpeed = ForwardSpeed;
                        }
                        foreach (Wheel wheel in leftTrack)
                        {
                            //Tell one of the tracks to reverse direction, but don't let it
                            //run at full force.  This helps prevent wild spinouts and encourages
                            //more 'tanky' movement.
                            wheel.DrivingMotor.TargetSpeed = BackwardSpeed;
                            wheel.DrivingMotor.MaximumBackwardForce = MaximumDriveForce / 3;
                        }
                        //It's possible to configure the tank in such a way
                        //that you won't have to use separate sliding frictions while turning,
                        //but cheating is a lot easier.
                        ReduceSlidingFriction();
                    }
                    else if (keyboardInput.IsKeyDown(Keys.F))
                    {
                        //Turn right while going forward
                        foreach (Wheel wheel in leftTrack)
                        {
                            wheel.DrivingMotor.TargetSpeed = ForwardSpeed;
                        }
                        foreach (Wheel wheel in rightTrack)
                        {
                            wheel.DrivingMotor.TargetSpeed = BackwardSpeed;
                            wheel.DrivingMotor.MaximumBackwardForce = MaximumDriveForce / 3;
                        }
                        ReduceSlidingFriction();
                    }
                    else
                    {
                        foreach (Wheel wheel in Vehicle.Wheels)
                        {
                            wheel.DrivingMotor.TargetSpeed = ForwardSpeed;
                        }
                    }
                }
                else if (keyboardInput.IsKeyDown(Keys.D))
                {
                    if (keyboardInput.IsKeyDown(Keys.F))
                    {
                        //Turn right while going back
                        foreach (var wheel in rightTrack)
                        {
                            wheel.DrivingMotor.TargetSpeed = BackwardSpeed;
                        }
                        foreach (Wheel wheel in leftTrack)
                        {
                            wheel.DrivingMotor.TargetSpeed = ForwardSpeed;
                            wheel.DrivingMotor.MaximumForwardForce = MaximumDriveForce / 3;
                        }
                        ReduceSlidingFriction();
                    }
                    else if (keyboardInput.IsKeyDown(Keys.S))
                    {
                        //Turn left while going back
                        foreach (var wheel in leftTrack)
                        {
                            wheel.DrivingMotor.TargetSpeed = BackwardSpeed;
                        }
                        foreach (Wheel wheel in rightTrack)
                        {
                            wheel.DrivingMotor.TargetSpeed = ForwardSpeed;
                            wheel.DrivingMotor.MaximumForwardForce = MaximumDriveForce / 3;
                        }
                        ReduceSlidingFriction();
                    }
                    else
                    {
                        foreach (Wheel wheel in Vehicle.Wheels)
                        {
                            wheel.DrivingMotor.TargetSpeed = BackwardSpeed;
                        }
                    }
                }
                else if (keyboardInput.IsKeyDown(Keys.S))
                {
                    //Turn left
                    foreach (Wheel wheel in leftTrack)
                    {
                        wheel.DrivingMotor.TargetSpeed = BackwardSpeed / 5;
                    }
                    foreach (Wheel wheel in rightTrack)
                    {
                        wheel.DrivingMotor.TargetSpeed = ForwardSpeed / 5;
                    }
                    ReduceSlidingFriction();
                }
                else if (keyboardInput.IsKeyDown(Keys.F))
                {
                    //Turn right
                    foreach (Wheel wheel in leftTrack)
                    {
                        wheel.DrivingMotor.TargetSpeed = ForwardSpeed / 5;
                    }
                    foreach (Wheel wheel in rightTrack)
                    {
                        wheel.DrivingMotor.TargetSpeed = BackwardSpeed / 5;
                    }
                    ReduceSlidingFriction();
                }
                else
                {
                    //Idle
                    foreach (Wheel wheel in Vehicle.Wheels)
                    {
                        wheel.DrivingMotor.TargetSpeed = 0;
                    }
                }
                if (keyboardInput.IsKeyDown(Keys.Space))
                {
                    //Brake
                    foreach (Wheel wheel in Vehicle.Wheels)
                    {
                        wheel.Brake.IsBraking = true;
                    }
                }
#endif
            }
            else
            {
                //Parking brake
                foreach (Wheel wheel in Vehicle.Wheels)
                {
                    wheel.Brake.IsBraking = true;
                    //Don't want the car to keep trying to drive.
                    wheel.DrivingMotor.TargetSpeed = 0;
                }
            }
        }

        private void ReduceSlidingFriction()
        {
            ////If you want to make turning while moving faster, you can enable this.
            ////Careful- with sliding friction reduction, the tank can twirl pretty fast!
            //foreach (Wheel wheel in Vehicle.Wheels)
            //{
            //    wheel.SlidingFriction.StaticCoefficient = BaseSlidingFriction * .8f;
            //    wheel.SlidingFriction.KineticCoefficient = BaseSlidingFriction * .8f;
            //}
        }
    }
}
using PhotoNet.Common;
using RawNet.Decoder.Decompressor;
using RawNet.Format.Tiff;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;

namespace RawNet.Decoder
{
    // Decoder for Olympus ORF raw files (TIFF-based container).
    class ORFDecoder : TIFFDecoder
    {
        internal ORFDecoder(Stream file) : base(file) { }

        // Locates the strip data in the first IFD carrying STRIPOFFSETS, validates
        // offsets/counts/bounds, then dispatches: multiple strips => plain 12-bit data,
        // a single strip => Olympus-compressed stream.
        public override void DecodeRaw()
        {
            List<IFD> data = ifd.GetIFDsWithTag(TagType.STRIPOFFSETS);
            if (data.Count == 0)
                throw new RawDecoderException("ORF Decoder: No image data found");

            IFD raw = data[0];
            int compression = raw.GetEntry(TagType.COMPRESSION).GetInt(0);
            // ORF files declare compression == 1 even for the Olympus-compressed stream.
            if (1 != compression)
                throw new RawDecoderException("ORF Decoder: Unsupported compression");

            Tag offsets = raw.GetEntry(TagType.STRIPOFFSETS);
            Tag counts = raw.GetEntry(TagType.STRIPBYTECOUNTS);
            if (counts.dataCount != offsets.dataCount)
                throw new RawDecoderException("ORF Decoder: Byte count number does not match strip size: count:" + counts.dataCount + ", strips:" + offsets.dataCount);

            uint off = raw.GetEntry(TagType.STRIPOFFSETS).GetUInt(0);
            uint size = 0;
            // Total payload size is the sum of all strip byte counts.
            for (int i = 0; i < counts.dataCount; i++)
                size += counts.GetUInt(i);

            if (!reader.IsValid(off, size))
                throw new RawDecoderException("ORF Decoder: Truncated file");

            uint width = raw.GetEntry(TagType.IMAGEWIDTH).GetUInt(0);
            uint height = raw.GetEntry(TagType.IMAGELENGTH).GetUInt(0);

            rawImage.fullSize.dim = new Point2D(width, height);
            rawImage.Init(false);

            // We add 3 bytes slack, since the bitpump might be a few bytes ahead.
            ImageBinaryReader input = new ImageBinaryReader(reader.BaseStream, off);
            input.BaseStream.Position = off;
            try
            {
                if (offsets.dataCount != 1)
                    DecodeUncompressed(input, width, height, size, raw.endian);
                else
                    DecodeCompressed(input, width, height);
            }
            catch (IOException e)
            {
                // A short read mid-image is recorded as a non-fatal error; partial data is kept.
                rawImage.errors.Add(e.Message);
            }
        }

        // Picks the uncompressed layout from the payload size: >= w*h*2 is 16-bit-per-sample
        // unpacked (endianness decides alignment); >= w*h*3/2 is the 12-bit interlaced packing.
        private void DecodeUncompressed(ImageBinaryReader input, uint width, uint height, long size, Endianness endian)
        {
            /* RawDecompressor.Decode12BitRawWithControl(s, w, h, rawImage);
        else if ((hints.ContainsKey("jpeg32_bitorder"))) {
            Point2D dim = new Point2D(w, h), pos = new Point2D(0, 0);
            RawDecompressor.ReadUncompressedRaw(s, dim, pos, w * 12 / 8, 12, BitOrder.Jpeg32, rawImage);
        }
        else*/
            if (size >= width * height * 2)
            {
                // We're in an unpacked raw
                if (endian == Endianness.Little)
                    RawDecompressor.Decode12BitRawUnpacked(input, new Point2D(width, height), new Point2D(), rawImage);
                else
                    RawDecompressor.Decode12BitRawBEunpackedLeftAligned(input, new Point2D(width, height), new Point2D(), rawImage);
            }
            else if (size >= width * height * 3 / 2)
            {
                // We're in one of those weird interlaced packed raws
                RawDecompressor.Decode12BitRawBEInterlaced(input, new Point2D(width, height), new Point2D(), rawImage);
            }
            else
            {
                throw new RawDecoderException("ORF Decoder: Don't know how to handle the encoding in this file\n");
            }
        }

        /* This is probably the slowest decoder of them all.
         * I cannot see any way to effectively speed up the prediction
         * phase, which is by far the slowest part of this algorithm.
         * Also there is no way to multithread this code, since prediction
         * is based on the output of all previous pixel (bar the first four)
         */
        // Olympus lossless compression: pixels are decoded in even/odd pairs, each with its
        // own adaptive carry state (acarry0/acarry1), and predicted from the left and
        // north-west neighbours. Statement order is load-bearing throughout.
        private void DecodeCompressed(ImageBinaryReader s, uint width, uint height)
        {
            int nbits;
            long left0, nw0, left1, nw1;
            long sign, low, high;
            long[] acarry0 = new long[3], acarry1 = new long[3];
            long pred, diff;
            //uint pitch = rawImage.pitch;

            /* Build a table to quickly look up "high" value */
            // bittable[v] = number of leading zero bits in the low 12 bits of v, capped at 12.
            byte[] bittable = new byte[4096];
            for (int i = 0; i < 4096; i++)
            {
                int b = i;
                for (high = 0; high < 12; high++)
                    if (((b >> (11 - (int)high)) & 1) != 0)
                        break;
                bittable[i] = (byte)Math.Min(12, high);
            }
            left0 = nw0 = left1 = nw1 = 0;
            // 7-byte header before the bitstream proper.
            s.ReadBytes(7);
            BitPumpMSB bits = new BitPumpMSB(s);
            for (int y = 0; y < height; y++)
            {
                var pos = y * rawImage.fullSize.UncroppedDim.width;
                // Carry state resets at the start of every row.
                acarry0 = new long[3];
                acarry1 = new long[3];
                bool y_border = y < 2;
                bool border = true;
                for (int x = 0; x < width; x++)
                {
                    // EVEN PIXEL
                    bits.Fill();
                    int i = 0;
                    if (acarry0[2] < 3) i = 2;
                    // Find how many bits are needed based on the previous magnitude.
                    for (nbits = 2 + i; acarry0[0] >> (nbits + i) != 0; nbits++) ;
                    uint b = bits.PeekBits(15);
                    sign = (b >> 14) * -1;
                    low = (b >> 12) & 3;
                    high = bittable[b & 4095];

                    // Skip bytes used above or read bits
                    if (high == 12)
                    {
                        bits.SkipBits(15);
                        high = bits.GetBits(16 - nbits) >> 1;
                    }
                    else
                    {
                        bits.SkipBits((int)high + 1 + 3);
                    }
                    acarry0[0] = (high << nbits) | bits.GetBits(nbits);
                    diff = (acarry0[0] ^ sign) + acarry0[1];
                    acarry0[1] = (diff * 3 + acarry0[1]) >> 5;
                    acarry0[2] = acarry0[0] > 16 ? 0 : acarry0[2] + 1;
                    if (border)
                    {
                        if (y_border && x < 2)
                            pred = 0;
                        else if (y_border)
                            pred = left0;
                        else
                        {
                            pred = nw0 = rawImage.fullSize.rawView[pos - rawImage.fullSize.UncroppedDim.width + x];
                        }
                        rawImage.fullSize.rawView[pos + x] = (ushort)(pred + ((diff << 2) | low));
                        // Set predictor
                        left0 = rawImage.fullSize.rawView[pos + x];
                    }
                    else
                    {
                        // Have local variables for values used several tiles
                        // (having a "UInt16 *dst_up" that caches dest[-pitch+((int)x)] is actually slower, probably stack spill or aliasing)
                        int up = rawImage.fullSize.rawView[pos - rawImage.fullSize.UncroppedDim.width + x];
                        long leftMinusNw = left0 - nw0;
                        long upMinusNw = up - nw0;
                        // Check if sign is different, and one is not zero
                        if (leftMinusNw * upMinusNw < 0)
                        {
                            if (Other_abs(leftMinusNw) > 32 || Other_abs(upMinusNw) > 32)
                                pred = left0 + upMinusNw;
                            else
                                pred = (left0 + up) >> 1;
                        }
                        else
                            pred = Other_abs(leftMinusNw) > Other_abs(upMinusNw) ? left0 : up;

                        rawImage.fullSize.rawView[pos + x] = (ushort)(pred + ((diff << 2) | low));
                        // Set predictors
                        left0 = rawImage.fullSize.rawView[pos + x];
                        nw0 = up;
                    }
                    // ODD PIXELS
                    x += 1;
                    bits.Fill();
                    i = 0;
                    if (acarry1[2] < 3) i = 2;
                    for (nbits = 2 + i; acarry1[0] >> (nbits + i) != 0; nbits++) ;
                    b = bits.PeekBits(15);
                    sign = (b >> 14) * -1;
                    low = (b >> 12) & 3;
                    high = bittable[b & 4095];
                    // Skip bytes used above or read bits
                    if (high == 12)
                    {
                        bits.SkipBits(15);
                        high = bits.GetBits(16 - nbits) >> 1;
                    }
                    else
                    {
                        bits.SkipBits((int)high + 1 + 3);
                    }
                    acarry1[0] = (high << nbits) | bits.GetBits(nbits);
                    diff = (acarry1[0] ^ sign) + acarry1[1];
                    acarry1[1] = (diff * 3 + acarry1[1]) >> 5;
                    acarry1[2] = acarry1[0] > 16 ? 0 : acarry1[2] + 1;
                    if (border)
                    {
                        if (y_border && x < 2)
                            pred = 0;
                        else if (y_border)
                            pred = left1;
                        else
                        {
                            pred = nw1 = rawImage.fullSize.rawView[pos - rawImage.fullSize.UncroppedDim.width + x];
                        }
                        rawImage.fullSize.rawView[pos + x] = (ushort)(left1 = pred + ((diff << 2) | low));
                    }
                    else
                    {
                        int up = rawImage.fullSize.rawView[pos - rawImage.fullSize.UncroppedDim.width + x];
                        long leftminusNw = left1 - nw1;
                        long upminusNw = up - nw1;
                        // Check if sign is different, and one is not zero
                        if (leftminusNw * upminusNw < 0)
                        {
                            if (Other_abs(leftminusNw) > 32 || Other_abs(upminusNw) > 32)
                                pred = left1 + upminusNw;
                            else
                                pred = (left1 + up) >> 1;
                        }
                        else
                            pred = Other_abs(leftminusNw) > Other_abs(upminusNw) ? left1 : up;

                        rawImage.fullSize.rawView[pos + x] = (ushort)(left1 = pred + ((diff << 2) | low));
                        nw1 = up;
                    }
                    // Only the first two columns use the simple border predictor after row 2.
                    border = y_border;
                }
            }
        }

        // Reads model/lens metadata, picks the colour matrix and derives white balance
        // and black levels from the maker note.
        public override void DecodeMetadata()
        {
            base.DecodeMetadata();
            if (rawImage.metadata.Model == null)
                // NOTE(review): message reads "Model name found" — presumably meant "not found"; confirm before changing.
                throw new RawDecoderException("ORF Meta Decoder: Model name found");
            SetMetaData(rawImage.metadata.Model);
            rawImage.metadata.Lens = ifd.GetEntryRecursive((TagType)42036)?.DataAsString;

            var rMul = ifd.GetEntryRecursive(TagType.OLYMPUSREDMULTIPLIER);
            var bMul = ifd.GetEntryRecursive(TagType.OLYMPUSBLUEMULTIPLIER);
            if (rMul != null && bMul != null)
            {
                // NOTE(review): the third argument reuses OLYMPUSREDMULTIPLIER; given that bMul
                // is fetched and checked above, the blue multiplier was presumably intended — confirm.
                rawImage.metadata.WbCoeffs = new WhiteBalance(
                    ifd.GetEntryRecursive(TagType.OLYMPUSREDMULTIPLIER).GetShort(0), 1,
                    ifd.GetEntryRecursive(TagType.OLYMPUSREDMULTIPLIER).GetShort(0));
            }
            else
            {
                IFD image_processing = ifd.GetIFDWithType(IFDType.Makernote).subIFD[0];
                Tag wb = image_processing.GetEntry((TagType)0x0100); // Get the WB
                if (wb?.dataCount == 2 || wb?.dataCount == 4)
                {
                    rawImage.metadata.WbCoeffs = new WhiteBalance(wb.GetInt(0), 256, wb.GetInt(1), rawImage.fullSize.ColorDepth);
                }
                //TODO fix (the sub makernote doesn't read the correct value
                // NOTE(review): this unconditionally overwrites the WbCoeffs computed just above.
                rawImage.metadata.WbCoeffs = new WhiteBalance(1, 1, 1);

                Tag blackEntry = image_processing.GetEntry((TagType)0x0600); // Get the black levels
                if (blackEntry != null)
                {
                    Debug.Assert(blackEntry.GetInt(0) == blackEntry.GetInt(1));
                    rawImage.black = blackEntry.GetInt(0);
                    // Order is assumed to be RGGB
                    if (blackEntry.dataCount == 4)
                    {
                        //blackEntry.parent_offset = img_entry.parent_offset - 12;
                        //blackEntry.offsetFromParent();
                        /*for (int i = 0; i < 4; i++)
                        {
                            if (rawImage.colorFilter.cfa[(i & 1) * 2 + i >> 1] == CFAColor.Red)
                                rawImage.blackLevelSeparate[i] = blackEntry.GetShort(0);
                            else if (rawImage.colorFilter.cfa[(i & 1) * 2 + i >> 1] == CFAColor.Blue)
                                rawImage.blackLevelSeparate[i] = blackEntry.GetShort(3);
                            else if (rawImage.colorFilter.cfa[(i & 1) * 2 + i >> 1] == CFAColor.Green && i < 2)
                                rawImage.blackLevelSeparate[i] = blackEntry.GetShort(1);
                            else if (rawImage.colorFilter.cfa[(i & 1) * 2 + i >> 1] == CFAColor.Green)
                                rawImage.blackLevelSeparate[i] = blackEntry.GetShort(2);
                        }*/
                        // Adjust whitelevel based on the read black (we assume the dynamic range is the same)
                        //rawImage.whitePoint -= rawImage.black - rawImage.bla[0];
                    }
                }
            }
        }

        // Looks up the per-model colour matrix (and optional black/white points) by
        // substring match against the model name; first match wins.
        private void SetMetaData(string model)
        {
            //find the color matrice
            for (int i = 0; i < colorM.Length; i++)
            {
                if (colorM[i].name.Contains(rawImage.metadata.Model))
                {
                    rawImage.convertionM = colorM[i].matrix;
                    if (colorM[i].black != 0) rawImage.black = colorM[i].black;
                    if (colorM[i].white != 0) rawImage.whitePoint = colorM[i].white;
                    break;
                }
            }
        }

        // Per-model colour conversion matrices (name, black, white, 3x3 matrix).
        // Commented-out entries are retained for models not yet enabled.
        private CamRGB[] colorM = {
            /*{ "Olympus AIR A01", 0, 0, { 8992,-3093,-639,-2563,10721,2122,-437,1270,5473 }),
            { "Olympus C5050", 0, 0, { 10508,-3124,-1273,-6079,14294,1901,-1653,2306,6237 }),
            { "Olympus C5060", 0, 0, { 10445,-3362,-1307,-7662,15690,2058,-1135,1176,7602 }),
            { "Olympus C7070", 0, 0, { 10252,-3531,-1095,-7114,14850,2436,-1451,1723,6365 }),
            { "Olympus C70", 0, 0, { 10793,-3791,-1146,-7498,15177,2488,-1390,1577,7321 }),
            { "Olympus C80", 0, 0, { 8606,-2509,-1014,-8238,15714,2703,-942,979,7760 }),*/
            new CamRGB("Olympus E-10", 0, 0xffc, new double[] { 12745,-4500,-1416,-6062,14542,1580,-1934,2256,6603 } ),
            new CamRGB( "Olympus E-1", 0, 0, new double[] { 11846,-4767,-945,-7027,15878,1089,-2699,4122,8311 }),
            new CamRGB("Olympus E-20", 0, 0xffc, new double[] { 13173,-4732,-1499,-5807,14036,1895,-2045,2452,7142 } ),
            new CamRGB("Olympus E-300", 0, 0, new double[] { 7828,-1761,-348,-5788,14071,1830,-2853,4518,6557 } ),
            new CamRGB( "Olympus E-330", 0, 0, new double[]{ 8961,-2473,-1084,-7979,15990,2067,-2319,3035,8249 } ),
            new CamRGB( "Olympus E-30", 0, 0xfbc, new double[] { 8144,-1861,-1111,-7763,15894,1929,-1865,2542,7607 }),
            new CamRGB( "Olympus E-3", 0, 0xf99, new double[] { 9487,-2875,-1115,-7533,15606,2010,-1618,2100,7389 }),
            new CamRGB("Olympus E-400", 0, 0, new double[] { 6169,-1483,-21,-7107,14761,2536,-2904,3580,8568 }),
            new CamRGB( "Olympus E-410", 0, 0xf6a, new double[]{ 8856,-2582,-1026,-7761,15766,2082,-2009,2575,7469 }),
            new CamRGB("Olympus E-420", 0, 0xfd7, new double[] { 8746,-2425,-1095,-7594,15612,2073,-1780,2309,7416 }),
            new CamRGB( "Olympus E-450", 0, 0xfd2, new double[] { 8745,-2425,-1095,-7594,15613,2073,-1780,2309,7416 }),
            new CamRGB( "Olympus E-500", 0, 0, new double[] { 8136,-1968,-299,-5481,13742,1871,-2556,4205,6630 }),
            new CamRGB( "Olympus E-510", 0, 0xf6a, new double[] { 8785,-2529,-1033,-7639,15624,2112,-1783,2300,7817 }),
            new CamRGB( "Olympus E-520", 0, 0xfd2, new double[] { 8344,-2322,-1020,-7596,15635,2048,-1748,2269,7287 }),
            new CamRGB( "Olympus E-5", 0, 0xeec, new double[] { 11200,-3783,-1325,-4576,12593,2206,-695,1742,7504 }),
            new CamRGB( "Olympus E-600", 0, 0xfaf, new double[] { 8453,-2198,-1092,-7609,15681,2008,-1725,2337,7824 }),
            new CamRGB( "Olympus E-620", 0, 0xfaf, new double[] { 8453,-2198,-1092,-7609,15681,2008,-1725,2337,7824 }),
            new CamRGB( "Olympus E-P1", 0, 0xffd, new double[] { 8343,-2050,-1021,-7715,15705,2103,-1831,2380,8235 }),
            new CamRGB( "Olympus E-P2", 0, 0xffd, new double[] { 8343,-2050,-1021,-7715,15705,2103,-1831,2380,8235 }),/*
            { "Olympus E-P3", 0, 0, { 7575,-2159,-571,-3722,11341,2725,-1434,2819,6271 }),
            { "Olympus E-P5", 0, 0, { 8380,-2630,-639,-2887,10725,2496,-627,1427,5438 }),
            { "Olympus E-PL1s", 0, 0, { 11409,-3872,-1393,-4572,12757,2003,-709,1810,7415 }),
            { "Olympus E-PL1", 0, 0, { 11408,-4289,-1215,-4286,12385,2118,-387,1467,7787 }),*/
            new CamRGB( "Olympus E-PL2", 0, 0xcf3, new double[] { 15030,-5552,-1806,-3987,12387,1767,-592,1670,7023 }),
            new CamRGB("Olympus E-PL3", 0, 0, new double[] { 7575,-2159,-571,-3722,11341,2725,-1434,2819,6271 }),
            new CamRGB( "Olympus E-PL5", 0, 0xfcb, new double[] { 8380,-2630,-639,-2887,10725,2496,-627,1427,5438 }),/*
            { "Olympus E-PL6", 0, 0, { 8380,-2630,-639,-2887,10725,2496,-627,1427,5438 }),
            { "Olympus E-PL7", 0, 0, { 9197,-3190,-659,-2606,10830,2039,-458,1250,5458 }),
            { "Olympus E-PM1", 0, 0, { 7575,-2159,-571,-3722,11341,2725,-1434,2819,6271 }),
            { "Olympus E-PM2", 0, 0, { 8380,-2630,-639,-2887,10725,2496,-627,1427,5438 }),
            { "Olympus E-M10", 0, 0,// also E-M10 Mark II
            { 8380,-2630,-639,-2887,10725,2496,-627,1427,5438 }),
            { "Olympus E-M1", 0, 0, { 7687,-1984,-606,-4327,11928,2721,-1381,2339,6452 }),
            { "Olympus E-M5MarkII", 0, 0, { 9422,-3258,-711,-2655,10898,2015,-512,1354,5512 }),*/
            new CamRGB( "Olympus E-M5", 0, 0xfe1, new double[] { 8380,-2630,-639,-2887,10725,2496,-627,1427,5438 }), /*
            { "Olympus PEN-F", 0, 0, { 9476,-3182,-765,-2613,10958,1893,-449,1315,5268 }),
            { "Olympus SH-2", 0, 0, { 10156,-3425,-1077,-2611,11177,1624,-385,1592,5080 }),
            { "Olympus SP350", 0, 0, { 12078,-4836,-1069,-6671,14306,2578,-786,939,7418 }),
            { "Olympus SP3", 0, 0, { 11766,-4445,-1067,-6901,14421,2707,-1029,1217,7572 }),*/
            new CamRGB( "Olympus SP500UZ", 0, 0xfff, new double[] { 9493,-3415,-666,-5211,12334,3260,-1548,2262,6482 }),
            new CamRGB( "Olympus SP510UZ", 0, 0xffe, new double[] { 10593,-3607,-1010,-5881,13127,3084,-1200,1805,6721 }),
            new CamRGB( "Olympus SP550UZ", 0, 0xffe, new double[] { 11597,-4006,-1049,-5432,12799,2957,-1029,1750,6516 }),
            new CamRGB( "Olympus SP560UZ", 0, 0xff9, new double[] { 10915,-3677,-982,-5587,12986,2911,-1168,1968,6223 }),/*
            { "Olympus SP570UZ", 0, 0, { 11522,-4044,-1146,-4736,12172,2904,-988,1829,6039 }),
            { "Olympus STYLUS1", 0, 0, { 8360,-2420,-880,-3928,12353,1739,-1381,2416,5173 }),
            { "Olympus TG-4", 0, 0, { 11426,-4159,-1126,-2066,10678,1593,-120,1327,4998 }),
            { "Olympus XZ-10", 0, 0, { 9777,-3483,-925,-2886,11297,1800,-602,1663,5134 }),
            { "Olympus XZ-1", 0, 0, { 10901,-4095,-1074,-1141,9208,2293,-62,1417,5158 }),
            { "Olympus XZ-2", 0, 0, { 9777,-3483,-925,-2886,11297,1800,-602,1663,5134 }),};*/ };
    }
}
using System;
using System.IO;
using System.Threading;
using Htc.Vita.Core.Log;
using Htc.Vita.Core.Util;

namespace Htc.Vita.Core.Crypto
{
    /// <summary>
    /// Class Sha256. Abstract template for SHA-256 checksum generation and
    /// validation; concrete implementations supply the hashing via the
    /// <c>OnGenerate*</c> methods. Instances are resolved through
    /// <c>TypeRegistry</c>, with <c>DefaultSha256</c> registered as default.
    /// </summary>
    public abstract partial class Sha256
    {
        /// <summary>
        /// Gets the Base64 form length.
        /// </summary>
        /// <value>The Base64 form length.</value>
        public static int Base64FormLength => 44; // "47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="

        /// <summary>
        /// Gets the hexadecimal form length.
        /// </summary>
        /// <value>The hexadecimal form length.</value>
        public static int HexFormLength => 64; // "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"

        static Sha256()
        {
            // Register the built-in implementation as the fallback instance type.
            TypeRegistry.RegisterDefault<Sha256, DefaultSha256>();
        }

        /// <summary>
        /// Registers the instance type.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        public static void Register<T>()
                where T : Sha256, new()
        {
            TypeRegistry.Register<Sha256, T>();
        }

        /// <summary>
        /// Gets the instance.
        /// </summary>
        /// <returns>Sha256.</returns>
        public static Sha256 GetInstance()
        {
            return TypeRegistry.GetInstance<Sha256>();
        }

        /// <summary>
        /// Gets the instance.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <returns>Sha256.</returns>
        public static Sha256 GetInstance<T>()
                where T : Sha256, new()
        {
            return TypeRegistry.GetInstance<Sha256, T>();
        }

        /// <summary>
        /// Generates the checksum value in Base64 form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <returns>System.String. Empty if the file is null or missing.</returns>
        public string GenerateInBase64(FileInfo file)
        {
            return GenerateInBase64(
                    file,
                    CancellationToken.None
            );
        }

        /// <summary>
        /// Generates the checksum value in Base64 form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns>System.String. Empty if the file is null or missing,
        /// or if generation was cancelled or failed.</returns>
        public string GenerateInBase64(
                FileInfo file,
                CancellationToken cancellationToken)
        {
            if (file == null || !file.Exists)
            {
                return string.Empty;
            }

            var result = string.Empty;
            try
            {
                result = OnGenerateInBase64(
                        file,
                        cancellationToken
                );
            }
            catch (OperationCanceledException)
            {
                Logger.GetInstance(typeof(Sha256)).Warn("Generating checksum in base64 cancelled");
            }
            catch (Exception e)
            {
                // Errors are logged, never propagated; callers see an empty string.
                Logger.GetInstance(typeof(Sha256)).Fatal($"Generating checksum in base64 error: {e}");
            }
            return result;
        }

        /// <summary>
        /// Generates the checksum value in Base64 form.
        /// </summary>
        /// <param name="content">The content.</param>
        /// <returns>System.String. Empty if the content is null or generation failed.</returns>
        public string GenerateInBase64(string content)
        {
            if (content == null)
            {
                return string.Empty;
            }

            var result = string.Empty;
            try
            {
                result = OnGenerateInBase64(content);
            }
            catch (Exception e)
            {
                Logger.GetInstance(typeof(Sha256)).Fatal($"Generating checksum in base64 error: {e}");
            }
            return result;
        }

        /// <summary>
        /// Generates the checksum value in hexadecimal form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <returns>System.String. Empty if the file is null or missing.</returns>
        public string GenerateInHex(FileInfo file)
        {
            return GenerateInHex(
                    file,
                    CancellationToken.None
            );
        }

        /// <summary>
        /// Generates the checksum value in hexadecimal form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns>System.String. Empty if the file is null or missing,
        /// or if generation was cancelled or failed.</returns>
        public string GenerateInHex(
                FileInfo file,
                CancellationToken cancellationToken)
        {
            if (file == null || !file.Exists)
            {
                return string.Empty;
            }

            var result = string.Empty;
            try
            {
                result = OnGenerateInHex(
                        file,
                        cancellationToken
                );
            }
            catch (OperationCanceledException)
            {
                Logger.GetInstance(typeof(Sha256)).Warn("Generating checksum in hex cancelled");
            }
            catch (Exception e)
            {
                Logger.GetInstance(typeof(Sha256)).Fatal($"Generating checksum in hex error: {e}");
            }
            return result;
        }

        /// <summary>
        /// Generates the checksum value in hexadecimal form.
        /// </summary>
        /// <param name="content">The content.</param>
        /// <returns>System.String. Empty if the content is null or generation failed.</returns>
        public string GenerateInHex(string content)
        {
            if (content == null)
            {
                return string.Empty;
            }

            var result = string.Empty;
            try
            {
                result = OnGenerateInHex(content);
            }
            catch (Exception e)
            {
                Logger.GetInstance(typeof(Sha256)).Fatal($"Generating checksum in hex error: {e}");
            }
            return result;
        }

        /// <summary>
        /// Validates the file in all checksum form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="checksum">The checksum.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInAll(
                FileInfo file,
                string checksum)
        {
            return ValidateInAll(
                    file,
                    checksum,
                    CancellationToken.None
            );
        }

        /// <summary>
        /// Validates the file in all checksum form.
        /// The checksum form (Base64 vs hex) is inferred from its length;
        /// any other length is rejected outright.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="checksum">The checksum.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInAll(
                FileInfo file,
                string checksum,
                CancellationToken cancellationToken)
        {
            if (string.IsNullOrWhiteSpace(checksum))
            {
                return false;
            }

            if (checksum.Length == Base64FormLength)
            {
                return ValidateInBase64(
                        file,
                        checksum,
                        cancellationToken
                );
            }
            if (checksum.Length == HexFormLength)
            {
                return ValidateInHex(
                        file,
                        checksum,
                        cancellationToken
                );
            }
            return false;
        }

        /// <summary>
        /// Validates the content in all checksum form.
        /// Note: unlike the file overload, a checksum that is not hex-length
        /// falls through to Base64 validation regardless of its length.
        /// </summary>
        /// <param name="content">The content.</param>
        /// <param name="checksum">The checksum.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInAll(
                string content,
                string checksum)
        {
            if (string.IsNullOrWhiteSpace(checksum))
            {
                return false;
            }

            if (checksum.Length == HexFormLength)
            {
                return ValidateInHex(
                        content,
                        checksum
                );
            }
            return ValidateInBase64(
                    content,
                    checksum
            );
        }

        /// <summary>
        /// Validates the file in Base64 form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="checksum">The checksum.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInBase64(
                FileInfo file,
                string checksum)
        {
            return ValidateInBase64(
                    file,
                    checksum,
                    CancellationToken.None
            );
        }

        /// <summary>
        /// Validates the file in Base64 form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="checksum">The checksum.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInBase64(
                FileInfo file,
                string checksum,
                CancellationToken cancellationToken)
        {
            if (file == null || !file.Exists || string.IsNullOrWhiteSpace(checksum))
            {
                return false;
            }

            var result = false;
            try
            {
                // Base64 comparison is case-sensitive by design.
                result = checksum.Equals(OnGenerateInBase64(
                        file,
                        cancellationToken
                ));
            }
            catch (OperationCanceledException)
            {
                Logger.GetInstance(typeof(Sha256)).Warn("Validating checksum in base64 cancelled");
            }
            catch (Exception e)
            {
                Logger.GetInstance(typeof(Sha256)).Fatal($"Validating checksum in base64 error: {e}");
            }
            return result;
        }

        /// <summary>
        /// Validates the content in Base64 form.
        /// </summary>
        /// <param name="content">The content.</param>
        /// <param name="checksum">The checksum.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInBase64(
                string content,
                string checksum)
        {
            if (content == null || string.IsNullOrWhiteSpace(checksum))
            {
                return false;
            }

            var result = false;
            try
            {
                result = checksum.Equals(OnGenerateInBase64(content));
            }
            catch (Exception e)
            {
                Logger.GetInstance(typeof(Sha256)).Fatal($"Validating checksum in base64 error: {e}");
            }
            return result;
        }

        /// <summary>
        /// Validates the file in hexadecimal form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="checksum">The checksum.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInHex(
                FileInfo file,
                string checksum)
        {
            return ValidateInHex(
                    file,
                    checksum,
                    CancellationToken.None
            );
        }

        /// <summary>
        /// Validates the file in hexadecimal form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="checksum">The checksum.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInHex(
                FileInfo file,
                string checksum,
                CancellationToken cancellationToken)
        {
            if (file == null || !file.Exists || string.IsNullOrWhiteSpace(checksum))
            {
                return false;
            }

            var result = false;
            try
            {
                // Hex comparison is case-insensitive: the checksum is lowercased
                // before comparing against the generated (lowercase) hex digest.
                result = checksum.ToLowerInvariant().Equals(OnGenerateInHex(
                        file,
                        cancellationToken
                ));
            }
            catch (OperationCanceledException)
            {
                Logger.GetInstance(typeof(Sha256)).Warn("Validating checksum in hex cancelled");
            }
            catch (Exception e)
            {
                Logger.GetInstance(typeof(Sha256)).Fatal($"Validating checksum in hex error: {e}");
            }
            return result;
        }

        /// <summary>
        /// Validates the content in hexadecimal form.
        /// </summary>
        /// <param name="content">The content.</param>
        /// <param name="checksum">The checksum.</param>
        /// <returns><c>true</c> if valid, <c>false</c> otherwise.</returns>
        public bool ValidateInHex(
                string content,
                string checksum)
        {
            if (content == null || string.IsNullOrWhiteSpace(checksum))
            {
                return false;
            }

            var result = false;
            try
            {
                result = checksum.ToLowerInvariant().Equals(OnGenerateInHex(content));
            }
            catch (Exception e)
            {
                Logger.GetInstance(typeof(Sha256)).Fatal($"Validating checksum in hex error: {e}");
            }
            return result;
        }

        /// <summary>
        /// Called when generating the checksum in Base64 form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns>System.String.</returns>
        protected abstract string OnGenerateInBase64(
                FileInfo file,
                CancellationToken cancellationToken
        );

        /// <summary>
        /// Called when generating the checksum in Base64 form.
        /// </summary>
        /// <param name="content">The content.</param>
        /// <returns>System.String.</returns>
        protected abstract string OnGenerateInBase64(string content);

        /// <summary>
        /// Called when generating the checksum in hexadecimal form.
        /// </summary>
        /// <param name="file">The file.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns>System.String.</returns>
        protected abstract string OnGenerateInHex(
                FileInfo file,
                CancellationToken cancellationToken
        );

        /// <summary>
        /// Called when generating the checksum in hexadecimal form.
        /// </summary>
        /// <param name="content">The content.</param>
        /// <returns>System.String.</returns>
        protected abstract string OnGenerateInHex(string content);
    }
}
// This source file is adapted from the Windows Presentation Foundation project.
// (https://github.com/dotnet/wpf/)
//
// Licensed to The Avalonia Project under MIT License, courtesy of The .NET Foundation.

using System;

namespace Avalonia.Controls
{
    /// <summary>
    /// Defines the available docking modes for a control in a <see cref="DockPanel"/>.
    /// </summary>
    public enum Dock
    {
        Left = 0,
        Bottom,
        Right,
        Top
    }

    /// <summary>
    /// A panel which arranges its children at the top, bottom, left, right or center.
    /// </summary>
    public class DockPanel : Panel
    {
        /// <summary>
        /// Defines the Dock attached property.
        /// </summary>
        public static readonly AttachedProperty<Dock> DockProperty =
            AvaloniaProperty.RegisterAttached<DockPanel, Control, Dock>("Dock");

        /// <summary>
        /// Defines the <see cref="LastChildFill"/> property.
        /// </summary>
        public static readonly StyledProperty<bool> LastChildFillProperty =
            AvaloniaProperty.Register<DockPanel, bool>(
                // BUGFIX: was nameof(LastChildFillProperty), which registered the
                // property under the name "LastChildFillProperty" instead of
                // "LastChildFill", breaking name-based property/style lookups.
                nameof(LastChildFill),
                defaultValue: true);

        /// <summary>
        /// Initializes static members of the <see cref="DockPanel"/> class.
        /// </summary>
        static DockPanel()
        {
            AffectsParentMeasure<DockPanel>(DockProperty);
        }

        /// <summary>
        /// Gets the value of the Dock attached property on the specified control.
        /// </summary>
        /// <param name="control">The control.</param>
        /// <returns>The Dock attached property.</returns>
        public static Dock GetDock(Control control)
        {
            return control.GetValue(DockProperty);
        }

        /// <summary>
        /// Sets the value of the Dock attached property on the specified control.
        /// </summary>
        /// <param name="control">The control.</param>
        /// <param name="value">The value of the Dock property.</param>
        public static void SetDock(Control control, Dock value)
        {
            control.SetValue(DockProperty, value);
        }

        /// <summary>
        /// Gets or sets a value which indicates whether the last child of the
        /// <see cref="DockPanel"/> fills the remaining space in the panel.
        /// </summary>
        public bool LastChildFill
        {
            get { return GetValue(LastChildFillProperty); }
            set { SetValue(LastChildFillProperty, value); }
        }

        /// <summary>
        /// Updates DesiredSize of the DockPanel. Called by parent Control. This is the first pass of layout.
        /// </summary>
        /// <remarks>
        /// Children are measured based on their sizing properties and <see cref="Dock" />.
        /// Each child is allowed to consume all of the space on the side on which it is docked; Left/Right docked
        /// children are granted all vertical space for their entire width, and Top/Bottom docked children are
        /// granted all horizontal space for their entire height.
        /// </remarks>
        /// <param name="constraint">Constraint size is an "upper limit" that the return value should not exceed.</param>
        /// <returns>The Panel's desired size.</returns>
        protected override Size MeasureOverride(Size constraint)
        {
            var children = Children;

            double parentWidth = 0;       // Our current required width due to children thus far.
            double parentHeight = 0;      // Our current required height due to children thus far.
            double accumulatedWidth = 0;  // Total width consumed by children.
            double accumulatedHeight = 0; // Total height consumed by children.

            for (int i = 0, count = children.Count; i < count; ++i)
            {
                var child = children[i];
                Size childConstraint;  // Contains the suggested input constraint for this child.
                Size childDesiredSize; // Contains the return size from child measure.

                if (child == null)
                {
                    continue;
                }

                // Child constraint is the remaining size; this is total size minus size consumed by previous children.
                childConstraint = new Size(
                    Math.Max(0.0, constraint.Width - accumulatedWidth),
                    Math.Max(0.0, constraint.Height - accumulatedHeight));

                // Measure child.
                child.Measure(childConstraint);
                childDesiredSize = child.DesiredSize;

                // Now, we adjust:
                // 1. Size consumed by children (accumulatedSize). This will be used when computing subsequent
                //    children to determine how much space is remaining for them.
                // 2. Parent size implied by this child (parentSize) when added to the current children (accumulatedSize).
                //    This is different from the size above in one respect: A Dock.Left child implies a height, but does
                //    not actually consume any height for subsequent children.
                // If we accumulate size in a given dimension, the next child (or the end conditions after the child loop)
                // will deal with computing our minimum size (parentSize) due to that accumulation.
                // Therefore, we only need to compute our minimum size (parentSize) in dimensions that this child does
                // not accumulate: Width for Top/Bottom, Height for Left/Right.
                switch (DockPanel.GetDock((Control)child))
                {
                    case Dock.Left:
                    case Dock.Right:
                        parentHeight = Math.Max(parentHeight, accumulatedHeight + childDesiredSize.Height);
                        accumulatedWidth += childDesiredSize.Width;
                        break;

                    case Dock.Top:
                    case Dock.Bottom:
                        parentWidth = Math.Max(parentWidth, accumulatedWidth + childDesiredSize.Width);
                        accumulatedHeight += childDesiredSize.Height;
                        break;
                }
            }

            // Make sure the final accumulated size is reflected in parentSize.
            parentWidth = Math.Max(parentWidth, accumulatedWidth);
            parentHeight = Math.Max(parentHeight, accumulatedHeight);

            return (new Size(parentWidth, parentHeight));
        }

        /// <summary>
        /// DockPanel computes a position and final size for each of its children based upon their
        /// <see cref="Dock" /> enum and sizing properties.
        /// </summary>
        /// <param name="arrangeSize">Size that DockPanel will assume to position children.</param>
        protected override Size ArrangeOverride(Size arrangeSize)
        {
            var children = Children;
            int totalChildrenCount = children.Count;
            // When LastChildFill is set, the final child ignores its Dock value
            // and receives all of the remaining space.
            int nonFillChildrenCount = totalChildrenCount - (LastChildFill ? 1 : 0);

            double accumulatedLeft = 0;
            double accumulatedTop = 0;
            double accumulatedRight = 0;
            double accumulatedBottom = 0;

            for (int i = 0; i < totalChildrenCount; ++i)
            {
                var child = children[i];

                if (child == null)
                {
                    continue;
                }

                Size childDesiredSize = child.DesiredSize;
                Rect rcChild = new Rect(
                    accumulatedLeft,
                    accumulatedTop,
                    Math.Max(0.0, arrangeSize.Width - (accumulatedLeft + accumulatedRight)),
                    Math.Max(0.0, arrangeSize.Height - (accumulatedTop + accumulatedBottom)));

                if (i < nonFillChildrenCount)
                {
                    switch (DockPanel.GetDock((Control)child))
                    {
                        case Dock.Left:
                            accumulatedLeft += childDesiredSize.Width;
                            rcChild = rcChild.WithWidth(childDesiredSize.Width);
                            break;

                        case Dock.Right:
                            accumulatedRight += childDesiredSize.Width;
                            rcChild = rcChild.WithX(Math.Max(0.0, arrangeSize.Width - accumulatedRight));
                            rcChild = rcChild.WithWidth(childDesiredSize.Width);
                            break;

                        case Dock.Top:
                            accumulatedTop += childDesiredSize.Height;
                            rcChild = rcChild.WithHeight(childDesiredSize.Height);
                            break;

                        case Dock.Bottom:
                            accumulatedBottom += childDesiredSize.Height;
                            rcChild = rcChild.WithY(Math.Max(0.0, arrangeSize.Height - accumulatedBottom));
                            rcChild = rcChild.WithHeight(childDesiredSize.Height);
                            break;
                    }
                }

                child.Arrange(rcChild);
            }

            return (arrangeSize);
        }
    }
}
namespace LuaInterface
{
    using System;
    using System.Runtime.InteropServices;
    using System.Text;

#pragma warning disable 414
    /// <summary>
    /// Marks a static method as safe to call back from native code when running
    /// under AOT compilation (Mono/IL2CPP convention). The delegate type is only
    /// recorded; the field is never read at runtime (hence the 414 suppression).
    /// </summary>
    public class MonoPInvokeCallbackAttribute : System.Attribute
    {
        private Type type; // delegate type this callback conforms to; kept for tooling only
        public MonoPInvokeCallbackAttribute(Type t)
        {
            type = t;
        }
    }
#pragma warning restore 414

    /// <summary>Lua value type tags, mirroring the LUA_T* constants in lua.h.</summary>
    public enum LuaTypes : int
    {
        LUA_TNONE = -1,
        LUA_TNIL = 0,
        LUA_TBOOLEAN = 1,
        LUA_TLIGHTUSERDATA = 2,
        LUA_TNUMBER = 3,
        LUA_TSTRING = 4,
        LUA_TTABLE = 5,
        LUA_TFUNCTION = 6,
        LUA_TUSERDATA = 7,
        LUA_TTHREAD = 8,
    }

    /// <summary>Options for lua_gc, mirroring the LUA_GC* constants in lua.h.</summary>
    public enum LuaGCOptions
    {
        LUA_GCSTOP = 0,
        LUA_GCRESTART = 1,
        LUA_GCCOLLECT = 2,
        LUA_GCCOUNT = 3,
        LUA_GCCOUNTB = 4,
        LUA_GCSTEP = 5,
        LUA_GCSETPAUSE = 6,
        LUA_GCSETSTEPMUL = 7,
    }

    /// <summary>Coroutine/error status codes, mirroring the LUA_* thread status constants.</summary>
    public enum LuaThreadStatus
    {
        LUA_YIELD = 1,
        LUA_ERRRUN = 2,
        LUA_ERRSYNTAX = 3,
        LUA_ERRMEM = 4,
        LUA_ERRERR = 5,
    }

    /// <summary>
    /// Pseudo-indices for the Lua registry and globals tables. The values are
    /// version-dependent; the Lua 5.1 values are active here.
    /// </summary>
    sealed class LuaIndexes
    {
        // for lua5.1
        public static int LUA_REGISTRYINDEX = -10000;
        // for lua5.2
        //public static int LUA_REGISTRYINDEX = -1000000 - 1000;
        public static int LUA_GLOBALSINDEX = -10002;
    }

    /// <summary>State passed by reference to a <see cref="LuaChunkReader"/>.</summary>
    [StructLayout(LayoutKind.Sequential)]
    public struct ReaderInfo
    {
        public String chunkData; // chunk source remaining to be fed to the reader
        public bool finished;    // set once the whole chunk has been delivered
    }

    /// <summary>C function callable from Lua; receives the Lua state, returns result count.</summary>
    public delegate int LuaCSFunction(IntPtr luaState);

    /// <summary>Chunk reader used with lua_load; returns the next piece of the chunk.</summary>
    public delegate string LuaChunkReader(IntPtr luaState, ref ReaderInfo data, ref uint size);

    public delegate int LuaFunctionCallback(IntPtr luaState);

    /// <summary>
    /// P/Invoke bindings to the native Lua library (plus a few C# convenience
    /// wrappers that replicate lua.h macros). Signatures and ordinal behavior are
    /// an ABI contract with the native DLL — do not reorder or retype members
    /// without checking the native side.
    /// </summary>
    public class LuaDLL
    {
        // Sentinel for lua_pcall/lua_call nResults: return all results.
        public static int LUA_MULTRET = -1;

#if UNITY_IPHONE && !UNITY_EDITOR
        // On iOS the native code is statically linked into the app binary.
        const string LUADLL = "__Internal";
#else
        const string LUADLL = "slua";
#endif

        // Thread Funcs
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_tothread(IntPtr L, int index);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_xmove(IntPtr from, IntPtr to, int n);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_yield(IntPtr L, int nresults);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr lua_newthread(IntPtr L);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_resume(IntPtr L, int narg);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_status(IntPtr L);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_pushthread(IntPtr L);

        // Compatibility wrapper: luaL_getn was removed after Lua 5.0; emulate via rawlen.
        public static int luaL_getn(IntPtr luaState, int i)
        {
            return (int)LuaDLL.lua_rawlen(luaState, i);
        }

        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_gc(IntPtr luaState, LuaGCOptions what, int data);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr lua_typename(IntPtr luaState, int type);

        // Returns the Lua type name as a managed string (native returns a static const char*).
        public static string lua_typenamestr(IntPtr luaState, LuaTypes type)
        {
            IntPtr p = lua_typename(luaState, (int)type);
            return Marshal.PtrToStringAnsi(p);
        }

        public static string luaL_typename(IntPtr luaState, int stackPos)
        {
            return LuaDLL.lua_typenamestr(luaState, LuaDLL.lua_type(luaState, stackPos));
        }

        // Type predicates replicating the lua_is* macros; return 1/0 as int like the C API.
        public static int lua_isfunction(IntPtr luaState, int stackPos)
        {
            return Convert.ToInt32(lua_type(luaState, stackPos) == LuaTypes.LUA_TFUNCTION);
        }

        public static int lua_islightuserdata(IntPtr luaState, int stackPos)
        {
            return Convert.ToInt32(lua_type(luaState, stackPos) == LuaTypes.LUA_TLIGHTUSERDATA);
        }

        public static int lua_istable(IntPtr luaState, int stackPos)
        {
            return Convert.ToInt32(lua_type(luaState, stackPos) == LuaTypes.LUA_TTABLE);
        }

        public static int lua_isthread(IntPtr luaState, int stackPos)
        {
            return Convert.ToInt32(lua_type(luaState, stackPos) == LuaTypes.LUA_TTHREAD);
        }

        // Raises a Lua error with source-location prefix (replicates luaL_error's
        // where + message + concat sequence). Does not return normally.
        public static void luaL_error(IntPtr luaState, string message)
        {
            LuaDLL.luaL_where(luaState, 1);
            LuaDLL.lua_pushstring(luaState, message);
            LuaDLL.lua_concat(luaState, 2);
            LuaDLL.lua_error(luaState);
        }

        // NOTE(review): returning string directly from a P/Invoke lets the default
        // marshaller attempt to free the native buffer, which Lua owns — confirm
        // against callers / consider IntPtr + PtrToStringAnsi.
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern string luaL_gsub(IntPtr luaState, string str, string pattern, string replacement);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_getfenv(IntPtr luaState, int stackPos);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_isuserdata(IntPtr luaState, int stackPos);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_lessthan(IntPtr luaState, int stackPos1, int stackPos2);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_rawequal(IntPtr luaState, int stackPos1, int stackPos2);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_setfenv(IntPtr luaState, int stackPos);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_setfield(IntPtr luaState, int stackPos, string name);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int luaL_callmeta(IntPtr luaState, int stackPos, string name);

        // State lifecycle
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr luaL_newstate();

        /// <summary>DEPRECATED - use luaL_newstate() instead!</summary>
        public static IntPtr lua_open()
        {
            return LuaDLL.luaL_newstate();
        }

        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_close(IntPtr luaState);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void luaL_openlibs(IntPtr luaState);

        // Length helpers: 5.1's lua_objlen is aliased to the 5.2-style lua_rawlen name.
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_objlen(IntPtr luaState, int stackPos);

        public static int lua_rawlen(IntPtr luaState, int stackPos)
        {
            return lua_objlen(luaState, stackPos);
        }

        public static int lua_strlen(IntPtr luaState, int stackPos)
        {
            return lua_rawlen(luaState, stackPos);
        }

        // Chunk loading / execution
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int luaL_loadstring(IntPtr luaState, string chunk);

        // Load then run a string; nonzero load result is returned without running.
        public static int luaL_dostring(IntPtr luaState, string chunk)
        {
            int result = LuaDLL.luaL_loadstring(luaState, chunk);
            if (result != 0)
                return result;

            return LuaDLL.lua_pcall(luaState, 0, -1, 0);
        }

        /// <summary>DEPRECATED - use luaL_dostring(IntPtr luaState, string chunk) instead!</summary>
        public static int lua_dostring(IntPtr luaState, string chunk)
        {
            return LuaDLL.luaL_dostring(luaState, chunk);
        }

        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_createtable(IntPtr luaState, int narr, int nrec);

        public static void lua_newtable(IntPtr luaState)
        {
            LuaDLL.lua_createtable(luaState, 0, 0);
        }

        public static int luaL_dofile(IntPtr luaState, string fileName)
        {
            int result = LuaDLL.luaL_loadfile(luaState, fileName);
            if (result != 0)
                return result;

            return LuaDLL.lua_pcall(luaState, 0, -1, 0);
        }

        // Globals access, emulated through the globals pseudo-index (lua 5.1 style).
        public static void lua_getglobal(IntPtr luaState, string name)
        {
            LuaDLL.lua_pushstring(luaState, name);
            LuaDLL.lua_gettable(luaState, LuaIndexes.LUA_GLOBALSINDEX);
        }
        //        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        //        public static extern void lua_getglobal(IntPtr luaState, string name

        public static void lua_setglobal(IntPtr luaState, string name)
        {
            LuaDLL.lua_pushstring(luaState, name);
            LuaDLL.lua_insert(luaState, -2); // move the name below the value to assign
            LuaDLL.lua_settable(luaState, LuaIndexes.LUA_GLOBALSINDEX);
        }
        //        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        //        public static extern void lua_setglobal(IntPtr luaState, string name

        // Stack manipulation
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_settop(IntPtr luaState, int newTop);

        public static void lua_pop(IntPtr luaState, int amount)
        {
            // Replicates the lua_pop macro: settop to -(n)-1.
            LuaDLL.lua_settop(luaState, -(amount) - 1);
        }

        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_insert(IntPtr luaState, int newTop);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_remove(IntPtr luaState, int index);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_gettable(IntPtr luaState, int index);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_rawget(IntPtr luaState, int index);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_settable(IntPtr luaState, int index);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_rawset(IntPtr luaState, int index);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_setmetatable(IntPtr luaState, int objIndex);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_getmetatable(IntPtr luaState, int objIndex);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_equal(IntPtr luaState, int index1, int index2);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_pushvalue(IntPtr luaState, int index);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_replace(IntPtr luaState, int index);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_gettop(IntPtr luaState);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern LuaTypes lua_type(IntPtr luaState, int index);

        public static bool lua_isnil(IntPtr luaState, int index)
        {
            return (LuaDLL.lua_type(luaState, index) == LuaTypes.LUA_TNIL);
        }

        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern bool lua_isnumber(IntPtr luaState, int index);

        public static bool lua_isboolean(IntPtr luaState, int index)
        {
            return LuaDLL.lua_type(luaState, index) == LuaTypes.LUA_TBOOLEAN;
        }

        // Registry references
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int luaL_ref(IntPtr luaState, int registryIndex);

        // Creates a registry ref for the value on top of the stack when lockRef
        // is nonzero; otherwise pops nothing and returns 0 (no reference).
        public static int lua_ref(IntPtr luaState, int lockRef)
        {
            if (lockRef != 0)
            {
                return LuaDLL.luaL_ref(luaState, LuaIndexes.LUA_REGISTRYINDEX);
            }
            else return 0;
        }

        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_rawgeti(IntPtr luaState, int tableIndex, int index);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_rawseti(IntPtr luaState, int tableIndex, int index);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr lua_newuserdata(IntPtr luaState, int size);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr lua_touserdata(IntPtr luaState, int index);

        // Pushes the value held by a registry reference onto the stack.
        public static void lua_getref(IntPtr luaState, int reference)
        {
            LuaDLL.lua_rawgeti(luaState, LuaIndexes.LUA_REGISTRYINDEX, reference);
        }

        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void luaL_unref(IntPtr luaState, int registryIndex, int reference);

        public static void lua_unref(IntPtr luaState, int reference)
        {
            LuaDLL.luaL_unref(luaState, LuaIndexes.LUA_REGISTRYINDEX, reference);
        }

        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern bool lua_isstring(IntPtr luaState, int index);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern bool lua_iscfunction(IntPtr luaState, int index);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_pushnil(IntPtr luaState);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_pushstdcallcfunction(IntPtr luaState, IntPtr wrapper);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void luaL_checktype(IntPtr luaState, int p, LuaTypes t);

        // Convenience overload: marshals the delegate to a native function pointer.
        // NOTE(review): nothing here keeps 'function' alive; if the delegate is
        // collected while Lua still holds the pointer, the callback crashes —
        // confirm callers root their delegates.
        public static void lua_pushstdcallcfunction(IntPtr luaState, LuaCSFunction function)
        {
            IntPtr fn = Marshal.GetFunctionPointerForDelegate(function);
            lua_pushstdcallcfunction(luaState, fn);
        }

        // Call / conversion functions. The commented-out *k / *x variants are the
        // Lua 5.2 forms kept here for reference.
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        //public static extern int lua_callk(IntPtr luaState, int nArgs, int nResults,int ctx,IntPtr k);
        public static extern int lua_call(IntPtr luaState, int nArgs, int nResults);
        //        public static int lua_call(IntPtr luaState, int nArgs, int nResults)
        //        {
        //            return lua_callk(luaState, nArgs, nResults, 0, IntPtr.Zero);
        //
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        //public static extern int lua_pcallk(IntPtr luaState, int nArgs, int nResults, int errfunc,int ctx,IntPtr k);
        public static extern int lua_pcall(IntPtr luaState, int nArgs, int nResults, int errfunc);
        //        public static int lua_pcall(IntPtr luaState, int nArgs, int nResults, int errfunc)
        //        {
        //            IntPtr fn = IntPtr.Zero;// Marshal.GetFunctionPointerForDelegate(lua_errorReport);
        //            return lua_pcallk(luaState, nArgs, nResults, errfunc, 0, fn);
        //
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr lua_tocfunction(IntPtr luaState, int index);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        //public static extern double lua_tonumberx(IntPtr luaState, int index, IntPtr x);
        public static extern double lua_tonumber(IntPtr luaState, int index);
        //        public static double lua_tonumber(IntPtr luaState, int index)
        //        {
        //            return lua_tonumberx(luaState, index, IntPtr.Zero);
        //
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        //public static extern int lua_tointegerx(IntPtr luaState, int index,IntPtr x);
        public static extern int lua_tointeger(IntPtr luaState, int index);
        //        public static int lua_tointeger(IntPtr luaState, int index)
        //        {
        //            return lua_tointegerx(luaState, index, IntPtr.Zero);
        //
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern bool lua_toboolean(IntPtr luaState, int index);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr lua_tolstring(IntPtr luaState, int index, out int strLen);

        // Converts the value at 'index' to a managed string; null when the value
        // has no string representation (tolstring returned NULL).
        public static string lua_tostring(IntPtr luaState, int index)
        {
            int strlen;

            IntPtr str = lua_tolstring(luaState, index, out strlen);

            if (str != IntPtr.Zero)
            {
                return Marshal.PtrToStringAnsi(str, strlen);
            }
            return null;
        }

        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_atpanic(IntPtr luaState, LuaCSFunction panicf);

        // Push functions
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_pushinteger(IntPtr luaState, int i);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_pushnumber(IntPtr luaState, double number);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_pushboolean(IntPtr luaState, bool value);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_pushlstring(IntPtr luaState, string str, int size);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_pushstring(IntPtr luaState, string str);

        // Metatables
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int luaL_newmetatable(IntPtr luaState, string meta);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_getfield(IntPtr luaState, int stackPos, string meta);

        public static void luaL_getmetatable(IntPtr luaState, string meta)
        {
            LuaDLL.lua_getfield(luaState, LuaIndexes.LUA_REGISTRYINDEX, meta);
        }

        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern IntPtr luaL_checkudata(IntPtr luaState, int stackPos, string meta);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern bool luaL_getmetafield(IntPtr luaState, int stackPos, string field);

        // Loading / misc
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_load(IntPtr luaState, LuaChunkReader chunkReader, ref ReaderInfo data, string chunkName);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int luaL_loadbuffer(IntPtr luaState, byte[] buff, int size, string name);
        //        public static int luaL_loadbuffer(IntPtr luaState, byte[] buff, int size, string name)
        //        {
        //            return luaL_loadbufferx(luaState, buff, size, name, IntPtr.Zero);
        //        }
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int luaL_loadfile(IntPtr luaState, string filename);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_error(IntPtr luaState);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern bool lua_checkstack(IntPtr luaState, int extra);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int lua_next(IntPtr luaState, int index);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_pushlightuserdata(IntPtr luaState, IntPtr udata);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void luaL_where(IntPtr luaState, int level);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int luaL_checkinteger(IntPtr luaState, int stackPos);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern double luaL_checknumber(IntPtr luaState, int stackPos);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern void lua_concat(IntPtr luaState, int n);

        // slua-specific helpers exported by the native side (luaS_* prefix).
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int luaS_newudata(IntPtr luaState, int val);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int luaS_rawnetobj(IntPtr luaState, int obj);
        [DllImport(LUADLL, CallingConvention = CallingConvention.Cdecl)]
        public static extern int luaS_checkcallback(IntPtr luaState, int index);
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

using System;
using System.Collections.Generic;
using System.Reflection;
using log4net;
using Mono.Addins;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Region.CoreModules.World.Wind;

namespace OpenSim.Region.CoreModules
{
    /// <summary>
    /// Region module that drives wind simulation via pluggable IWindModelPlugin
    /// implementations and pushes wind data to connected clients.
    /// </summary>
    public class WindModule : IWindModule
    {
        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        private uint m_frame = 0;                      // scene frame counter, incremented in WindUpdate
        private uint m_frameLastUpdateClientArray = 0; // frame at which windSpeeds was last refreshed
        private int m_frameUpdateRate = 150;           // update every N frames (configurable)
        //private Random m_rndnums = new Random(Environment.TickCount);
        private Scene m_scene = null;
        private bool m_ready = false;                  // true once Initialise completed with wind enabled
        private bool m_enabled = false;

        private IWindModelPlugin m_activeWindPlugin = null; // may remain null if the configured plugin is missing
        private const string m_dWindPluginName = "SimpleRandomWind";
        private Dictionary<string, IWindModelPlugin> m_availableWindPlugins = new Dictionary<string, IWindModelPlugin>();

        // Simplified windSpeeds based on the fact that the client protocal tracks at a resolution of 16m
        private Vector2[] windSpeeds = new Vector2[16 * 16];

        #region IRegion Methods

        /// <summary>
        /// Reads the [Wind] config section, discovers wind plugins via Mono.Addins,
        /// selects the configured (or default) plugin, registers console commands and
        /// frame/agent event handlers, then marks the module ready.
        /// </summary>
        public void Initialise(Scene scene, IConfigSource config)
        {
            IConfig windConfig = config.Configs["Wind"];
            string desiredWindPlugin = m_dWindPluginName;

            if (windConfig != null)
            {
                m_enabled = windConfig.GetBoolean("enabled", true);
                m_frameUpdateRate = windConfig.GetInt("wind_update_rate", 150);

                // Determine which wind model plugin is desired
                if (windConfig.Contains("wind_plugin"))
                {
                    desiredWindPlugin = windConfig.GetString("wind_plugin");
                }
            }

            if (m_enabled)
            {
                m_log.InfoFormat("[WIND] Enabled with an update rate of {0} frames.", m_frameUpdateRate);

                m_scene = scene;
                m_frame = 0;

                // Register all the Wind Model Plug-ins
                foreach (IWindModelPlugin windPlugin in AddinManager.GetExtensionObjects("/OpenSim/WindModule", false))
                {
                    m_log.InfoFormat("[WIND] Found Plugin: {0}", windPlugin.Name);
                    m_availableWindPlugins.Add(windPlugin.Name, windPlugin);
                }

                // Check for desired plugin
                if (m_availableWindPlugins.ContainsKey(desiredWindPlugin))
                {
                    m_activeWindPlugin = m_availableWindPlugins[desiredWindPlugin];

                    m_log.InfoFormat("[WIND] {0} plugin found, initializing.", desiredWindPlugin);

                    if (windConfig != null)
                    {
                        m_activeWindPlugin.Initialise();
                        m_activeWindPlugin.WindConfig(m_scene, windConfig);
                    }
                }

                // if the plug-in wasn't found, default to no wind.
                // NOTE(review): m_ready is still set true below even with no active
                // plugin — downstream code must null-check m_activeWindPlugin.
                if (m_activeWindPlugin == null)
                {
                    m_log.ErrorFormat("[WIND] Could not find specified wind plug-in: {0}", desiredWindPlugin);
                    m_log.ErrorFormat("[WIND] Defaulting to no wind.");
                }

                // This one puts an entry in the main help screen
                m_scene.AddCommand(this, String.Empty, "wind", "Usage: wind <plugin> <param> [value] - Get or Update Wind paramaters", null);

                // This one enables the ability to type just the base command without any parameters
                m_scene.AddCommand(this, "wind", "", "", HandleConsoleCommand);

                // Get a list of the parameters for each plugin
                foreach (IWindModelPlugin windPlugin in m_availableWindPlugins.Values)
                {
                    m_scene.AddCommand(this, String.Format("wind base wind_plugin {0}", windPlugin.Name), String.Format("{0} - {1}", windPlugin.Name, windPlugin.Description), "", HandleConsoleBaseCommand);
                    // NOTE(review): this registration does not depend on windPlugin, so
                    // "wind base wind_update_rate" is re-registered once per plugin — looks
                    // like it belongs outside the loop; confirm AddCommand tolerates duplicates.
                    m_scene.AddCommand(this, String.Format("wind base wind_update_rate"), "Change the wind update rate.", "", HandleConsoleBaseCommand);
                    foreach (KeyValuePair<string, string> kvp in windPlugin.WindParams())
                    {
                        m_scene.AddCommand(this, String.Format("wind {0} {1}", windPlugin.Name, kvp.Key), String.Format("{0} : {1} - {2}", windPlugin.Name, kvp.Key, kvp.Value), "", HandleConsoleParamCommand);
                    }
                }

                // Register event handlers for when Avatars enter the region, and frame ticks
                m_scene.EventManager.OnFrame += WindUpdate;
                m_scene.EventManager.OnMakeRootAgent += OnAgentEnteredRegion;

                // Register the wind module
                m_scene.RegisterModuleInterface<IWindModule>(this);

                // Generate initial wind values
                GenWindPos();

                // Mark Module Ready for duty
                m_ready = true;
            }
        }

        /// <summary>No post-initialisation work required.</summary>
        public void PostInitialise()
        {
        }
        /// <summary>
        /// Tears down the module: disposes all discovered plugins and unhooks
        /// scene event handlers. No-op when wind was never enabled.
        /// </summary>
        public void Close()
        {
            if (m_enabled)
            {
                m_ready = false;

                // REVIEW: If a region module is closed, is there a possibility that it'll re-open/initialize ??
                m_activeWindPlugin = null;
                foreach (IWindModelPlugin windPlugin in m_availableWindPlugins.Values)
                {
                    windPlugin.Dispose();
                }
                m_availableWindPlugins.Clear();

                // Remove our hooks
                m_scene.EventManager.OnFrame -= WindUpdate;
                m_scene.EventManager.OnMakeRootAgent -= OnAgentEnteredRegion;
            }
        }

        public string Name
        {
            get { return "WindModule"; }
        }

        public bool IsSharedModule
        {
            get { return false; }
        }

        #endregion

        #region Console Commands

        /// <summary>
        /// Logs a warning when the console is not focused on this module's scene.
        /// NOTE(review): the early returns here only exit this helper — callers
        /// still proceed regardless; making this return a bool would let callers
        /// actually abort. Left as-is to preserve the current interface.
        /// </summary>
        private void ValidateConsole()
        {
            if (m_scene.ConsoleScene() == null)
            {
                // FIXME: If console region is root then this will be printed by every module.  Currently, there is no
                // way to prevent this, short of making the entire module shared (which is complete overkill).
                // One possibility is to return a bool to signal whether the module has completely handled the command
                m_log.InfoFormat("[WIND]: Please change to a specific region in order to set Sun parameters.");
                return;
            }

            if (m_scene.ConsoleScene() != m_scene)
            {
                m_log.InfoFormat("[WIND]: Console Scene is not my scene.");
                return;
            }
        }

        /// <summary>
        /// Base console command handler, only used if a person specifies the base command with no options
        /// </summary>
        private void HandleConsoleCommand(string module, string[] cmdparams)
        {
            ValidateConsole();

            m_log.Info("[WIND] The wind command can be used to change the currently active wind model plugin and update the parameters for wind plugins.");
        }

        /// <summary>
        /// Called to change the active wind model plugin, or the module's update rate.
        /// Expects: wind base &lt;parameter&gt; &lt;value&gt;
        /// </summary>
        private void HandleConsoleBaseCommand(string module, string[] cmdparams)
        {
            ValidateConsole();

            if ((cmdparams.Length != 4)
                || !cmdparams[1].Equals("base"))
            {
                m_log.Info("[WIND] Invalid parameters to change parameters for Wind module base, usage: wind base <parameter> <value>");
                return;
            }

            switch (cmdparams[2])
            {
                case "wind_update_rate":
                    int newRate = 1;

                    if (int.TryParse(cmdparams[3], out newRate))
                    {
                        m_frameUpdateRate = newRate;
                    }
                    else
                    {
                        m_log.InfoFormat("[WIND] Invalid value {0} specified for {1}", cmdparams[3], cmdparams[2]);
                        return;
                    }

                    break;
                case "wind_plugin":
                    string desiredPlugin = cmdparams[3];

                    // NOTE(review): m_activeWindPlugin may be null ("no wind" fallback),
                    // in which case this dereference throws — TODO confirm and guard.
                    if (desiredPlugin.Equals(m_activeWindPlugin.Name))
                    {
                        m_log.InfoFormat("[WIND] Wind model plugin {0} is already active", cmdparams[3]);
                        return;
                    }

                    if (m_availableWindPlugins.ContainsKey(desiredPlugin))
                    {
                        m_activeWindPlugin = m_availableWindPlugins[cmdparams[3]];
                        m_log.InfoFormat("[WIND] {0} wind model plugin now active", m_activeWindPlugin.Name);
                    }
                    else
                    {
                        m_log.InfoFormat("[WIND] Could not find wind model plugin {0}", desiredPlugin);
                    }

                    break;
            }
        }

        /// <summary>
        /// Called to change plugin parameters.
        /// Expects: wind &lt;plugin&gt; &lt;param&gt; [value] — 4 args sets, 3 args gets.
        /// </summary>
        private void HandleConsoleParamCommand(string module, string[] cmdparams)
        {
            ValidateConsole();

            // wind <plugin> <param> [value]
            if ((cmdparams.Length != 4)
                && (cmdparams.Length != 3))
            {
                m_log.Info("[WIND] Usage: wind <plugin> <param> [value]");
                return;
            }

            string plugin = cmdparams[1];
            string param = cmdparams[2];
            float value = 0f;
            if (cmdparams.Length == 4)
            {
                // NOTE(review): on parse failure this logs but still calls
                // WindParamSet with 0 — preserved as-is; confirm intent.
                if (!float.TryParse(cmdparams[3], out value))
                {
                    m_log.InfoFormat("[WIND] Invalid value {0}", cmdparams[3]);
                }

                try
                {
                    WindParamSet(plugin, param, value);
                }
                catch (Exception e)
                {
                    m_log.InfoFormat("[WIND] {0}", e.Message);
                }
            }
            else
            {
                try
                {
                    value = WindParamGet(plugin, param);
                    m_log.InfoFormat("[WIND] {0} : {1}", param, value);
                }
                catch (Exception e)
                {
                    m_log.InfoFormat("[WIND] {0}", e.Message);
                }
            }
        }

        #endregion

        #region IWindModule Methods

        /// <summary>
        /// Retrieve the wind speed at the given region coordinate.  This
        /// implementation ignores Z.
/// </summary> /// <param name="x">0...255</param> /// <param name="y">0...255</param> public Vector3 WindSpeed(int x, int y, int z) { if (m_activeWindPlugin != null) { return m_activeWindPlugin.WindSpeed(x, y, z); } else { return new Vector3(0.0f, 0.0f, 0.0f); } } public void WindParamSet(string plugin, string param, float value) { if (m_availableWindPlugins.ContainsKey(plugin)) { IWindModelPlugin windPlugin = m_availableWindPlugins[plugin]; windPlugin.WindParamSet(param, value); m_log.InfoFormat("[WIND] {0} set to {1}", param, value); } else { throw new Exception(String.Format("Could not find plugin {0}", plugin)); } } public float WindParamGet(string plugin, string param) { if (m_availableWindPlugins.ContainsKey(plugin)) { IWindModelPlugin windPlugin = m_availableWindPlugins[plugin]; return windPlugin.WindParamGet(param); } else { throw new Exception(String.Format("Could not find plugin {0}", plugin)); } } public string WindActiveModelPluginName { get { if (m_activeWindPlugin != null) { return m_activeWindPlugin.Name; } else { return String.Empty; } } } #endregion /// <summary> /// Called on each frame update. Updates the wind model and clients as necessary. 
/// </summary> public void WindUpdate() { if (((m_frame++ % m_frameUpdateRate) != 0) || !m_ready) { return; } GenWindPos(); SendWindAllClients(); } public void OnAgentEnteredRegion(ScenePresence avatar) { if (m_ready) { if (m_activeWindPlugin != null) { // Ask wind plugin to generate a LL wind array to be cached locally // Try not to update this too often, as it may involve array copies if (m_frame >= (m_frameLastUpdateClientArray + m_frameUpdateRate)) { windSpeeds = m_activeWindPlugin.WindLLClientArray(); m_frameLastUpdateClientArray = m_frame; } } avatar.ControllingClient.SendWindData(windSpeeds); } } private void SendWindAllClients() { if (m_ready) { if(m_scene.GetRootAgentCount() > 0) { // Ask wind plugin to generate a LL wind array to be cached locally // Try not to update this too often, as it may involve array copies if (m_frame >= (m_frameLastUpdateClientArray + m_frameUpdateRate)) { windSpeeds = m_activeWindPlugin.WindLLClientArray(); m_frameLastUpdateClientArray = m_frame; } m_scene.ForEachScenePresence(delegate(ScenePresence sp) { if (!sp.IsChildAgent) sp.ControllingClient.SendWindData(windSpeeds); }); } } } /// <summary> /// Calculate the sun's orbital position and its velocity. /// </summary> private void GenWindPos() { if (m_activeWindPlugin != null) { // Tell Wind Plugin to update it's wind data m_activeWindPlugin.WindUpdate(m_frame); } } } }
// // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. using System; using System.Collections.Generic; using System.Globalization; using System.Linq; using System.Net; using System.Net.Http; using System.Net.Http.Headers; using System.Text; using System.Threading; using System.Threading.Tasks; using System.Xml.Linq; using Hyak.Common; using Microsoft.Azure; using Microsoft.WindowsAzure.Management.SiteRecovery; using Microsoft.WindowsAzure.Management.SiteRecovery.Models; namespace Microsoft.WindowsAzure.Management.SiteRecovery { /// <summary> /// Definition of vault extended info operations for the Site Recovery /// extension. /// </summary> internal partial class VaultExtendedInfoOperations : IServiceOperations<SiteRecoveryManagementClient>, IVaultExtendedInfoOperations { /// <summary> /// Initializes a new instance of the VaultExtendedInfoOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> internal VaultExtendedInfoOperations(SiteRecoveryManagementClient client) { this._client = client; } private SiteRecoveryManagementClient _client; /// <summary> /// Gets a reference to the /// Microsoft.WindowsAzure.Management.SiteRecovery.SiteRecoveryManagementClient. 
        /// </summary>
        public SiteRecoveryManagementClient Client
        {
            get { return this._client; }
        }

        /// <summary>
        /// Get the vault extended info.
        /// </summary>
        /// <param name='extendedInfoArgs'>
        /// Required. Create resource exnteded info input parameters.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        public async Task<AzureOperationResponse> CreateExtendedInfoAsync(ResourceExtendedInformationArgs extendedInfoArgs, CustomRequestHeaders customRequestHeaders, CancellationToken cancellationToken)
        {
            // Validate
            if (extendedInfoArgs == null)
            {
                throw new ArgumentNullException("extendedInfoArgs");
            }
            if (extendedInfoArgs.ContractVersion == null)
            {
                throw new ArgumentNullException("extendedInfoArgs.ContractVersion");
            }
            if (extendedInfoArgs.ExtendedInfo == null)
            {
                throw new ArgumentNullException("extendedInfoArgs.ExtendedInfo");
            }
            if (extendedInfoArgs.ExtendedInfoETag == null)
            {
                throw new ArgumentNullException("extendedInfoArgs.ExtendedInfoETag");
            }

            // Tracing
            bool shouldTrace = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("extendedInfoArgs", extendedInfoArgs);
                tracingParameters.Add("customRequestHeaders", customRequestHeaders);
                TracingAdapter.Enter(invocationId, this, "CreateExtendedInfoAsync", tracingParameters);
            }

            // Construct URL
            // <subscriptionId>/cloudservices/<cloudService>/resources/WAHyperVRecoveryManager/~/HyperVRecoveryManagerVault/<resourceName>/ExtendedInfo
            string url = "";
            if (this.Client.Credentials.SubscriptionId != null)
            {
                url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
            }
            url = url + "/cloudservices/";
            url = url + Uri.EscapeDataString(this.Client.CloudServiceName);
            url = url + "/resources/";
            url = url + "WAHyperVRecoveryManager";
            url = url + "/~/";
            url = url + "HyperVRecoveryManagerVault";
            url = url + "/";
            url = url + Uri.EscapeDataString(this.Client.ResourceName);
            url = url + "/ExtendedInfo";
            string baseUrl = this.Client.BaseUri.AbsoluteUri;
            // Trim '/' character from the end of baseUrl and beginning of url.
            if (baseUrl[baseUrl.Length - 1] == '/')
            {
                baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
            }
            if (url[0] == '/')
            {
                url = url.Substring(1);
            }
            url = baseUrl + "/" + url;
            url = url.Replace(" ", "%20");

            // Create HTTP transport objects
            HttpRequestMessage httpRequest = null;
            try
            {
                httpRequest = new HttpRequestMessage();
                httpRequest.Method = HttpMethod.Put;
                httpRequest.RequestUri = new Uri(url);

                // Set Headers
                // NOTE(review): customRequestHeaders is documented Optional but is
                // dereferenced unconditionally here — a null argument will throw. Generated
                // code; confirm callers always supply it.
                httpRequest.Headers.Add("Accept", "application/xml");
                httpRequest.Headers.Add("x-ms-client-request-id", customRequestHeaders.ClientRequestId);
                httpRequest.Headers.Add("x-ms-version", "2013-03-01");

                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

                // Serialize Request
                string requestContent = null;
                XDocument requestDoc = new XDocument();

                XElement resourceExtendedInformationArgsElement = new XElement(XName.Get("ResourceExtendedInformationArgs", "http://schemas.microsoft.com/windowsazure"));
                requestDoc.Add(resourceExtendedInformationArgsElement);

                XElement contractVersionElement = new XElement(XName.Get("ContractVersion", "http://schemas.microsoft.com/windowsazure"));
                contractVersionElement.Value = extendedInfoArgs.ContractVersion;
                resourceExtendedInformationArgsElement.Add(contractVersionElement);

                XElement extendedInfoElement = new XElement(XName.Get("ExtendedInfo", "http://schemas.microsoft.com/windowsazure"));
                extendedInfoElement.Value = extendedInfoArgs.ExtendedInfo;
                resourceExtendedInformationArgsElement.Add(extendedInfoElement);

                XElement extendedInfoETagElement = new XElement(XName.Get("ExtendedInfoETag", "http://schemas.microsoft.com/windowsazure"));
                extendedInfoETagElement.Value = extendedInfoArgs.ExtendedInfoETag;
                resourceExtendedInformationArgsElement.Add(extendedInfoETagElement);

                requestContent = requestDoc.ToString();
                httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
                httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/xml");

                // Send Request
                HttpResponseMessage httpResponse = null;
                try
                {
                    if (shouldTrace)
                    {
                        TracingAdapter.SendRequest(invocationId, httpRequest);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                    if (shouldTrace)
                    {
                        TracingAdapter.ReceiveResponse(invocationId, httpResponse);
                    }
                    // Anything other than 204 NoContent is surfaced as a CloudException.
                    HttpStatusCode statusCode = httpResponse.StatusCode;
                    if (statusCode != HttpStatusCode.NoContent)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (shouldTrace)
                        {
                            TracingAdapter.Error(invocationId, ex);
                        }
                        throw ex;
                    }

                    // Create Result
                    AzureOperationResponse result = null;
                    // Deserialize Response
                    result = new AzureOperationResponse();
                    result.StatusCode = statusCode;
                    if (httpResponse.Headers.Contains("x-ms-request-id"))
                    {
                        result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                    }

                    if (shouldTrace)
                    {
                        TracingAdapter.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    if (httpResponse != null)
                    {
                        httpResponse.Dispose();
                    }
                }
            }
            finally
            {
                if (httpRequest != null)
                {
                    httpRequest.Dispose();
                }
            }
        }

        /// <summary>
        /// Get the vault extended info.
        /// </summary>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The response model for the resource extended information object
        /// </returns>
        public async Task<ResourceExtendedInformationResponse> GetExtendedInfoAsync(CustomRequestHeaders customRequestHeaders, CancellationToken cancellationToken)
        {
            // Validate

            // Tracing
            bool shouldTrace = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("customRequestHeaders", customRequestHeaders);
                TracingAdapter.Enter(invocationId, this, "GetExtendedInfoAsync", tracingParameters);
            }

            // Construct URL
            // Same resource path as CreateExtendedInfoAsync, issued as a GET.
            string url = "";
            if (this.Client.Credentials.SubscriptionId != null)
            {
                url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
            }
            url = url + "/cloudservices/";
            url = url + Uri.EscapeDataString(this.Client.CloudServiceName);
            url = url + "/resources/";
            url = url + "WAHyperVRecoveryManager";
            url = url + "/~/";
            url = url + "HyperVRecoveryManagerVault";
            url = url + "/";
            url = url + Uri.EscapeDataString(this.Client.ResourceName);
            url = url + "/ExtendedInfo";
            string baseUrl = this.Client.BaseUri.AbsoluteUri;
            // Trim '/' character from the end of baseUrl and beginning of url.
            if (baseUrl[baseUrl.Length - 1] == '/')
            {
                baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
            }
            if (url[0] == '/')
            {
                url = url.Substring(1);
            }
            url = baseUrl + "/" + url;
            url = url.Replace(" ", "%20");

            // Create HTTP transport objects
            HttpRequestMessage httpRequest = null;
            try
            {
                httpRequest = new HttpRequestMessage();
                httpRequest.Method = HttpMethod.Get;
                httpRequest.RequestUri = new Uri(url);

                // Set Headers
                // NOTE(review): customRequestHeaders is documented Optional but is
                // dereferenced unconditionally — a null argument will throw.
                httpRequest.Headers.Add("Accept", "application/xml");
                httpRequest.Headers.Add("x-ms-client-request-id", customRequestHeaders.ClientRequestId);
                httpRequest.Headers.Add("x-ms-version", "2013-03-01");

                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);

                // Send Request
                HttpResponseMessage httpResponse = null;
                try
                {
                    if (shouldTrace)
                    {
                        TracingAdapter.SendRequest(invocationId, httpRequest);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                    if (shouldTrace)
                    {
                        TracingAdapter.ReceiveResponse(invocationId, httpResponse);
                    }
                    // Anything other than 200 OK is surfaced as a CloudException.
                    HttpStatusCode statusCode = httpResponse.StatusCode;
                    if (statusCode != HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (shouldTrace)
                        {
                            TracingAdapter.Error(invocationId, ex);
                        }
                        throw ex;
                    }

                    // Create Result
                    ResourceExtendedInformationResponse result = null;
                    // Deserialize Response
                    if (statusCode == HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                        result = new ResourceExtendedInformationResponse();
                        XDocument responseDoc = XDocument.Parse(responseContent);

                        // Map each child of <ResourceExtendedInformation> onto the result model;
                        // missing elements simply leave the corresponding property unset.
                        XElement resourceExtendedInformationElement = responseDoc.Element(XName.Get("ResourceExtendedInformation", "http://schemas.microsoft.com/windowsazure"));
                        if (resourceExtendedInformationElement != null)
                        {
                            ResourceExtendedInformation resourceExtendedInformationInstance = new ResourceExtendedInformation();
                            result.ResourceExtendedInformation = resourceExtendedInformationInstance;

                            XElement cloudServiceNameElement = resourceExtendedInformationElement.Element(XName.Get("CloudServiceName", "http://schemas.microsoft.com/windowsazure"));
                            if (cloudServiceNameElement != null)
                            {
                                string cloudServiceNameInstance = cloudServiceNameElement.Value;
                                resourceExtendedInformationInstance.CloudServiceName = cloudServiceNameInstance;
                            }

                            XElement contractVersionElement = resourceExtendedInformationElement.Element(XName.Get("ContractVersion", "http://schemas.microsoft.com/windowsazure"));
                            if (contractVersionElement != null)
                            {
                                string contractVersionInstance = contractVersionElement.Value;
                                resourceExtendedInformationInstance.ContractVersion = contractVersionInstance;
                            }

                            XElement extendedInfoElement = resourceExtendedInformationElement.Element(XName.Get("ExtendedInfo", "http://schemas.microsoft.com/windowsazure"));
                            if (extendedInfoElement != null)
                            {
                                string extendedInfoInstance = extendedInfoElement.Value;
                                resourceExtendedInformationInstance.ExtendedInfo = extendedInfoInstance;
                            }

                            XElement extendedInfoETagElement = resourceExtendedInformationElement.Element(XName.Get("ExtendedInfoETag", "http://schemas.microsoft.com/windowsazure"));
                            if (extendedInfoETagElement != null)
                            {
                                string extendedInfoETagInstance = extendedInfoETagElement.Value;
                                resourceExtendedInformationInstance.ExtendedInfoETag = extendedInfoETagInstance;
                            }

                            XElement resourceIdElement = resourceExtendedInformationElement.Element(XName.Get("ResourceId", "http://schemas.microsoft.com/windowsazure"));
                            if (resourceIdElement != null)
                            {
                                // Culture-invariant parse: the wire value is machine-generated.
                                long resourceIdInstance = long.Parse(resourceIdElement.Value, CultureInfo.InvariantCulture);
                                resourceExtendedInformationInstance.ResourceId = resourceIdInstance;
                            }

                            XElement resourceNameElement = resourceExtendedInformationElement.Element(XName.Get("ResourceName", "http://schemas.microsoft.com/windowsazure"));
                            if (resourceNameElement != null)
                            {
                                string resourceNameInstance = resourceNameElement.Value;
                                resourceExtendedInformationInstance.ResourceName = resourceNameInstance;
                            }

                            XElement resourceTypeElement = resourceExtendedInformationElement.Element(XName.Get("ResourceType", "http://schemas.microsoft.com/windowsazure"));
                            if (resourceTypeElement != null)
                            {
                                string resourceTypeInstance = resourceTypeElement.Value;
                                resourceExtendedInformationInstance.ResourceType = resourceTypeInstance;
                            }

                            XElement subscriptionIdElement = resourceExtendedInformationElement.Element(XName.Get("SubscriptionId", "http://schemas.microsoft.com/windowsazure"));
                            if (subscriptionIdElement != null)
                            {
                                Guid subscriptionIdInstance = Guid.Parse(subscriptionIdElement.Value);
                                resourceExtendedInformationInstance.SubscriptionId = subscriptionIdInstance;
                            }
                        }
                    }
                    result.StatusCode = statusCode;
                    if (httpResponse.Headers.Contains("x-ms-request-id"))
                    {
                        result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                    }

                    if (shouldTrace)
                    {
                        TracingAdapter.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    if (httpResponse != null)
                    {
                        httpResponse.Dispose();
                    }
                }
            }
            finally
            {
                if (httpRequest != null)
                {
                    httpRequest.Dispose();
                }
            }
        }

        /// <summary>
        /// Get the vault extended info.
        /// </summary>
        /// <param name='extendedInfoArgs'>
        /// Required. Update resource exnteded info input parameters.
        /// </param>
        /// <param name='customRequestHeaders'>
        /// Optional. Request header parameters.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
/// </param>
/// <returns>
/// The response model for the resource extended information object
/// </returns>
public async Task<ResourceExtendedInformationResponse> UpdateExtendedInfoAsync(ResourceExtendedInformationArgs extendedInfoArgs, CustomRequestHeaders customRequestHeaders, CancellationToken cancellationToken)
{
    // Validate required inputs up front so callers get an ArgumentNullException
    // instead of a serialization failure later on.
    if (extendedInfoArgs == null)
    {
        throw new ArgumentNullException("extendedInfoArgs");
    }
    if (extendedInfoArgs.ContractVersion == null)
    {
        throw new ArgumentNullException("extendedInfoArgs.ContractVersion");
    }
    if (extendedInfoArgs.ExtendedInfo == null)
    {
        throw new ArgumentNullException("extendedInfoArgs.ExtendedInfo");
    }
    if (extendedInfoArgs.ExtendedInfoETag == null)
    {
        throw new ArgumentNullException("extendedInfoArgs.ExtendedInfoETag");
    }
    
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("extendedInfoArgs", extendedInfoArgs);
        tracingParameters.Add("customRequestHeaders", customRequestHeaders);
        TracingAdapter.Enter(invocationId, this, "UpdateExtendedInfoAsync", tracingParameters);
    }
    
    // Construct URL:
    // {subscription}/cloudservices/{cloudService}/resources/WAHyperVRecoveryManager/~/HyperVRecoveryManagerVault/{resource}/ExtendedInfo
    string url = "";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/cloudservices/";
    url = url + Uri.EscapeDataString(this.Client.CloudServiceName);
    url = url + "/resources/";
    url = url + "WAHyperVRecoveryManager";
    url = url + "/~/";
    url = url + "HyperVRecoveryManagerVault";
    url = url + "/";
    url = url + Uri.EscapeDataString(this.Client.ResourceName);
    url = url + "/ExtendedInfo";
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");
    
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Post;
        httpRequest.RequestUri = new Uri(url);
        
        // Set Headers
        // NOTE(review): customRequestHeaders is documented as "Optional" but is
        // dereferenced unconditionally here — a null argument throws
        // NullReferenceException. Confirm whether callers may pass null.
        httpRequest.Headers.Add("Accept", "application/xml");
        httpRequest.Headers.Add("x-ms-client-request-id", customRequestHeaders.ClientRequestId);
        httpRequest.Headers.Add("x-ms-version", "2013-03-01");
        
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        
        // Serialize Request body as a ResourceExtendedInformationArgs XML document.
        string requestContent = null;
        XDocument requestDoc = new XDocument();
        
        XElement resourceExtendedInformationArgsElement = new XElement(XName.Get("ResourceExtendedInformationArgs", "http://schemas.microsoft.com/windowsazure"));
        requestDoc.Add(resourceExtendedInformationArgsElement);
        
        XElement contractVersionElement = new XElement(XName.Get("ContractVersion", "http://schemas.microsoft.com/windowsazure"));
        contractVersionElement.Value = extendedInfoArgs.ContractVersion;
        resourceExtendedInformationArgsElement.Add(contractVersionElement);
        
        XElement extendedInfoElement = new XElement(XName.Get("ExtendedInfo", "http://schemas.microsoft.com/windowsazure"));
        extendedInfoElement.Value = extendedInfoArgs.ExtendedInfo;
        resourceExtendedInformationArgsElement.Add(extendedInfoElement);
        
        XElement extendedInfoETagElement = new XElement(XName.Get("ExtendedInfoETag", "http://schemas.microsoft.com/windowsazure"));
        extendedInfoETagElement.Value = extendedInfoArgs.ExtendedInfoETag;
        resourceExtendedInformationArgsElement.Add(extendedInfoETagElement);
        
        requestContent = requestDoc.ToString();
        httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
        httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/xml");
        
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            // Any non-OK status is surfaced as a CloudException carrying the raw response body.
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            
            // Create Result
            ResourceExtendedInformationResponse result = null;
            // Deserialize Response: each child element is optional and only copied when present.
            if (statusCode == HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result = new ResourceExtendedInformationResponse();
                XDocument responseDoc = XDocument.Parse(responseContent);
                
                XElement resourceExtendedInformationElement = responseDoc.Element(XName.Get("ResourceExtendedInformation", "http://schemas.microsoft.com/windowsazure"));
                if (resourceExtendedInformationElement != null)
                {
                    ResourceExtendedInformation resourceExtendedInformationInstance = new ResourceExtendedInformation();
                    result.ResourceExtendedInformation = resourceExtendedInformationInstance;
                    
                    XElement cloudServiceNameElement = resourceExtendedInformationElement.Element(XName.Get("CloudServiceName", "http://schemas.microsoft.com/windowsazure"));
                    if (cloudServiceNameElement != null)
                    {
                        string cloudServiceNameInstance = cloudServiceNameElement.Value;
                        resourceExtendedInformationInstance.CloudServiceName = cloudServiceNameInstance;
                    }
                    
                    XElement contractVersionElement2 = resourceExtendedInformationElement.Element(XName.Get("ContractVersion", "http://schemas.microsoft.com/windowsazure"));
                    if (contractVersionElement2 != null)
                    {
                        string contractVersionInstance = contractVersionElement2.Value;
                        resourceExtendedInformationInstance.ContractVersion = contractVersionInstance;
                    }
                    
                    XElement extendedInfoElement2 = resourceExtendedInformationElement.Element(XName.Get("ExtendedInfo", "http://schemas.microsoft.com/windowsazure"));
                    if (extendedInfoElement2 != null)
                    {
                        string extendedInfoInstance = extendedInfoElement2.Value;
                        resourceExtendedInformationInstance.ExtendedInfo = extendedInfoInstance;
                    }
                    
                    XElement extendedInfoETagElement2 = resourceExtendedInformationElement.Element(XName.Get("ExtendedInfoETag", "http://schemas.microsoft.com/windowsazure"));
                    if (extendedInfoETagElement2 != null)
                    {
                        string extendedInfoETagInstance = extendedInfoETagElement2.Value;
                        resourceExtendedInformationInstance.ExtendedInfoETag = extendedInfoETagInstance;
                    }
                    
                    XElement resourceIdElement = resourceExtendedInformationElement.Element(XName.Get("ResourceId", "http://schemas.microsoft.com/windowsazure"));
                    if (resourceIdElement != null)
                    {
                        // Invariant culture: the wire format is machine-generated, not locale-dependent.
                        long resourceIdInstance = long.Parse(resourceIdElement.Value, CultureInfo.InvariantCulture);
                        resourceExtendedInformationInstance.ResourceId = resourceIdInstance;
                    }
                    
                    XElement resourceNameElement = resourceExtendedInformationElement.Element(XName.Get("ResourceName", "http://schemas.microsoft.com/windowsazure"));
                    if (resourceNameElement != null)
                    {
                        string resourceNameInstance = resourceNameElement.Value;
                        resourceExtendedInformationInstance.ResourceName = resourceNameInstance;
                    }
                    
                    XElement resourceTypeElement = resourceExtendedInformationElement.Element(XName.Get("ResourceType", "http://schemas.microsoft.com/windowsazure"));
                    if (resourceTypeElement != null)
                    {
                        string resourceTypeInstance = resourceTypeElement.Value;
                        resourceExtendedInformationInstance.ResourceType = resourceTypeInstance;
                    }
                    
                    XElement subscriptionIdElement = resourceExtendedInformationElement.Element(XName.Get("SubscriptionId", "http://schemas.microsoft.com/windowsazure"));
                    if (subscriptionIdElement != null)
                    {
                        Guid subscriptionIdInstance = Guid.Parse(subscriptionIdElement.Value);
                        resourceExtendedInformationInstance.SubscriptionId = subscriptionIdInstance;
                    }
                }
            }
            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            // Always release the response, even when deserialization throws.
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        // Always release the request, even when construction/sending throws.
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}

/// <summary>
/// Upload the vault certificate.
/// </summary>
/// <param name='certificateArgs'>
/// Required. Upload Vault Certificate input parameters.
/// </param>
/// <param name='customRequestHeaders'>
/// Optional. Request header parameters.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// The response model for the upload certificate response
/// </returns>
public async Task<UploadCertificateResponse> UploadCertificateAsync(CertificateArgs certificateArgs, CustomRequestHeaders customRequestHeaders, CancellationToken cancellationToken)
{
    // Validate required inputs up front so callers get an ArgumentNullException
    // instead of a serialization failure later on.
    if (certificateArgs == null)
    {
        throw new ArgumentNullException("certificateArgs");
    }
    if (certificateArgs.Certificate == null)
    {
        throw new ArgumentNullException("certificateArgs.Certificate");
    }
    if (certificateArgs.ContractVersion == null)
    {
        throw new ArgumentNullException("certificateArgs.ContractVersion");
    }
    
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("certificateArgs", certificateArgs);
        tracingParameters.Add("customRequestHeaders", customRequestHeaders);
        TracingAdapter.Enter(invocationId, this, "UploadCertificateAsync", tracingParameters);
    }
    
    // Construct URL:
    // {subscription}/cloudservices/{cloudService}/resources/WAHyperVRecoveryManager/~/HyperVRecoveryManagerVault/{resource}/management/certificate
    string url = "";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/cloudservices/";
    url = url + Uri.EscapeDataString(this.Client.CloudServiceName);
    url = url + "/resources/";
    url = url + "WAHyperVRecoveryManager";
    url = url + "/~/";
    url = url + "HyperVRecoveryManagerVault";
    url = url + "/";
    url = url + Uri.EscapeDataString(this.Client.ResourceName);
    url = url + "/management/certificate";
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    url = url.Replace(" ", "%20");
    
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Post;
        httpRequest.RequestUri = new Uri(url);
        
        // Set Headers
        // NOTE(review): customRequestHeaders is documented as "Optional" but is
        // dereferenced unconditionally here — a null argument throws
        // NullReferenceException. Confirm whether callers may pass null.
        httpRequest.Headers.Add("Accept", "application/xml");
        httpRequest.Headers.Add("x-ms-client-request-id", customRequestHeaders.ClientRequestId);
        httpRequest.Headers.Add("x-ms-version", "2013-03-01");
        
        // Set Credentials
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        
        // Serialize Request body as a CertificateArgs XML document.
        string requestContent = null;
        XDocument requestDoc = new XDocument();
        
        XElement certificateArgsElement = new XElement(XName.Get("CertificateArgs", "http://schemas.microsoft.com/windowsazure"));
        requestDoc.Add(certificateArgsElement);
        
        XElement contractVersionElement = new XElement(XName.Get("ContractVersion", "http://schemas.microsoft.com/windowsazure"));
        contractVersionElement.Value = certificateArgs.ContractVersion;
        certificateArgsElement.Add(contractVersionElement);
        
        // NOTE(review): element name is lower-case "certificate" while the sibling
        // elements are PascalCase; presumably the service contract expects this
        // casing — verify before changing.
        XElement certificateElement = new XElement(XName.Get("certificate", "http://schemas.microsoft.com/windowsazure"));
        certificateElement.Value = certificateArgs.Certificate;
        certificateArgsElement.Add(certificateElement);
        
        requestContent = requestDoc.ToString();
        httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
        httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/xml");
        
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            // Any non-OK status is surfaced as a CloudException carrying the raw response body.
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            
            // Create Result
            UploadCertificateResponse result = null;
            // Deserialize Response: each child element is optional and only copied when present.
            if (statusCode == HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result = new UploadCertificateResponse();
                XDocument responseDoc = XDocument.Parse(responseContent);
                
                XElement uploadCertificateResponseElement = responseDoc.Element(XName.Get("UploadCertificateResponse", "http://schemas.microsoft.com/windowsazure"));
                if (uploadCertificateResponseElement != null)
                {
                    XElement contractVersionElement2 = uploadCertificateResponseElement.Element(XName.Get("ContractVersion", "http://schemas.microsoft.com/windowsazure"));
                    if (contractVersionElement2 != null)
                    {
                        string contractVersionInstance = contractVersionElement2.Value;
                        result.ContractVersion = contractVersionInstance;
                    }
                    
                    XElement globalAcsHostNameElement = uploadCertificateResponseElement.Element(XName.Get("GlobalAcsHostName", "http://schemas.microsoft.com/windowsazure"));
                    if (globalAcsHostNameElement != null)
                    {
                        string globalAcsHostNameInstance = globalAcsHostNameElement.Value;
                        result.GlobalAcsHostName = globalAcsHostNameInstance;
                    }
                    
                    XElement globalAcsNamespaceElement = uploadCertificateResponseElement.Element(XName.Get("GlobalAcsNamespace", "http://schemas.microsoft.com/windowsazure"));
                    if (globalAcsNamespaceElement != null)
                    {
                        string globalAcsNamespaceInstance = globalAcsNamespaceElement.Value;
                        result.GlobalAcsNamespace = globalAcsNamespaceInstance;
                    }
                    
                    XElement globalAcsRPRealmElement = uploadCertificateResponseElement.Element(XName.Get("GlobalAcsRPRealm", "http://schemas.microsoft.com/windowsazure"));
                    if (globalAcsRPRealmElement != null)
                    {
                        string globalAcsRPRealmInstance = globalAcsRPRealmElement.Value;
                        result.GlobalAcsRPRealm = globalAcsRPRealmInstance;
                    }
                    
                    XElement resourceIdElement = uploadCertificateResponseElement.Element(XName.Get("ResourceId", "http://schemas.microsoft.com/windowsazure"));
                    if (resourceIdElement != null)
                    {
                        // Invariant culture: the wire format is machine-generated, not locale-dependent.
                        long resourceIdInstance = long.Parse(resourceIdElement.Value, CultureInfo.InvariantCulture);
                        result.ResourceId = resourceIdInstance;
                    }
                }
            }
            result.StatusCode = statusCode;
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            // Always release the response, even when deserialization throws.
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        // Always release the request, even when construction/sending throws.
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
}
}
// Copyright 1998-2015 Epic Games, Inc. All Rights Reserved.

using System;
using System.Collections.Generic;
using System.IO;
using System.Diagnostics;
using System.Xml;
using System.Text.RegularExpressions;
using System.Linq;
using System.Reflection;
using System.Threading;

namespace UnrealBuildTool
{
	/**
	 * Executes build actions through the SN-DBS distributed build system (dbsbuild),
	 * running actions that cannot execute remotely on the local machine instead.
	 */
	public class SNDBS
	{
		/** Upper bound on locally-executed actions running at once; derived from the core count in ExecuteActions(List). */
		static private int MaxActionsToExecuteInParallel;

		/** 1-based counter of jobs started so far, used for "[N/Total]" progress logging. */
		static private int JobNumber;

		/** The possible result of executing tasks with SN-DBS. */
		public enum ExecutionResult
		{
			Unavailable,
			TasksFailed,
			TasksSucceeded,
		}

		/**
		 * Used when debugging Actions outputs all action return values to debug out
		 *
		 * @param sender Sending object
		 * @param e Event arguments (In this case, the line of string output)
		 */
		static protected void ActionDebugOutput(object sender, DataReceivedEventArgs e)
		{
			var Output = e.Data;
			if (Output == null)
			{
				return;
			}

			Log.TraceInformation(Output);
		}

		/**
		 * Executes the given actions on the local machine, respecting prerequisite ordering and the
		 * MaxActionsToExecuteInParallel limit. Polls until every action has either run or been skipped.
		 *
		 * @param InLocalActions            Actions to run locally
		 * @param InActionThreadDictionary  Shared map of action -> thread; a null value marks an action skipped due to failed prerequisites
		 * @param TotalNumJobs              Total job count, used for progress logging
		 */
		internal static ExecutionResult ExecuteLocalActions(List<Action> InLocalActions, Dictionary<Action, ActionThread> InActionThreadDictionary, int TotalNumJobs)
		{
			// Time to sleep after each iteration of the loop in order to not busy wait.
			const float LoopSleepTime = 0.1f;

			// NOTE: never set to TasksFailed here; per-action failures are detected by the caller
			// via the exit codes recorded in InActionThreadDictionary.
			ExecutionResult LocalActionsResult = ExecutionResult.TasksSucceeded;

			while (true)
			{
				// Count the number of pending and still executing actions.
				int NumUnexecutedActions = 0;
				int NumExecutingActions = 0;
				foreach (Action Action in InLocalActions)
				{
					ActionThread ActionThread = null;
					bool bFoundActionProcess = InActionThreadDictionary.TryGetValue(Action, out ActionThread);
					if (bFoundActionProcess == false)
					{
						NumUnexecutedActions++;
					}
					else if (ActionThread != null)
					{
						if (ActionThread.bComplete == false)
						{
							NumUnexecutedActions++;
							NumExecutingActions++;
						}
					}
				}

				// If there aren't any pending actions left, we're done executing.
				if (NumUnexecutedActions == 0)
				{
					break;
				}

				// If there are fewer actions executing than the maximum, look for pending actions
				// that don't have any outdated prerequisites.
				foreach (Action Action in InLocalActions)
				{
					ActionThread ActionProcess = null;
					bool bFoundActionProcess = InActionThreadDictionary.TryGetValue(Action, out ActionProcess);
					if (bFoundActionProcess == false)
					{
						if (NumExecutingActions < Math.Max(1, MaxActionsToExecuteInParallel))
						{
							// Determine whether there are any prerequisites of the action that are outdated.
							bool bHasOutdatedPrerequisites = false;
							bool bHasFailedPrerequisites = false;
							foreach (FileItem PrerequisiteItem in Action.PrerequisiteItems)
							{
								if (PrerequisiteItem.ProducingAction != null && InLocalActions.Contains(PrerequisiteItem.ProducingAction))
								{
									ActionThread PrerequisiteProcess = null;
									bool bFoundPrerequisiteProcess = InActionThreadDictionary.TryGetValue(PrerequisiteItem.ProducingAction, out PrerequisiteProcess);
									if (bFoundPrerequisiteProcess == true)
									{
										if (PrerequisiteProcess == null)
										{
											// Producer was skipped: this action inherits the failure.
											bHasFailedPrerequisites = true;
										}
										else if (PrerequisiteProcess.bComplete == false)
										{
											bHasOutdatedPrerequisites = true;
										}
										else if (PrerequisiteProcess.ExitCode != 0)
										{
											bHasFailedPrerequisites = true;
										}
									}
									else
									{
										// Producer hasn't been scheduled yet.
										bHasOutdatedPrerequisites = true;
									}
								}
							}

							// If there are any failed prerequisites of this action, don't execute it.
							if (bHasFailedPrerequisites)
							{
								// Add a null entry in the dictionary for this action.
								InActionThreadDictionary.Add(Action, null);
							}
							// If there aren't any outdated prerequisites of this action, execute it.
							else if (!bHasOutdatedPrerequisites)
							{
								ActionThread ActionThread = new ActionThread(Action, JobNumber, TotalNumJobs);
								ActionThread.Run();

								InActionThreadDictionary.Add(Action, ActionThread);

								NumExecutingActions++;
								JobNumber++;
							}
						}
					}
				}

				System.Threading.Thread.Sleep(TimeSpan.FromSeconds(LoopSleepTime));
			}

			return LocalActionsResult;
		}

		/**
		 * Writes every remotely-executable, ready action into a batch script and hands the script to
		 * dbsbuild, then runs the remaining local-only actions via ExecuteLocalActions.
		 *
		 * @param InActions                 Actions to schedule in this pass
		 * @param InActionThreadDictionary  Shared map of action -> thread; updated as actions are scheduled
		 */
		internal static ExecutionResult ExecuteActions(List<Action> InActions, Dictionary<Action, ActionThread> InActionThreadDictionary)
		{
			// Build the script file that will be executed by SN-DBS. A single filename is used for
			// both writing and the dbsbuild invocation; the previous code wrote "SNDBS.bat" but
			// passed "sndbs.bat" with a forward slash to dbsbuild, which only worked because
			// Windows filesystems are case-insensitive.
			string ScriptFilename = Path.Combine(BuildConfiguration.BaseIntermediatePath, "sndbs.bat");

			int NumScriptedActions = 0;
			List<Action> LocalActions = new List<Action>();

			// Placeholder thread recorded for every scripted action; marked complete once dbsbuild exits.
			ActionThread DummyActionThread = new ActionThread(null, 1, 1);

			// The using blocks guarantee the script is flushed and the handle released before
			// dbsbuild reads it, even if script generation throws (the previous code leaked the
			// stream on exception).
			using (FileStream ScriptFileStream = new FileStream(ScriptFilename, FileMode.Create, FileAccess.ReadWrite, FileShare.Read))
			using (StreamWriter ScriptFile = new StreamWriter(ScriptFileStream))
			{
				ScriptFile.AutoFlush = true;

				foreach (Action Action in InActions)
				{
					ActionThread ActionProcess = null;
					bool bFoundActionProcess = InActionThreadDictionary.TryGetValue(Action, out ActionProcess);
					if (bFoundActionProcess == false)
					{
						// Determine whether there are any prerequisites of the action that are outdated.
						bool bHasOutdatedPrerequisites = false;
						bool bHasFailedPrerequisites = false;
						foreach (FileItem PrerequisiteItem in Action.PrerequisiteItems)
						{
							if (PrerequisiteItem.ProducingAction != null && InActions.Contains(PrerequisiteItem.ProducingAction))
							{
								ActionThread PrerequisiteProcess = null;
								bool bFoundPrerequisiteProcess = InActionThreadDictionary.TryGetValue(PrerequisiteItem.ProducingAction, out PrerequisiteProcess);
								if (bFoundPrerequisiteProcess == true)
								{
									if (PrerequisiteProcess == null)
									{
										bHasFailedPrerequisites = true;
									}
									else if (PrerequisiteProcess.bComplete == false)
									{
										bHasOutdatedPrerequisites = true;
									}
									else if (PrerequisiteProcess.ExitCode != 0)
									{
										bHasFailedPrerequisites = true;
									}
								}
								else
								{
									bHasOutdatedPrerequisites = true;
								}
							}
						}

						// If there are any failed prerequisites of this action, don't execute it.
						if (bHasFailedPrerequisites)
						{
							// Add a null entry in the dictionary for this action.
							InActionThreadDictionary.Add(Action, null);
						}
						// If there aren't any outdated prerequisites of this action, execute it.
						else if (!bHasOutdatedPrerequisites)
						{
							if (Action.bCanExecuteRemotely == false)
							{
								// Execute locally
								LocalActions.Add(Action);
							}
							else
							{
								// Add to script for execution by SN-DBS
								string NewCommandArguments = "\"" + Action.CommandPath + "\"" + " " + Action.CommandArguments;
								ScriptFile.WriteLine(ActionThread.ExpandEnvironmentVariables(NewCommandArguments));
								InActionThreadDictionary.Add(Action, DummyActionThread);
								Action.StartTime = Action.EndTime = DateTimeOffset.Now;
								Log.TraceInformation("[{0}/{1}] {2} {3}", JobNumber, InActions.Count, Action.CommandDescription, Action.StatusDescription);
								JobNumber++;
								NumScriptedActions++;
							}
						}
					}
				}
			}

			if (NumScriptedActions > 0)
			{
				// Launch dbsbuild on the generated script. The script path is quoted in case the
				// intermediate directory contains spaces.
				ProcessStartInfo PSI = new ProcessStartInfo("dbsbuild", "-q -p UE4 -s \"" + ScriptFilename + "\"");
				PSI.RedirectStandardOutput = true;
				PSI.RedirectStandardError = true;
				PSI.UseShellExecute = false;
				PSI.CreateNoWindow = true;
				PSI.WorkingDirectory = Path.GetFullPath(".");

				using (Process NewProcess = new Process())
				{
					NewProcess.StartInfo = PSI;
					NewProcess.OutputDataReceived += new DataReceivedEventHandler(ActionDebugOutput);
					NewProcess.ErrorDataReceived += new DataReceivedEventHandler(ActionDebugOutput);
					DateTimeOffset StartTime = DateTimeOffset.Now;
					NewProcess.Start();
					NewProcess.BeginOutputReadLine();
					NewProcess.BeginErrorReadLine();
					NewProcess.WaitForExit();

					// Previously the freshly-captured end time was compared against
					// DateTimeOffset.MinValue — a branch that could never be taken. The duration is
					// simply end minus start.
					TimeSpan Duration = DateTimeOffset.Now - StartTime;

					DummyActionThread.bComplete = true;
					if (NewProcess.ExitCode != 0)
					{
						return ExecutionResult.TasksFailed;
					}
					UnrealBuildTool.TotalCompileTime += Duration.TotalSeconds;
				}
			}

			// Execute local tasks
			if (LocalActions.Count > 0)
			{
				return ExecuteLocalActions(LocalActions, InActionThreadDictionary, InActions.Count);
			}

			return ExecutionResult.TasksSucceeded;
		}

		/**
		 * Main entry point: executes the given actions through SN-DBS, if it is installed.
		 *
		 * @param Actions  Actions to build
		 * @return Unavailable when SN-DBS is not installed, otherwise TasksSucceeded/TasksFailed
		 */
		public static ExecutionResult ExecuteActions(List<Action> Actions)
		{
			ExecutionResult SNDBSResult = ExecutionResult.TasksSucceeded;
			if (Actions.Count > 0)
			{
				// SN-DBS is located relative to the SCE SDK root environment variable.
				string SCERoot = Environment.GetEnvironmentVariable("SCE_ROOT_DIR");
				bool bSNDBSExists = false;
				if (SCERoot != null)
				{
					string SNDBSExecutable = Path.Combine(SCERoot, "Common/SN-DBS/bin/dbsbuild.exe");

					// Check that SN-DBS is available
					bSNDBSExists = File.Exists(SNDBSExecutable);
				}
				if (bSNDBSExists == false)
				{
					return ExecutionResult.Unavailable;
				}

				// Use WMI to figure out physical cores, excluding hyper threading.
				int NumCores = 0;
				if (!Utils.IsRunningOnMono)
				{
					try
					{
						using (var Mos = new System.Management.ManagementObjectSearcher("Select * from Win32_Processor"))
						{
							var MosCollection = Mos.Get();
							foreach (var Item in MosCollection)
							{
								NumCores += int.Parse(Item["NumberOfCores"].ToString());
							}
						}
					}
					catch (Exception Ex)
					{
						// Best effort: fall through to the logical processor count below.
						Log.TraceWarning("Unable to get the number of Cores: {0}", Ex.ToString());
						Log.TraceWarning("Falling back to processor count.");
					}
				}
				// On some systems this requires a hot fix to work so we fall back to using the (logical) processor count.
				if (NumCores == 0)
				{
					NumCores = System.Environment.ProcessorCount;
				}
				// The number of actions to execute in parallel is trying to keep the CPU busy enough in presence of I/O stalls.
				MaxActionsToExecuteInParallel = 0;
				// The CPU has more logical cores than physical ones, aka uses hyper-threading.
				if (NumCores < System.Environment.ProcessorCount)
				{
					MaxActionsToExecuteInParallel = (int)(NumCores * BuildConfiguration.ProcessorCountMultiplier);
				}
				// No hyper-threading. Only kicking off a task per CPU to keep machine responsive.
				else
				{
					MaxActionsToExecuteInParallel = NumCores;
				}
				MaxActionsToExecuteInParallel = Math.Min(MaxActionsToExecuteInParallel, BuildConfiguration.MaxProcessorCount);

				JobNumber = 1;
				Dictionary<Action, ActionThread> ActionThreadDictionary = new Dictionary<Action, ActionThread>();

				// Keep scheduling batches until every action has an entry in the dictionary.
				while (true)
				{
					bool bUnexecutedActions = false;
					foreach (Action Action in Actions)
					{
						ActionThread ActionThread = null;
						bool bFoundActionProcess = ActionThreadDictionary.TryGetValue(Action, out ActionThread);
						if (bFoundActionProcess == false)
						{
							bUnexecutedActions = true;
							ExecutionResult CompileResult = ExecuteActions(Actions, ActionThreadDictionary);
							if (CompileResult != ExecutionResult.TasksSucceeded)
							{
								return ExecutionResult.TasksFailed;
							}
							break;
						}
					}

					if (bUnexecutedActions == false)
					{
						break;
					}
				}

				Log.WriteLineIf(BuildConfiguration.bLogDetailedActionStats, TraceEventType.Information, "-------- Begin Detailed Action Stats ----------------------------------------------------------");
				Log.WriteLineIf(BuildConfiguration.bLogDetailedActionStats, TraceEventType.Information, "^Action Type^Duration (seconds)^Tool^Task^Using PCH");

				double TotalThreadSeconds = 0;

				// Check whether any of the tasks failed and log action stats if wanted.
				foreach (KeyValuePair<Action, ActionThread> ActionProcess in ActionThreadDictionary)
				{
					Action Action = ActionProcess.Key;
					ActionThread ActionThread = ActionProcess.Value;

					// Check for pending actions, preemptive failure
					if (ActionThread == null)
					{
						SNDBSResult = ExecutionResult.TasksFailed;
						continue;
					}
					// Check for executed action but general failure
					if (ActionThread.ExitCode != 0)
					{
						SNDBSResult = ExecutionResult.TasksFailed;
					}
					// Log CPU time, tool and task.
					double ThreadSeconds = Action.Duration.TotalSeconds;
					Log.WriteLineIf(BuildConfiguration.bLogDetailedActionStats,
						TraceEventType.Information,
						"^{0}^{1:0.00}^{2}^{3}^{4}",
						Action.ActionType.ToString(),
						ThreadSeconds,
						Path.GetFileName(Action.CommandPath),
						Action.StatusDescription,
						Action.bIsUsingPCH);

					// Update statistics
					switch (Action.ActionType)
					{
						case ActionType.BuildProject:
							UnrealBuildTool.TotalBuildProjectTime += ThreadSeconds;
							break;

						case ActionType.Compile:
							UnrealBuildTool.TotalCompileTime += ThreadSeconds;
							break;

						case ActionType.CreateAppBundle:
							UnrealBuildTool.TotalCreateAppBundleTime += ThreadSeconds;
							break;

						case ActionType.GenerateDebugInfo:
							UnrealBuildTool.TotalGenerateDebugInfoTime += ThreadSeconds;
							break;

						case ActionType.Link:
							UnrealBuildTool.TotalLinkTime += ThreadSeconds;
							break;

						default:
							UnrealBuildTool.TotalOtherActionsTime += ThreadSeconds;
							break;
					}

					// Keep track of total thread seconds spent on tasks.
					TotalThreadSeconds += ThreadSeconds;
				}

				// Emit the end marker under the same condition as the begin marker; it was
				// previously logged unconditionally, producing a dangling footer when detailed
				// stats were disabled.
				Log.WriteLineIf(BuildConfiguration.bLogDetailedActionStats, TraceEventType.Information, "-------- End Detailed Actions Stats -----------------------------------------------------------");

				// Log total CPU seconds and numbers of processors involved in tasks.
				Log.WriteLineIf(BuildConfiguration.bLogDetailedActionStats || BuildConfiguration.bPrintDebugInfo,
					TraceEventType.Information, "Cumulative thread seconds ({0} processors): {1:0.00}", System.Environment.ProcessorCount, TotalThreadSeconds);
			}
			return SNDBSResult;
		}
	}
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using System;
using System.Linq;
using osu.Framework.Allocation;
using osu.Framework.Bindables;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Shapes;
using osu.Framework.Platform;
using osu.Game.Graphics;
using osu.Game.Tournament.Components;
using osu.Game.Tournament.Models;
using osu.Game.Tournament.Screens.Ladder.Components;
using osuTK;
using osuTK.Graphics;

namespace osu.Game.Tournament.Screens.Schedule
{
    /// <summary>
    /// Tournament screen displaying recent matches, upcoming matches and the currently
    /// selected ("coming up next") match, over a looping background video.
    /// Content refreshes whenever the ladder's current match changes.
    /// </summary>
    public class ScheduleScreen : TournamentScreen // IProvidesVideo
    {
        // Local bindable bound to the ladder's CurrentMatch; drives matchChanged().
        private readonly Bindable<TournamentMatch> currentMatch = new Bindable<TournamentMatch>();

        // Container whose content is rebuilt on every match change (see matchChanged).
        private Container mainContainer;
        private LadderInfo ladder;

        [BackgroundDependencyLoader]
        private void load(LadderInfo ladder, Storage storage)
        {
            this.ladder = ladder;

            RelativeSizeAxes = Axes.Both;

            InternalChildren = new Drawable[]
            {
                // Full-screen looping background video.
                new TourneyVideo("schedule")
                {
                    RelativeSizeAxes = Axes.Both,
                    Loop = true,
                },
                new Container
                {
                    RelativeSizeAxes = Axes.Both,
                    Padding = new MarginPadding(100) { Bottom = 50 },
                    Children = new Drawable[]
                    {
                        // Two rows: an auto-sized header row, and a fill row for the match lists.
                        new GridContainer
                        {
                            RelativeSizeAxes = Axes.Both,
                            RowDimensions = new[]
                            {
                                new Dimension(GridSizeMode.AutoSize),
                                new Dimension(),
                            },
                            Content = new[]
                            {
                                new Drawable[]
                                {
                                    // Header: tournament title plus "Schedule" screen label.
                                    new FillFlowContainer
                                    {
                                        AutoSizeAxes = Axes.Both,
                                        Direction = FillDirection.Vertical,
                                        Children = new Drawable[]
                                        {
                                            new DrawableTournamentHeaderText(),
                                            new Container
                                            {
                                                Margin = new MarginPadding { Top = 40 },
                                                AutoSizeAxes = Axes.Both,
                                                Children = new Drawable[]
                                                {
                                                    new Box
                                                    {
                                                        Colour = Color4.White,
                                                        Size = new Vector2(50, 10),
                                                    },
                                                    new TournamentSpriteTextWithBackground("Schedule")
                                                    {
                                                        X = 60,
                                                        Scale = new Vector2(0.8f)
                                                    }
                                                }
                                            },
                                        }
                                    },
                                },
                                new Drawable[]
                                {
                                    // Populated by matchChanged() whenever the current match updates.
                                    mainContainer = new Container
                                    {
                                        RelativeSizeAxes = Axes.Both,
                                    }
                                }
                            }
                        }
                    }
                },
            };

            // Subscribe before binding, so the value adopted from the ladder on BindTo
            // is presumably delivered as an initial change — TODO confirm framework semantics.
            currentMatch.BindValueChanged(matchChanged);
            currentMatch.BindTo(ladder.CurrentMatch);
        }

        /// <summary>
        /// Rebuilds <see cref="mainContainer"/> for the newly selected match.
        /// Clears the screen content entirely when no match is selected.
        /// </summary>
        private void matchChanged(ValueChangedEvent<TournamentMatch> match)
        {
            if (match.NewValue == null)
            {
                mainContainer.Clear();
                return;
            }

            // Incomplete matches with both teams known, scheduled within ~3 days of today.
            // NOTE(review): the DayOfYear difference does not account for year roll-over,
            // so matches near New Year may be filtered incorrectly — confirm intended.
            var upcoming = ladder.Matches.Where(p => !p.Completed.Value && p.Team1.Value != null && p.Team2.Value != null && Math.Abs(p.Date.Value.DayOfYear - DateTimeOffset.UtcNow.DayOfYear) < 4);

            // For matches still missing a team, surface their conditional sub-matches whose
            // acronym set covers all of the parent match's acronyms.
            var conditionals = ladder
                               .Matches.Where(p => !p.Completed.Value && (p.Team1.Value == null || p.Team2.Value == null) && Math.Abs(p.Date.Value.DayOfYear - DateTimeOffset.UtcNow.DayOfYear) < 4)
                               .SelectMany(m => m.ConditionalMatches.Where(cp => m.Acronyms.TrueForAll(a => cp.Acronyms.Contains(a))));

            upcoming = upcoming.Concat(conditionals);
            // Show at most the next 8 matches, soonest first.
            upcoming = upcoming.OrderBy(p => p.Date.Value).Take(8);

            mainContainer.Child = new FillFlowContainer
            {
                RelativeSizeAxes = Axes.Both,
                Direction = FillDirection.Vertical,
                Children = new Drawable[]
                {
                    // Top ~74% of the area: recent (left, 40%) and upcoming (right, 60%) lists.
                    new Container
                    {
                        RelativeSizeAxes = Axes.Both,
                        Height = 0.74f,
                        Child = new FillFlowContainer
                        {
                            RelativeSizeAxes = Axes.Both,
                            Direction = FillDirection.Horizontal,
                            Children = new Drawable[]
                            {
                                new ScheduleContainer("recent matches")
                                {
                                    RelativeSizeAxes = Axes.Both,
                                    Width = 0.4f,
                                    // Completed matches with both teams known, within the same
                                    // ~3-day window, most recent first, capped at 8.
                                    ChildrenEnumerable = ladder.Matches
                                                               .Where(p => p.Completed.Value && p.Team1.Value != null && p.Team2.Value != null && Math.Abs(p.Date.Value.DayOfYear - DateTimeOffset.UtcNow.DayOfYear) < 4)
                                                               .OrderByDescending(p => p.Date.Value)
                                                               .Take(8)
                                                               .Select(p => new ScheduleMatch(p))
                                },
                                new ScheduleContainer("upcoming matches")
                                {
                                    RelativeSizeAxes = Axes.Both,
                                    Width = 0.6f,
                                    ChildrenEnumerable = upcoming.Select(p => new ScheduleMatch(p))
                                },
                            }
                        }
                    },
                    // Bottom strip: the currently selected match with round, team names and date.
                    new ScheduleContainer("coming up next")
                    {
                        RelativeSizeAxes = Axes.Both,
                        Height = 0.25f,
                        Children = new Drawable[]
                        {
                            new FillFlowContainer
                            {
                                AutoSizeAxes = Axes.Both,
                                Direction = FillDirection.Horizontal,
                                Spacing = new Vector2(30),
                                Children = new Drawable[]
                                {
                                    // Timestamp suppressed here; the date is shown separately below.
                                    new ScheduleMatch(match.NewValue, false)
                                    {
                                        Anchor = Anchor.CentreLeft,
                                        Origin = Anchor.CentreLeft,
                                    },
                                    new TournamentSpriteTextWithBackground(match.NewValue.Round.Value?.Name.Value)
                                    {
                                        Anchor = Anchor.CentreLeft,
                                        Origin = Anchor.CentreLeft,
                                        Scale = new Vector2(0.5f)
                                    },
                                    new TournamentSpriteText
                                    {
                                        Anchor = Anchor.CentreLeft,
                                        Origin = Anchor.CentreLeft,
                                        Text = match.NewValue.Team1.Value?.FullName + " vs " + match.NewValue.Team2.Value?.FullName,
                                        Font = OsuFont.Torus.With(size: 24, weight: FontWeight.SemiBold)
                                    },
                                    new FillFlowContainer
                                    {
                                        AutoSizeAxes = Axes.Both,
                                        Direction = FillDirection.Horizontal,
                                        Anchor = Anchor.CentreLeft,
                                        Origin = Anchor.CentreLeft,
                                        Children = new Drawable[]
                                        {
                                            new ScheduleMatchDate(match.NewValue.Date.Value)
                                            {
                                                Font = OsuFont.Torus.With(size: 24, weight: FontWeight.Regular)
                                            }
                                        }
                                    },
                                }
                            },
                        }
                    }
                }
            };
        }

        /// <summary>
        /// A single match row, optionally annotated with its scheduled time.
        /// Conditional matches are rendered dimmed and labelled "(conditional)".
        /// </summary>
        public class ScheduleMatch : DrawableTournamentMatch
        {
            public ScheduleMatch(TournamentMatch match, bool showTimestamp = true)
                : base(match)
            {
                Flow.Direction = FillDirection.Horizontal;

                Scale = new Vector2(0.8f);
                // Slightly oversize the selection box so it frames the scaled-down content.
                CurrentMatchSelectionBox.Scale = new Vector2(1.02f, 1.15f);

                bool conditional = match is ConditionalTournamentMatch;

                if (conditional)
                    Colour = OsuColour.Gray(0.5f);

                if (showTimestamp)
                {
                    // Relative date, attached to the right edge of the row.
                    AddInternal(new DrawableDate(Match.Date.Value)
                    {
                        Anchor = Anchor.TopRight,
                        Origin = Anchor.TopLeft,
                        Colour = OsuColour.Gray(0.7f),
                        Alpha = conditional ? 0.6f : 1,
                        Font = OsuFont.Torus,
                        Margin = new MarginPadding { Horizontal = 10, Vertical = 5 },
                    });
                    // Absolute UTC start time, below the relative date.
                    AddInternal(new TournamentSpriteText
                    {
                        Anchor = Anchor.BottomRight,
                        Origin = Anchor.BottomLeft,
                        Colour = OsuColour.Gray(0.7f),
                        Alpha = conditional ? 0.6f : 1,
                        Margin = new MarginPadding { Horizontal = 10, Vertical = 5 },
                        Text = match.Date.Value.ToUniversalTime().ToString("HH:mm UTC") + (conditional ? " (conditional)" : "")
                    });
                }
            }
        }

        /// <summary>
        /// A <see cref="DrawableDate"/> which prefixes its text with "Started" or "Starting"
        /// depending on whether the date is in the past or future.
        /// </summary>
        public class ScheduleMatchDate : DrawableDate
        {
            public ScheduleMatchDate(DateTimeOffset date, float textSize = OsuFont.DEFAULT_FONT_SIZE, bool italic = true)
                : base(date, textSize, italic)
            {
            }

            // Comparison uses local Now; Date is a DateTimeOffset so the offset is accounted for.
            protected override string Format() => Date < DateTimeOffset.Now
                ? $"Started {base.Format()}"
                : $"Starting {base.Format()}"
                ;
        }

        /// <summary>
        /// A titled vertical list used for the "recent" / "upcoming" / "coming up next" sections.
        /// Children added to this container land in the inner flow below the title.
        /// </summary>
        public class ScheduleContainer : Container
        {
            // Redirect child content into the flow under the title.
            protected override Container<Drawable> Content => content;

            private readonly FillFlowContainer content;

            public ScheduleContainer(string title)
            {
                Padding = new MarginPadding { Left = 60, Top = 10 };
                InternalChildren = new Drawable[]
                {
                    new FillFlowContainer
                    {
                        RelativeSizeAxes = Axes.Both,
                        Direction = FillDirection.Vertical,
                        Children = new Drawable[]
                        {
                            new TournamentSpriteTextWithBackground(title.ToUpperInvariant())
                            {
                                Scale = new Vector2(0.5f)
                            },
                            content = new FillFlowContainer
                            {
                                Direction = FillDirection.Vertical,
                                RelativeSizeAxes = Axes.Both,
                                Margin = new MarginPadding(10)
                            },
                        }
                    },
                };
            }
        }
    }
}