// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Data.Common;
using System.Diagnostics;
using System.Globalization;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Threading;
namespace System.Data.OleDb
{
internal sealed class ColumnBinding
{
// shared with other ColumnBindings
private readonly OleDbDataReader _dataReader; // HCHAPTER
private readonly RowBinding _rowbinding; // for native buffer interaction
private readonly Bindings _bindings;
// unique to this ColumnBinding
private readonly OleDbParameter _parameter; // output value
private readonly int _parameterChangeID;
private readonly int _offsetStatus;
private readonly int _offsetLength;
private readonly int _offsetValue;
// Delegate ad hoc created 'Marshal.GetIDispatchForObject' reflection object cache
private static Func<object, IntPtr> s_getIDispatchForObject;
private readonly int _ordinal;
private readonly int _maxLen;
private readonly short _wType;
private readonly byte _precision;
private readonly int _index;
private readonly int _indexForAccessor; // HCHAPTER
private readonly int _indexWithinAccessor; // HCHAPTER
private readonly bool _ifIRowsetElseIRow;
// unique per current input value
private int _valueBindingOffset;
private int _valueBindingSize;
internal StringMemHandle _sptr;
private GCHandle _pinnedBuffer;
// value is cached via property getters so the original may be released
// for Value, ValueByteArray, ValueString, ValueVariant
private object _value;
// Constructs the binding for one column (or one output parameter) that lives inside
// a shared accessor buffer. 'offset' is the byte offset of this binding's region
// within the RowBinding native buffer; obStatus/obLength/obValue are relative to it.
internal ColumnBinding(OleDbDataReader dataReader, int index, int indexForAccessor, int indexWithinAccessor,
OleDbParameter parameter, RowBinding rowbinding, Bindings bindings, tagDBBINDING binding, int offset,
bool ifIRowsetElseIRow)
{
Debug.Assert(null != rowbinding, "null rowbinding");
Debug.Assert(null != bindings, "null bindings");
Debug.Assert(ODB.SizeOf_tagDBBINDING <= offset, "invalid offset" + offset);
_dataReader = dataReader;
_rowbinding = rowbinding;
_bindings = bindings;
_index = index;
_indexForAccessor = indexForAccessor;
_indexWithinAccessor = indexWithinAccessor;
// remember the parameter AND its ChangeID so a later rebind of a mutated
// parameter can be detected via IsParameterBindingInvalid
if (null != parameter)
{
_parameter = parameter;
_parameterChangeID = parameter.ChangeID;
}
// absolute offsets into the native buffer for the status/length/value slots
_offsetStatus = binding.obStatus.ToInt32() + offset;
_offsetLength = binding.obLength.ToInt32() + offset;
_offsetValue = binding.obValue.ToInt32() + offset;
Debug.Assert(0 <= _offsetStatus, "negative _offsetStatus");
Debug.Assert(0 <= _offsetLength, "negative _offsetLength");
Debug.Assert(0 <= _offsetValue, "negative _offsetValue");
_ordinal = binding.iOrdinal.ToInt32();
_maxLen = binding.cbMaxLen.ToInt32();
_wType = binding.wType;
_precision = binding.bPrecision;
_ifIRowsetElseIRow = ifIRowsetElseIRow;
// NOTE: the Bindings property positions CurrentIndex as a side effect before
// ParamSize is read
SetSize(Bindings.ParamSize.ToInt32());
}
// Shared Bindings instance; positions its CurrentIndex at this column's slot
// before returning, so callers always see this binding's column-access entry.
internal Bindings Bindings
{
    get
    {
        _bindings.CurrentIndex = IndexWithinAccessor;
        return _bindings;
    }
}

// Native buffer this binding reads from and writes to.
internal RowBinding RowBinding => _rowbinding;

// Provider-side ordinal of the bound column.
internal int ColumnBindingOrdinal => _ordinal;

// cbMaxLen from the DBBINDING: capacity of the inline value slot in bytes.
private int ColumnBindingMaxLen => _maxLen;

// bPrecision from the DBBINDING (used for NUMERIC writes).
private byte ColumnBindingPrecision => _precision;

// wType from the DBBINDING: the DBTYPE this binding marshals as.
private short DbType => _wType;

// Managed type that corresponds to this binding's DBTYPE.
private Type ExpectedType => NativeDBType.FromDBType(DbType, false, false).dataType;

// Position of this binding among all column bindings.
internal int Index => _index;

internal int IndexForAccessor => _indexForAccessor;

internal int IndexWithinAccessor => _indexWithinAccessor;

// offset within the value of where to start copying
private int ValueBindingOffset => _valueBindingOffset;

// maximum size of the value to copy
private int ValueBindingSize => _valueBindingSize;

// offset within the native buffer to put the value
internal int ValueOffset => _offsetValue;
// Owning data reader; asserted non-null because only HCHAPTER bindings call this.
private OleDbDataReader DataReader()
{
    Debug.Assert(null != _dataReader, "null DataReader");
    return _dataReader;
}

// True when the bound parameter has been mutated (ChangeID advanced) or the
// caller is presenting a different parameter instance than the one bound.
internal bool IsParameterBindingInvalid(OleDbParameter parameter)
{
    Debug.Assert((null != _parameter) && (null != parameter), "null parameter");
    if (_parameter.ChangeID != _parameterChangeID)
    {
        return true;
    }
    return _parameter != parameter;
}

// Null when the status slot says S_ISNULL, or when a (PROP)VARIANT binding
// actually carries a DBNull payload.
internal bool IsValueNull()
{
    if (DBStatus.S_ISNULL == StatusValue())
    {
        return true;
    }
    bool isVariantType = (NativeDBType.VARIANT == DbType) || (NativeDBType.PROPVARIANT == DbType);
    return isVariantType && Convert.IsDBNull(ValueVariant());
}
// Reads the byte-length slot for this binding. The IRowset path reads it out
// of the native row buffer; the IRow path reads it from the column-access
// structure. Negative provider values are clamped to zero.
private int LengthValue()
{
    int byteCount = _ifIRowsetElseIRow
        ? RowBinding.ReadIntPtr(_offsetLength).ToInt32()
        : Bindings.DBColumnAccess[IndexWithinAccessor].cbDataLen.ToInt32();
    return (byteCount > 0) ? byteCount : 0;
}

// Writes the byte-length slot into the native buffer.
private void LengthValue(int value)
{
    Debug.Assert(0 <= value, "negative LengthValue");
    RowBinding.WriteIntPtr(_offsetLength, (IntPtr)value);
}

// The bound output parameter; only valid for parameter bindings.
internal OleDbParameter Parameter()
{
    Debug.Assert(null != _parameter, "null parameter");
    return _parameter;
}
// Drops the cached managed value and releases native resources created while
// marshaling the previous value (BSTR/string memory and any pinned buffer).
internal void ResetValue()
{
    _value = null;

    // detach the handle before disposing so a failure cannot leave a
    // disposed handle reachable through _sptr
    StringMemHandle handle = _sptr;
    _sptr = null;
    handle?.Dispose();

    if (_pinnedBuffer.IsAllocated)
    {
        _pinnedBuffer.Free();
    }
}
// Reads the DBSTATUS slot: IRowset bindings read it from the native row
// buffer, IRow bindings from the column-access structure.
internal DBStatus StatusValue()
{
if (_ifIRowsetElseIRow)
{
return (DBStatus)RowBinding.ReadInt32(_offsetStatus);
}
else
{
return (DBStatus)Bindings.DBColumnAccess[IndexWithinAccessor].dwStatus;
}
}
// Writes the DBSTATUS slot. Only S_OK, S_ISNULL and S_DEFAULT are ever
// written by this class (debug-checked below).
internal void StatusValue(DBStatus value)
{
#if DEBUG
switch (value)
{
case DBStatus.S_OK:
case DBStatus.S_ISNULL:
case DBStatus.S_DEFAULT:
break;
default:
Debug.Assert(false, "unexpected StatusValue");
break;
}
#endif
RowBinding.WriteInt32(_offsetStatus, (int)value);
}
// Records the user-specified offset into the value; negative offsets are rejected.
internal void SetOffset(int offset)
{
    if (offset < 0)
    {
        throw ADP.InvalidOffsetValue(offset);
    }
    // offset is guaranteed non-negative here
    _valueBindingOffset = offset;
}

// Records the maximum number of value elements to copy; negative sizes clamp to zero.
internal void SetSize(int size)
{
    _valueBindingSize = (size > 0) ? size : 0;
}
// Marks the binding DBSTATUS_S_ISNULL and zeroes the value slot.
private void SetValueDBNull()
{
LengthValue(0);
StatusValue(DBStatus.S_ISNULL);
RowBinding.WriteInt64(ValueOffset, 0); // safe because AlignDataSize forces 8 byte blocks
}
// Marks the binding DBSTATUS_S_DEFAULT (provider supplies the default value)
// and zeroes the value slot.
private void SetValueEmpty()
{
LengthValue(0);
StatusValue(DBStatus.S_DEFAULT);
RowBinding.WriteInt64(ValueOffset, 0); // safe because AlignDataSize forces 8 byte blocks
}
// Materializes the current value from the native buffer as a boxed managed
// object, dispatching on the binding's DBTYPE. The result is cached in _value
// (cleared by ResetValue) so the native buffer may be released afterwards.
// Non-CLS types are widened as noted inline (I1->Int16, UI2->Int32, UI4->Int64,
// UI8->Decimal).
internal object Value()
{
object value = _value;
if (null == value)
{
switch (StatusValue())
{
case DBStatus.S_OK:
value = DBNull.Value;
break;
case NativeDBType.I2:
value = Value_I2(); // Int16
break;
case NativeDBType.I4:
value = Value_I4(); // Int32
break;
case NativeDBType.R4:
value = Value_R4(); // Single
break;
case NativeDBType.R8:
value = Value_R8(); // Double
break;
case NativeDBType.CY:
value = Value_CY(); // Decimal
break;
case NativeDBType.DATE:
value = Value_DATE(); // DateTime
break;
case NativeDBType.BSTR:
value = Value_BSTR(); // String
break;
case NativeDBType.IDISPATCH:
value = Value_IDISPATCH(); // Object
break;
case NativeDBType.ERROR:
value = Value_ERROR(); // Int32
break;
case NativeDBType.BOOL:
value = Value_BOOL(); // Boolean
break;
case NativeDBType.VARIANT:
value = Value_VARIANT(); // Object
break;
case NativeDBType.IUNKNOWN:
value = Value_IUNKNOWN(); // Object
break;
case NativeDBType.DECIMAL:
value = Value_DECIMAL(); // Decimal
break;
case NativeDBType.I1:
value = (short)Value_I1(); // SByte->Int16
break;
case NativeDBType.UI1:
value = Value_UI1(); // Byte
break;
case NativeDBType.UI2:
value = (int)Value_UI2(); // UInt16->Int32
break;
case NativeDBType.UI4:
value = (long)Value_UI4(); // UInt32->Int64
break;
case NativeDBType.I8:
value = Value_I8(); // Int64
break;
case NativeDBType.UI8:
value = (decimal)Value_UI8(); // UInt64->Decimal
break;
case NativeDBType.FILETIME:
value = Value_FILETIME(); // DateTime
break;
case NativeDBType.GUID:
value = Value_GUID(); // Guid
break;
case NativeDBType.BYTES:
value = Value_BYTES(); // Byte[]
break;
case NativeDBType.WSTR:
value = Value_WSTR(); // String
break;
case NativeDBType.NUMERIC:
value = Value_NUMERIC(); // Decimal
break;
case NativeDBType.DBDATE:
value = Value_DBDATE(); // DateTime
break;
case NativeDBType.DBTIME:
value = Value_DBTIME(); // TimeSpan
break;
case NativeDBType.DBTIMESTAMP:
value = Value_DBTIMESTAMP(); // DateTime
break;
case NativeDBType.PROPVARIANT:
value = Value_VARIANT(); // Object
break;
case NativeDBType.HCHAPTER:
value = Value_HCHAPTER(); // OleDbDataReader
break;
case (NativeDBType.BYREF | NativeDBType.BYTES):
value = Value_ByRefBYTES();
break;
case (NativeDBType.BYREF | NativeDBType.WSTR):
value = Value_ByRefWSTR();
break;
default:
throw ODB.GVtUnknown(DbType);
// these DBTYPEs should never have been bound this way; the debug-only
// cases document that and fall through to the same failure
#if DEBUG
case NativeDBType.STR:
Debug.Assert(false, "should have bound as WSTR");
goto default;
case NativeDBType.VARNUMERIC:
Debug.Assert(false, "should have bound as NUMERIC");
goto default;
case NativeDBType.UDT:
Debug.Assert(false, "UDT binding should not have been encountered");
goto default;
case (NativeDBType.BYREF | NativeDBType.STR):
Debug.Assert(false, "should have bound as BYREF|WSTR");
goto default;
#endif
}
break;
// truncation can only occur for variable-length data; surface the
// truncated prefix rather than failing
case DBStatus.S_TRUNCATED:
switch (DbType)
{
case NativeDBType.BYTES:
value = Value_BYTES();
break;
case NativeDBType.WSTR:
value = Value_WSTR();
break;
case (NativeDBType.BYREF | NativeDBType.BYTES):
value = Value_ByRefBYTES();
break;
case (NativeDBType.BYREF | NativeDBType.WSTR):
value = Value_ByRefWSTR();
break;
default:
throw ODB.GVtUnknown(DbType);
#if DEBUG
case NativeDBType.STR:
Debug.Assert(false, "should have bound as WSTR");
goto default;
case (NativeDBType.BYREF | NativeDBType.STR):
Debug.Assert(false, "should have bound as BYREF|WSTR");
goto default;
#endif
}
break;
// both NULL and provider-default surface to the user as DBNull
case DBStatus.S_ISNULL:
case DBStatus.S_DEFAULT:
value = DBNull.Value;
break;
default:
throw CheckTypeValueStatusValue();
}
_value = value;
}
return value;
}
// Writes a managed value into the native buffer according to the binding's
// DBTYPE. null maps to DBSTATUS_S_DEFAULT (provider default); DBNull maps to
// DBSTATUS_S_ISNULL. The widened representations produced by Value() are
// accepted and narrowed back (Int16->I1, Int32->UI2, Int64->UI4, Decimal->UI8).
internal void Value(object value)
{
if (null == value)
{
SetValueEmpty();
}
else if (Convert.IsDBNull(value))
{
SetValueDBNull();
}
else
switch (DbType)
{
case NativeDBType.EMPTY:
SetValueEmpty();
break;
case NativeDBType.NULL: // language null - no representation, use DBNull
SetValueDBNull();
break;
case NativeDBType.I2:
Value_I2((short)value);
break;
case NativeDBType.I4:
Value_I4((int)value);
break;
case NativeDBType.R4:
Value_R4((float)value);
break;
case NativeDBType.R8:
Value_R8((double)value);
break;
case NativeDBType.CY:
Value_CY((decimal)value);
break;
case NativeDBType.DATE:
Value_DATE((DateTime)value);
break;
case NativeDBType.BSTR:
Value_BSTR((string)value);
break;
case NativeDBType.IDISPATCH:
Value_IDISPATCH(value);
break;
case NativeDBType.ERROR:
Value_ERROR((int)value);
break;
case NativeDBType.BOOL:
Value_BOOL((bool)value);
break;
case NativeDBType.VARIANT:
Value_VARIANT(value);
break;
case NativeDBType.IUNKNOWN:
Value_IUNKNOWN(value);
break;
case NativeDBType.DECIMAL:
Value_DECIMAL((decimal)value);
break;
// I1 accepts the Int16 widening that Value() produced
case NativeDBType.I1:
if (value is short)
{
Value_I1(Convert.ToSByte((short)value, CultureInfo.InvariantCulture));
}
else
{
Value_I1((sbyte)value);
}
break;
case NativeDBType.UI1:
Value_UI1((byte)value);
break;
// UI2 accepts the Int32 widening that Value() produced
case NativeDBType.UI2:
if (value is int)
{
Value_UI2(Convert.ToUInt16((int)value, CultureInfo.InvariantCulture));
}
else
{
Value_UI2((ushort)value);
}
break;
// UI4 accepts the Int64 widening that Value() produced
case NativeDBType.UI4:
if (value is long)
{
Value_UI4(Convert.ToUInt32((long)value, CultureInfo.InvariantCulture));
}
else
{
Value_UI4((uint)value);
}
break;
case NativeDBType.I8:
Value_I8((long)value);
break;
// UI8 accepts the Decimal widening that Value() produced
case NativeDBType.UI8:
if (value is decimal)
{
Value_UI8(Convert.ToUInt64((decimal)value, CultureInfo.InvariantCulture));
}
else
{
Value_UI8((ulong)value);
}
break;
case NativeDBType.FILETIME:
Value_FILETIME((DateTime)value);
break;
case NativeDBType.GUID:
Value_GUID((Guid)value);
break;
case NativeDBType.BYTES:
Value_BYTES((byte[])value);
break;
// WSTR accepts either a string or a char[]
case NativeDBType.WSTR:
if (value is string)
{
Value_WSTR((string)value);
}
else
{
Value_WSTR((char[])value);
}
break;
case NativeDBType.NUMERIC:
Value_NUMERIC((decimal)value);
break;
case NativeDBType.DBDATE:
Value_DBDATE((DateTime)value);
break;
case NativeDBType.DBTIME:
Value_DBTIME((TimeSpan)value);
break;
case NativeDBType.DBTIMESTAMP:
Value_DBTIMESTAMP((DateTime)value);
break;
case NativeDBType.PROPVARIANT:
Value_VARIANT(value);
break;
case (NativeDBType.BYREF | NativeDBType.BYTES):
Value_ByRefBYTES((byte[])value);
break;
case (NativeDBType.BYREF | NativeDBType.WSTR):
if (value is string)
{
Value_ByRefWSTR((string)value);
}
else
{
Value_ByRefWSTR((char[])value);
}
break;
default:
Debug.Assert(false, "unknown DBTYPE");
throw ODB.SVtUnknown(DbType);
// these DBTYPEs should never have been bound this way; the debug-only
// cases document that and fall through to the same failure
#if DEBUG
case NativeDBType.STR:
Debug.Assert(false, "Should have bound as WSTR");
goto default;
case NativeDBType.UDT:
Debug.Assert(false, "UDT binding should not have been encountered");
goto default;
case NativeDBType.HCHAPTER:
Debug.Assert(false, "not allowed to set HCHAPTER");
goto default;
case NativeDBType.VARNUMERIC:
Debug.Assert(false, "should have bound as NUMERIC");
goto default;
#endif
}
}
// Reads a VARIANT_BOOL (2 bytes): any nonzero bit pattern is true.
internal bool Value_BOOL()
{
    Debug.Assert((NativeDBType.BOOL == DbType), "Value_BOOL");
    Debug.Assert((DBStatus.S_OK == StatusValue()), "Value_BOOL");
    return RowBinding.ReadInt16(ValueOffset) != 0;
}

// Writes a VARIANT_BOOL as VARIANT_TRUE / VARIANT_FALSE.
private void Value_BOOL(bool value)
{
    Debug.Assert((NativeDBType.BOOL == DbType), "Value_BOOL");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    short native = (short)(value ? ODB.VARIANT_TRUE : ODB.VARIANT_FALSE);
    RowBinding.WriteInt16(ValueOffset, native);
}
// Reads a BSTR pointer from the buffer and copies it into a managed string.
// The CER/DangerousAddRef pattern keeps the native buffer alive while the
// pointer it contains is dereferenced.
private string Value_BSTR()
{
Debug.Assert((NativeDBType.BSTR == DbType), "Value_BSTR");
Debug.Assert((DBStatus.S_OK == StatusValue()), "Value_BSTR");
string value = "";
RowBinding bindings = RowBinding;
bool mustRelease = false;
RuntimeHelpers.PrepareConstrainedRegions();
try
{
bindings.DangerousAddRef(ref mustRelease);
IntPtr ptr = bindings.ReadIntPtr(ValueOffset);
if (ADP.PtrZero != ptr)
{
// a null BSTR pointer means empty string
value = Marshal.PtrToStringBSTR(ptr);
}
}
finally
{
if (mustRelease)
{
bindings.DangerousRelease();
}
}
return value;
}
// Allocates a BSTR copy of 'value' and stores its pointer into the buffer.
private void Value_BSTR(string value)
{
Debug.Assert((null != value), "Value_BSTR null");
Debug.Assert((NativeDBType.BSTR == DbType), "Value_BSTR");
LengthValue(value.Length * 2); /* bytecount*/
StatusValue(DBStatus.S_OK);
RowBinding.SetBstrValue(ValueOffset, value);
}
// Reads a BYREF|BYTES binding: the buffer holds a pointer to provider-owned
// bytes whose count is in the length slot. Copies into a fresh byte[]; a null
// pointer yields an empty array. CER keeps the buffer alive while dereferencing.
private byte[] Value_ByRefBYTES()
{
Debug.Assert(((NativeDBType.BYREF | NativeDBType.BYTES) == DbType), "Value_ByRefBYTES");
Debug.Assert((DBStatus.S_OK == StatusValue()), "Value_ByRefBYTES");
byte[] value = null;
RowBinding bindings = RowBinding;
bool mustRelease = false;
RuntimeHelpers.PrepareConstrainedRegions();
try
{
bindings.DangerousAddRef(ref mustRelease);
IntPtr ptr = bindings.ReadIntPtr(ValueOffset);
if (ADP.PtrZero != ptr)
{
value = new byte[LengthValue()];
Marshal.Copy(ptr, value, 0, value.Length);
}
}
finally
{
if (mustRelease)
{
bindings.DangerousRelease();
}
}
return ((null != value) ? value : Array.Empty<byte>());
}
// Writes a BYREF|BYTES binding: pins the caller's array (freed by ResetValue)
// and stores a pointer to it (offset by ValueBindingOffset) into the buffer.
private void Value_ByRefBYTES(byte[] value)
{
Debug.Assert(null != value, "Value_ByRefBYTES null");
Debug.Assert((NativeDBType.BYREF | NativeDBType.BYTES) == DbType, "Value_ByRefBYTES");
// we expect the provider/server to apply the silent truncation when binding BY_REF
// if (value.Length < ValueBindingOffset) { throw "Offset must refer to a location within the value" }
int length = ((ValueBindingOffset < value.Length) ? (value.Length - ValueBindingOffset) : 0);
LengthValue(((0 < ValueBindingSize) ? Math.Min(ValueBindingSize, length) : length));
StatusValue(DBStatus.S_OK);
IntPtr ptr = ADP.PtrZero;
if (0 < length)
{ // avoid pinning empty byte[]
_pinnedBuffer = GCHandle.Alloc(value, GCHandleType.Pinned);
ptr = _pinnedBuffer.AddrOfPinnedObject();
ptr = ADP.IntPtrOffset(ptr, ValueBindingOffset);
}
RowBinding.SetByRefValue(ValueOffset, ptr);
}
// Reads a BYREF|WSTR binding: the buffer holds a pointer to provider-owned
// UTF-16 characters; the length slot holds the byte count (hence / 2).
// A null pointer yields "". CER keeps the buffer alive while dereferencing.
private string Value_ByRefWSTR()
{
Debug.Assert((NativeDBType.BYREF | NativeDBType.WSTR) == DbType, "Value_ByRefWSTR");
Debug.Assert((DBStatus.S_OK == StatusValue()) || (DBStatus.S_TRUNCATED == StatusValue()), "Value_ByRefWSTR");
string value = "";
RowBinding bindings = RowBinding;
bool mustRelease = false;
RuntimeHelpers.PrepareConstrainedRegions();
try
{
bindings.DangerousAddRef(ref mustRelease);
IntPtr ptr = bindings.ReadIntPtr(ValueOffset);
if (ADP.PtrZero != ptr)
{
int charCount = LengthValue() / 2;
value = Marshal.PtrToStringUni(ptr, charCount);
}
}
finally
{
if (mustRelease)
{
bindings.DangerousRelease();
}
}
return value;
}
// Writes a BYREF|WSTR binding from a string: pins the string (freed by
// ResetValue) and stores a pointer to its chars, offset by ValueBindingOffset.
private void Value_ByRefWSTR(string value)
{
Debug.Assert(null != value, "Value_ByRefWSTR null");
Debug.Assert((NativeDBType.BYREF | NativeDBType.WSTR) == DbType, "Value_ByRefWSTR");
// we expect the provider/server to apply the silent truncation when binding BY_REF
// if (value.Length < ValueBindingOffset) { throw "Offset must refer to a location within the value" }
int length = ((ValueBindingOffset < value.Length) ? (value.Length - ValueBindingOffset) : 0);
LengthValue(((0 < ValueBindingSize) ? Math.Min(ValueBindingSize, length) : length) * 2); /* charcount->bytecount*/
StatusValue(DBStatus.S_OK);
IntPtr ptr = ADP.PtrZero;
if (0 < length)
{ // avoid pinning empty string, i.e String.Empty
_pinnedBuffer = GCHandle.Alloc(value, GCHandleType.Pinned);
ptr = _pinnedBuffer.AddrOfPinnedObject();
ptr = ADP.IntPtrOffset(ptr, ValueBindingOffset);
}
RowBinding.SetByRefValue(ValueOffset, ptr);
}
// Writes a BYREF|WSTR binding from a char[]; same pinning scheme as the
// string overload.
private void Value_ByRefWSTR(char[] value)
{
Debug.Assert(null != value, "Value_ByRefWSTR null");
Debug.Assert((NativeDBType.BYREF | NativeDBType.WSTR) == DbType, "Value_ByRefWSTR");
// we expect the provider/server to apply the silent truncation when binding BY_REF
// if (value.Length < ValueBindingOffset) { throw "Offset must refer to a location within the value" }
int length = ((ValueBindingOffset < value.Length) ? (value.Length - ValueBindingOffset) : 0);
LengthValue(((0 < ValueBindingSize) ? Math.Min(ValueBindingSize, length) : length) * 2); /* charcount->bytecount*/
StatusValue(DBStatus.S_OK);
IntPtr ptr = ADP.PtrZero;
if (0 < length)
{ // avoid pinning empty char[]
_pinnedBuffer = GCHandle.Alloc(value, GCHandleType.Pinned);
ptr = _pinnedBuffer.AddrOfPinnedObject();
ptr = ADP.IntPtrOffset(ptr, ValueBindingOffset);
}
RowBinding.SetByRefValue(ValueOffset, ptr);
}
// Reads inline BYTES: copies up to cbMaxLen bytes out of the value slot.
private byte[] Value_BYTES()
{
    Debug.Assert(NativeDBType.BYTES == DbType, "Value_BYTES");
    Debug.Assert((DBStatus.S_OK == StatusValue()) || (DBStatus.S_TRUNCATED == StatusValue()), "Value_BYTES");
    // on truncation LengthValue reports the full source size, so clamp to the slot capacity
    int byteCount = Math.Min(LengthValue(), ColumnBindingMaxLen);
    byte[] value = new byte[byteCount];
    RowBinding.ReadBytes(ValueOffset, value, 0, byteCount);
    return value;
}
// Writes inline BYTES, silently truncating to the user-specified Size
// (cbMaxLen); an offset at/past the end of the array writes zero bytes.
private void Value_BYTES(byte[] value)
{
    Debug.Assert(null != value, "Value_BYTES null");
    // added for consistency with every sibling setter (DEBUG-only type check)
    Debug.Assert(NativeDBType.BYTES == DbType, "Value_BYTES");
    // we silently truncate when the user has specified a given Size
    int bytecount = ((ValueBindingOffset < value.Length) ? Math.Min(value.Length - ValueBindingOffset, ColumnBindingMaxLen) : 0);
    LengthValue(bytecount);
    StatusValue(DBStatus.S_OK);
    if (0 < bytecount)
    {
        RowBinding.WriteBytes(ValueOffset, value, ValueBindingOffset, bytecount);
    }
}
// OLE currency (CY): a 64-bit integer scaled by 10,000.
private decimal Value_CY()
{
    Debug.Assert(NativeDBType.CY == DbType, "Value_CY");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_CY");
    long currency = RowBinding.ReadInt64(ValueOffset);
    return decimal.FromOACurrency(currency);
}

private void Value_CY(decimal value)
{
    Debug.Assert(NativeDBType.CY == DbType, "Value_CY");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    long currency = decimal.ToOACurrency(value);
    RowBinding.WriteInt64(ValueOffset, currency);
}

// OLE automation DATE: fractional days since 1899-12-30, stored as a double.
private DateTime Value_DATE()
{
    Debug.Assert(NativeDBType.DATE == DbType, "Value_DATE");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_DATE");
    double oaDate = RowBinding.ReadDouble(ValueOffset);
    return DateTime.FromOADate(oaDate);
}

private void Value_DATE(DateTime value)
{
    Debug.Assert(NativeDBType.DATE == DbType, "Value_DATE");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    double oaDate = value.ToOADate();
    RowBinding.WriteDouble(ValueOffset, oaDate);
}
// Reads a DBDATE (date-only) value as a DateTime.
private DateTime Value_DBDATE()
{
    Debug.Assert(NativeDBType.DBDATE == DbType, "Value_DBDATE");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_DBDATE");
    return RowBinding.ReadDate(ValueOffset);
}
// Writes a DBDATE (date-only) value.
private void Value_DBDATE(DateTime value)
{
    // fixed copy/paste assert message: was "Value_DATE"
    Debug.Assert(NativeDBType.DBDATE == DbType, "Value_DBDATE");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    RowBinding.WriteDate(ValueOffset, value);
}
// Reads a DBTIME (time-of-day) value as a TimeSpan.
private TimeSpan Value_DBTIME()
{
    Debug.Assert(NativeDBType.DBTIME == DbType, "Value_DBTIME");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_DBTIME");
    return RowBinding.ReadTime(ValueOffset);
}
// Writes a DBTIME (time-of-day) value.
private void Value_DBTIME(TimeSpan value)
{
    Debug.Assert(NativeDBType.DBTIME == DbType, "Value_DBTIME");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    RowBinding.WriteTime(ValueOffset, value);
}
// Reads a DBTIMESTAMP (date+time) value as a DateTime.
private DateTime Value_DBTIMESTAMP()
{
    Debug.Assert(NativeDBType.DBTIMESTAMP == DbType, "Value_DBTIMESTAMP");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_DBTIMESTAMP");
    return RowBinding.ReadDateTime(ValueOffset);
}
// Writes a DBTIMESTAMP (date+time) value.
private void Value_DBTIMESTAMP(DateTime value)
{
    Debug.Assert(NativeDBType.DBTIMESTAMP == DbType, "Value_DBTIMESTAMP");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    RowBinding.WriteDateTime(ValueOffset, value);
}
// Reads an OLE DB DECIMAL: 16 bytes treated as four Int32s.
// buffer[0] packs sign (bit 31) and scale (bits 16-23); buffer[1] = Hi32,
// buffer[2] = Lo32, buffer[3] = Mid32 -- note the non-obvious ordering below.
private decimal Value_DECIMAL()
{
Debug.Assert(NativeDBType.DECIMAL == DbType, "Value_DECIMAL");
Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_DECIMAL");
int[] buffer = new int[4];
RowBinding.ReadInt32Array(ValueOffset, buffer, 0, 4);
return new decimal(
buffer[2], // low
buffer[3], // mid
buffer[1], // high
(0 != (buffer[0] & unchecked((int)0x80000000))), // sign
unchecked((byte)((buffer[0] & unchecked((int)0x00FF0000)) >> 16))); // scale
}
// Writes an OLE DB DECIMAL. decimal.GetBits returns [lo, mid, hi, flags];
// the native layout wants [flags, hi, lo, mid] (the inverse of the read above).
private void Value_DECIMAL(decimal value)
{
Debug.Assert(NativeDBType.DECIMAL == DbType, "Value_DECIMAL");
/* pending breaking change approval
if (_precision < ((System.Data.SqlTypes.SqlDecimal) value).Precision) {
throw ADP.ParameterValueOutOfRange(value);
}
*/
LengthValue(0);
StatusValue(DBStatus.S_OK);
int[] tmp = decimal.GetBits(value);
int[] buffer = new int[4] {
tmp[3], tmp[2], tmp[0], tmp[1]
};
RowBinding.WriteInt32Array(ValueOffset, buffer, 0, 4);
}
// DBTYPE_ERROR: an SCODE/HRESULT stored as a 32-bit integer.
private int Value_ERROR()
{
    Debug.Assert(NativeDBType.ERROR == DbType, "Value_ERROR");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_ERROR");
    return RowBinding.ReadInt32(ValueOffset);
}

private void Value_ERROR(int value)
{
    Debug.Assert(NativeDBType.ERROR == DbType, "Value_ERROR");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    RowBinding.WriteInt32(ValueOffset, value);
}

// FILETIME: 64-bit count of 100ns intervals since 1601-01-01, converted via
// DateTime's local-time FILETIME helpers.
private DateTime Value_FILETIME()
{
    Debug.Assert(NativeDBType.FILETIME == DbType, "Value_FILETIME");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_FILETIME");
    long fileTime = RowBinding.ReadInt64(ValueOffset);
    return DateTime.FromFileTime(fileTime);
}

private void Value_FILETIME(DateTime value)
{
    Debug.Assert(NativeDBType.FILETIME == DbType, "Value_FILETIME");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    long fileTime = value.ToFileTime();
    RowBinding.WriteInt64(ValueOffset, fileTime);
}
// Reads a GUID stored inline as 16 bytes.
internal Guid Value_GUID()
{
Debug.Assert(NativeDBType.GUID == DbType, "Value_GUID");
Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_GUID");
return RowBinding.ReadGuid(ValueOffset);
}
// Writes a GUID inline as 16 bytes.
private void Value_GUID(Guid value)
{
Debug.Assert(NativeDBType.GUID == DbType, "Value_GUID");
LengthValue(0);
StatusValue(DBStatus.S_OK);
RowBinding.WriteGuid(ValueOffset, value);
}
// HCHAPTER: hands the chapter handle location to the owning reader, which
// returns a nested OleDbDataReader over the child rowset.
internal OleDbDataReader Value_HCHAPTER()
{
Debug.Assert(NativeDBType.HCHAPTER == DbType, "Value_HCHAPTER");
Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_HCHAPTER");
return DataReader().ResetChapter(IndexForAccessor, IndexWithinAccessor, RowBinding, ValueOffset);
}
// I1: stored as a single byte, reinterpreted as signed on the way out.
private sbyte Value_I1()
{
    Debug.Assert(NativeDBType.I1 == DbType, "Value_I1");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_I1");
    return unchecked((sbyte)RowBinding.ReadByte(ValueOffset));
}

private void Value_I1(sbyte value)
{
    Debug.Assert(NativeDBType.I1 == DbType, "Value_I1");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    byte raw = unchecked((byte)value);
    RowBinding.WriteByte(ValueOffset, raw);
}

// I2: 16-bit signed integer stored inline.
internal short Value_I2()
{
    Debug.Assert(NativeDBType.I2 == DbType, "Value_I2");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_I2");
    return RowBinding.ReadInt16(ValueOffset);
}

private void Value_I2(short value)
{
    Debug.Assert(NativeDBType.I2 == DbType, "Value_I2");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    RowBinding.WriteInt16(ValueOffset, value);
}

// I4: 32-bit signed integer stored inline.
private int Value_I4()
{
    Debug.Assert(NativeDBType.I4 == DbType, "Value_I4");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_I4");
    return RowBinding.ReadInt32(ValueOffset);
}

private void Value_I4(int value)
{
    Debug.Assert(NativeDBType.I4 == DbType, "Value_I4");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    RowBinding.WriteInt32(ValueOffset, value);
}

// I8: 64-bit signed integer stored inline.
private long Value_I8()
{
    Debug.Assert(NativeDBType.I8 == DbType, "Value_I8");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_I8");
    return RowBinding.ReadInt64(ValueOffset);
}

private void Value_I8(long value)
{
    Debug.Assert(NativeDBType.I8 == DbType, "Value_I8");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    RowBinding.WriteInt64(ValueOffset, value);
}
// Reads an IDispatch pointer from the buffer and wraps it as a managed RCW.
// CER/DangerousAddRef keeps the native buffer alive while dereferencing.
private object Value_IDISPATCH()
{
Debug.Assert(NativeDBType.IDISPATCH == DbType, "Value_IDISPATCH");
Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_IDISPATCH");
object value;
RowBinding bindings = RowBinding;
bool mustRelease = false;
RuntimeHelpers.PrepareConstrainedRegions();
try
{
bindings.DangerousAddRef(ref mustRelease);
IntPtr ptr = bindings.ReadIntPtr(ValueOffset);
// GetObjectForIUnknown also handles IDispatch-derived interfaces
value = Marshal.GetObjectForIUnknown(ptr);
}
finally
{
if (mustRelease)
{
bindings.DangerousRelease();
}
}
return value;
}
// Writes an IDispatch pointer for 'value' into the buffer.
// Marshal.GetIDispatchForObject is not exposed on all target surfaces, so it
// is resolved lazily via reflection and cached in the static delegate.
private void Value_IDISPATCH(object value)
{
    // UNDONE: OLE DB will IUnknown.Release input storage parameter values
    Debug.Assert(NativeDBType.IDISPATCH == DbType, "Value_IDISPATCH");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    Func<object, IntPtr> getIDispatchForObject = s_getIDispatchForObject;
    if (getIDispatchForObject == null)
    {
        MethodInfo mi = typeof(Marshal).GetMethod("GetIDispatchForObject", BindingFlags.Public | BindingFlags.Static);
        if (mi == null)
        {
            throw new NotSupportedException(SR.PlatformNotSupported_GetIDispatchForObject);
        }
        getIDispatchForObject = (Func<object, IntPtr>)mi.CreateDelegate(typeof(Func<object, IntPtr>));
        s_getIDispatchForObject = getIDispatchForObject;
    }
    // BUG FIX: the pointer was previously produced only inside the lazy-init
    // branch above, so every call after the first wrote IntPtr.Zero into the
    // buffer; the delegate must be invoked on every call.
    IntPtr ptr = getIDispatchForObject(value);
    RowBinding.WriteIntPtr(ValueOffset, ptr);
}
// Reads an IUnknown pointer from the buffer and wraps it as a managed RCW.
// CER/DangerousAddRef keeps the native buffer alive while dereferencing.
private object Value_IUNKNOWN()
{
Debug.Assert(NativeDBType.IUNKNOWN == DbType, "Value_IUNKNOWN");
Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_IUNKNOWN");
object value;
RowBinding bindings = RowBinding;
bool mustRelease = false;
RuntimeHelpers.PrepareConstrainedRegions();
try
{
bindings.DangerousAddRef(ref mustRelease);
IntPtr ptr = bindings.ReadIntPtr(ValueOffset);
value = Marshal.GetObjectForIUnknown(ptr);
}
finally
{
if (mustRelease)
{
bindings.DangerousRelease();
}
}
return value;
}
// Writes an IUnknown pointer for 'value' into the buffer (AddRef'd by the marshaler).
private void Value_IUNKNOWN(object value)
{
// UNDONE: OLE DB will IUnknown.Release input storage parameter values
Debug.Assert(NativeDBType.IUNKNOWN == DbType, "Value_IUNKNOWN");
LengthValue(0);
StatusValue(DBStatus.S_OK);
IntPtr ptr = Marshal.GetIUnknownForObject(value);
RowBinding.WriteIntPtr(ValueOffset, ptr);
}
// Reads a DB_NUMERIC value as a decimal (layout handled by RowBinding.ReadNumeric).
private decimal Value_NUMERIC()
{
Debug.Assert(NativeDBType.NUMERIC == DbType, "Value_NUMERIC");
Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_NUMERIC");
return RowBinding.ReadNumeric(ValueOffset);
}
// Writes a DB_NUMERIC value using the binding's declared precision.
private void Value_NUMERIC(decimal value)
{
Debug.Assert(NativeDBType.NUMERIC == DbType, "Value_NUMERIC");
/* pending breaking change approval
if (_precision < ((System.Data.SqlTypes.SqlDecimal) value).Precision) {
throw ADP.ParameterValueOutOfRange(value);
}
*/
LengthValue(0);
StatusValue(DBStatus.S_OK);
RowBinding.WriteNumeric(ValueOffset, value, ColumnBindingPrecision);
}
// R4: 32-bit IEEE float stored inline.
private float Value_R4()
{
    Debug.Assert(NativeDBType.R4 == DbType, "Value_R4");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_R4");
    return RowBinding.ReadSingle(ValueOffset);
}
private void Value_R4(float value)
{
    Debug.Assert(NativeDBType.R4 == DbType, "Value_R4");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    RowBinding.WriteSingle(ValueOffset, value);
}
// R8: 64-bit IEEE double stored inline.
private double Value_R8()
{
    Debug.Assert(NativeDBType.R8 == DbType, "Value_R8");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_R8");
    return RowBinding.ReadDouble(ValueOffset);
}
private void Value_R8(double value)
{
    // fixed copy/paste assert message: was "Value_I4"
    Debug.Assert(NativeDBType.R8 == DbType, "Value_R8");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    RowBinding.WriteDouble(ValueOffset, value);
}
// UI1: unsigned byte stored inline.
private byte Value_UI1()
{
    Debug.Assert(NativeDBType.UI1 == DbType, "Value_UI1");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_UI1");
    return RowBinding.ReadByte(ValueOffset);
}

private void Value_UI1(byte value)
{
    Debug.Assert(NativeDBType.UI1 == DbType, "Value_UI1");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    RowBinding.WriteByte(ValueOffset, value);
}

// UI2: stored as the same 16 bits as Int16; reinterpreted, never range-checked.
internal ushort Value_UI2()
{
    Debug.Assert(NativeDBType.UI2 == DbType, "Value_UI2");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_UI2");
    short raw = RowBinding.ReadInt16(ValueOffset);
    return unchecked((ushort)raw);
}

private void Value_UI2(ushort value)
{
    Debug.Assert(NativeDBType.UI2 == DbType, "Value_UI2");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    short raw = unchecked((short)value);
    RowBinding.WriteInt16(ValueOffset, raw);
}

// UI4: stored as the same 32 bits as Int32; reinterpreted, never range-checked.
internal uint Value_UI4()
{
    Debug.Assert(NativeDBType.UI4 == DbType, "Value_UI4");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_UI4");
    int raw = RowBinding.ReadInt32(ValueOffset);
    return unchecked((uint)raw);
}

private void Value_UI4(uint value)
{
    Debug.Assert(NativeDBType.UI4 == DbType, "Value_UI4");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    int raw = unchecked((int)value);
    RowBinding.WriteInt32(ValueOffset, raw);
}

// UI8: stored as the same 64 bits as Int64; reinterpreted, never range-checked.
internal ulong Value_UI8()
{
    Debug.Assert(NativeDBType.UI8 == DbType, "Value_UI8");
    Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_UI8");
    long raw = RowBinding.ReadInt64(ValueOffset);
    return unchecked((ulong)raw);
}

private void Value_UI8(ulong value)
{
    Debug.Assert(NativeDBType.UI8 == DbType, "Value_UI8");
    LengthValue(0);
    StatusValue(DBStatus.S_OK);
    long raw = unchecked((long)value);
    RowBinding.WriteInt64(ValueOffset, raw);
}
// Reads an inline WSTR (UTF-16). cbMaxLen includes the 2-byte null terminator,
// so at most (cbMaxLen - 2) bytes of character data are read.
private string Value_WSTR()
{
    Debug.Assert(NativeDBType.WSTR == DbType, "Value_WSTR");
    Debug.Assert((DBStatus.S_OK == StatusValue()) || (DBStatus.S_TRUNCATED == StatusValue()), "Value_WSTR");
    Debug.Assert(2 < ColumnBindingMaxLen, "Value_WSTR");
    // on truncation LengthValue reports the full source size, so clamp to the slot capacity
    int byteCount = Math.Min(LengthValue(), ColumnBindingMaxLen - 2);
    return RowBinding.PtrToStringUni(ValueOffset, byteCount / 2);
}
// Writes an inline WSTR from a string, silently truncating to the slot capacity.
private void Value_WSTR(string value)
{
    Debug.Assert(null != value, "Value_WSTR null"); // fixed copy/paste message (was "Value_BYTES null")
    Debug.Assert(NativeDBType.WSTR == DbType, "Value_WSTR");
    // we silently truncate when the user has specified a given Size
    int charCount = ((ValueBindingOffset < value.Length) ? Math.Min(value.Length - ValueBindingOffset, (ColumnBindingMaxLen - 2) / 2) : 0);
    LengthValue(charCount * 2);
    StatusValue(DBStatus.S_OK);
    if (0 < charCount)
    {
        // BUG FIX: ToCharArray already applied ValueBindingOffset, so the copy
        // must start at index 0 of 'chars'; passing ValueBindingOffset again
        // double-applied the offset and over-read past the end of the array
        // whenever a nonzero offset was set.
        char[] chars = value.ToCharArray(ValueBindingOffset, charCount);
        RowBinding.WriteCharArray(ValueOffset, chars, 0, charCount);
    }
}
// Writes an inline WSTR from a char[]; here the offset IS the index into the
// caller's array, so it is passed through to the copy.
private void Value_WSTR(char[] value)
{
    Debug.Assert(null != value, "Value_WSTR null"); // fixed copy/paste message (was "Value_BYTES null")
    Debug.Assert(NativeDBType.WSTR == DbType, "Value_WSTR");
    // we silently truncate when the user has specified a given Size
    int charCount = ((ValueBindingOffset < value.Length) ? Math.Min(value.Length - ValueBindingOffset, (ColumnBindingMaxLen - 2) / 2) : 0);
    LengthValue(charCount * 2);
    StatusValue(DBStatus.S_OK);
    if (0 < charCount)
    {
        RowBinding.WriteCharArray(ValueOffset, value, ValueBindingOffset, charCount);
    }
}
// Reads a (PROP)VARIANT payload and converts it to a managed object
// (conversion handled by RowBinding.GetVariantValue).
private object Value_VARIANT()
{
Debug.Assert((NativeDBType.VARIANT == DbType) || (NativeDBType.PROPVARIANT == DbType), "Value_VARIANT");
Debug.Assert(DBStatus.S_OK == StatusValue(), "Value_VARIANT");
return RowBinding.GetVariantValue(ValueOffset);
}
// Writes a managed object into the buffer as a (PROP)VARIANT.
private void Value_VARIANT(object value)
{
Debug.Assert((NativeDBType.VARIANT == DbType) || (NativeDBType.PROPVARIANT == DbType), "Value_VARIANT");
LengthValue(0);
StatusValue(DBStatus.S_OK);
RowBinding.SetVariantValue(ValueOffset, value);
}
// Typed accessor for GetBoolean: only BOOL and VARIANT bindings convert;
// anything else (including NULL/TRUNCATED statuses) throws.
internal bool ValueBoolean()
{
    if (DBStatus.S_OK != StatusValue())
    {
        throw CheckTypeValueStatusValue(typeof(bool));
    }
    switch (DbType)
    {
        case NativeDBType.BOOL:
            return Value_BOOL();
        case NativeDBType.VARIANT:
            return (bool)ValueVariant();
        default:
            throw ODB.ConversionRequired();
    }
}
// Converts the bound value to a byte[]; the result is cached in _value so the
// native buffer may be released after the first call. Truncated values are
// still returned for BYTES/BYREF bindings (best-effort), but not for VARIANT.
internal byte[] ValueByteArray()
{
    byte[] result = (byte[])_value;
    if (null != result)
    {
        return result;
    }
    DBStatus status = StatusValue();
    if ((DBStatus.S_OK == status) || (DBStatus.S_TRUNCATED == status))
    {
        switch (DbType)
        {
            case NativeDBType.BYTES:
                result = Value_BYTES();
                break;
            case (NativeDBType.BYREF | NativeDBType.BYTES):
                result = Value_ByRefBYTES();
                break;
            case NativeDBType.VARIANT:
                if (DBStatus.S_OK != status)
                {
                    // a truncated variant cannot be materialized
                    throw ODB.ConversionRequired();
                }
                result = (byte[])ValueVariant();
                break;
            default:
                throw ODB.ConversionRequired();
        }
    }
    else
    {
        throw CheckTypeValueStatusValue(typeof(byte[]));
    }
    _value = result;
    return result;
}
// Converts the bound value to a System.Byte; only UI1 and VARIANT bindings convert.
internal byte ValueByte()
{
    if (DBStatus.S_OK != StatusValue())
    {
        throw CheckTypeValueStatusValue(typeof(byte));
    }
    switch (DbType)
    {
        case NativeDBType.UI1:
            return Value_UI1();
        case NativeDBType.VARIANT:
            return (byte)ValueVariant();
        default:
            throw ODB.ConversionRequired();
    }
}
// Returns the child OleDbDataReader for an HCHAPTER (hierarchical rowset)
// binding; the reader is cached in _value so it is only created once.
internal OleDbDataReader ValueChapter()
{
    OleDbDataReader value = (OleDbDataReader)_value;
    if (null == value)
    {
        switch (StatusValue())
        {
            case DBStatus.S_OK:
                switch (DbType)
                {
                    case NativeDBType.HCHAPTER:
                        value = Value_HCHAPTER(); // OleDbDataReader
                        break;
                    default:
                        throw ODB.ConversionRequired();
                }
                break;
            default:
                // BUGFIX: was typeof(string) (copy/paste); report the type actually
                // requested so the status-based error message names the right target
                throw CheckTypeValueStatusValue(typeof(OleDbDataReader));
        }
        _value = value;
    }
    return value;
}
// Converts the bound value to a System.DateTime from any of the OLE DB
// date/time bindings (DATE, DBDATE, DBTIMESTAMP, FILETIME) or a VARIANT.
internal DateTime ValueDateTime()
{
    DateTime value;
    switch (StatusValue())
    {
        case DBStatus.S_OK:
            switch (DbType)
            {
                case NativeDBType.DATE:
                    value = Value_DATE();
                    break;
                case NativeDBType.DBDATE:
                    value = Value_DBDATE();
                    break;
                case NativeDBType.DBTIMESTAMP:
                    value = Value_DBTIMESTAMP();
                    break;
                case NativeDBType.FILETIME:
                    value = Value_FILETIME();
                    break;
                case NativeDBType.VARIANT:
                    value = (DateTime)ValueVariant();
                    break;
                default:
                    throw ODB.ConversionRequired();
            }
            break;
        default:
            // BUGFIX: was typeof(short) (copy/paste); error text should name DateTime
            throw CheckTypeValueStatusValue(typeof(DateTime));
    }
    return value;
}
// Converts the bound value to a System.Decimal from CY, DECIMAL, NUMERIC,
// UI8 (widened) or a VARIANT binding.
internal decimal ValueDecimal()
{
    decimal value;
    switch (StatusValue())
    {
        case DBStatus.S_OK:
            switch (DbType)
            {
                case NativeDBType.CY:
                    value = Value_CY();
                    break;
                case NativeDBType.DECIMAL:
                    value = Value_DECIMAL();
                    break;
                case NativeDBType.NUMERIC:
                    value = Value_NUMERIC();
                    break;
                case NativeDBType.UI8:
                    value = (decimal)Value_UI8();
                    break;
                case NativeDBType.VARIANT:
                    value = (decimal)ValueVariant();
                    break;
                default:
                    throw ODB.ConversionRequired();
            }
            break;
        default:
            // BUGFIX: was typeof(short) (copy/paste); error text should name decimal
            throw CheckTypeValueStatusValue(typeof(decimal));
    }
    return value;
}
// Converts the bound value to a System.Guid; only a GUID binding converts.
internal Guid ValueGuid()
{
    Guid value;
    switch (StatusValue())
    {
        case DBStatus.S_OK:
            switch (DbType)
            {
                case NativeDBType.GUID:
                    value = Value_GUID();
                    break;
                default:
                    throw ODB.ConversionRequired();
            }
            break;
        default:
            // BUGFIX: was typeof(short) (copy/paste); error text should name Guid
            throw CheckTypeValueStatusValue(typeof(Guid));
    }
    return value;
}
// Converts the bound value to a System.Int16 from I2, I1 (widened) or a VARIANT.
internal short ValueInt16()
{
    if (DBStatus.S_OK != StatusValue())
    {
        throw CheckTypeValueStatusValue(typeof(short));
    }
    switch (DbType)
    {
        case NativeDBType.I2:
            return Value_I2();
        case NativeDBType.I1:
            return (short)Value_I1();
        case NativeDBType.VARIANT:
            object boxed = ValueVariant();
            // a variant I1 arrives boxed as sbyte and must be widened explicitly;
            // anything else is expected to already be a boxed short
            return (boxed is sbyte) ? (short)(sbyte)boxed : (short)boxed;
        default:
            throw ODB.ConversionRequired();
    }
}
// Converts the bound value to a System.Int32 from I4, UI2 (widened) or a VARIANT.
internal int ValueInt32()
{
    if (DBStatus.S_OK != StatusValue())
    {
        throw CheckTypeValueStatusValue(typeof(int));
    }
    switch (DbType)
    {
        case NativeDBType.I4:
            return Value_I4();
        case NativeDBType.UI2:
            return (int)Value_UI2();
        case NativeDBType.VARIANT:
            object boxed = ValueVariant();
            // a variant UI2 arrives boxed as ushort and widens losslessly to int;
            // anything else is expected to already be a boxed int
            return (boxed is ushort) ? (int)(ushort)boxed : (int)boxed;
        default:
            throw ODB.ConversionRequired();
    }
}
// Converts the bound value to a System.Int64 from I8, UI4 (widened) or a VARIANT.
internal long ValueInt64()
{
    if (DBStatus.S_OK != StatusValue())
    {
        throw CheckTypeValueStatusValue(typeof(long));
    }
    switch (DbType)
    {
        case NativeDBType.I8:
            return Value_I8();
        case NativeDBType.UI4:
            return (long)Value_UI4();
        case NativeDBType.VARIANT:
            object boxed = ValueVariant();
            // a variant UI4 arrives boxed as uint and widens losslessly to long;
            // anything else is expected to already be a boxed long
            return (boxed is uint) ? (long)(uint)boxed : (long)boxed;
        default:
            throw ODB.ConversionRequired();
    }
}
// Converts the bound value to a System.Single; only R4 and VARIANT bindings convert.
internal float ValueSingle()
{
    if (DBStatus.S_OK != StatusValue())
    {
        throw CheckTypeValueStatusValue(typeof(float));
    }
    switch (DbType)
    {
        case NativeDBType.R4:
            return Value_R4();
        case NativeDBType.VARIANT:
            return (float)ValueVariant();
        default:
            throw ODB.ConversionRequired();
    }
}
// Converts the bound value to a System.Double; only R8 and VARIANT bindings convert.
internal double ValueDouble()
{
    if (DBStatus.S_OK != StatusValue())
    {
        throw CheckTypeValueStatusValue(typeof(double));
    }
    switch (DbType)
    {
        case NativeDBType.R8:
            return Value_R8();
        case NativeDBType.VARIANT:
            return (double)ValueVariant();
        default:
            throw ODB.ConversionRequired();
    }
}
// Converts the bound value to a System.String; the result is cached in _value so
// the native buffer may be released after the first call. Truncated values are
// still returned for WSTR/BYREF-WSTR bindings, but not for BSTR or VARIANT.
internal string ValueString()
{
    string result = (string)_value;
    if (null != result)
    {
        return result;
    }
    DBStatus status = StatusValue();
    if (DBStatus.S_OK == status)
    {
        switch (DbType)
        {
            case NativeDBType.BSTR:
                result = Value_BSTR();
                break;
            case NativeDBType.VARIANT:
                result = (string)ValueVariant();
                break;
            case NativeDBType.WSTR:
                result = Value_WSTR();
                break;
            case (NativeDBType.BYREF | NativeDBType.WSTR):
                result = Value_ByRefWSTR();
                break;
            default:
                throw ODB.ConversionRequired();
        }
    }
    else if (DBStatus.S_TRUNCATED == status)
    {
        switch (DbType)
        {
            case NativeDBType.WSTR:
                result = Value_WSTR();
                break;
            case (NativeDBType.BYREF | NativeDBType.WSTR):
                result = Value_ByRefWSTR();
                break;
            default:
                throw ODB.ConversionRequired();
        }
    }
    else
    {
        throw CheckTypeValueStatusValue(typeof(string));
    }
    _value = result;
    return result;
}
// Lazily materializes and caches the variant value so the native
// buffer can be released after the first access.
private object ValueVariant()
{
    if (null == _value)
    {
        _value = Value_VARIANT();
    }
    return _value;
}
// Builds the status-appropriate exception using this binding's expected CLR type.
private Exception CheckTypeValueStatusValue()
{
    return CheckTypeValueStatusValue(ExpectedType);
}
// Maps a non-success DBSTATUS into the exception the caller should throw;
// 'expectedType' only affects the message text of the type-sensitive statuses.
// Returns (rather than throws) so callers can write 'throw CheckTypeValueStatusValue(...)'.
private Exception CheckTypeValueStatusValue(Type expectedType)
{
    switch (StatusValue())
    {
        // S_OK / S_TRUNCATED reaching here means a conversion case was missed upstream;
        // assert in debug builds, then fall through to the generic conversion error.
        case DBStatus.S_OK:
            Debug.Assert(false, "CheckStatusValue: unhandled data with ok status");
            goto case DBStatus.E_CANTCONVERTVALUE;
        case DBStatus.S_TRUNCATED:
            Debug.Assert(false, "CheckStatusValue: unhandled data with truncated status");
            goto case DBStatus.E_CANTCONVERTVALUE;
        case DBStatus.E_BADACCESSOR:
            return ODB.BadAccessor();
        case DBStatus.E_CANTCONVERTVALUE:
            return ODB.CantConvertValue(); // UNDONE: need original data type
        case DBStatus.S_ISNULL: // database null
            return ADP.InvalidCast(); // UNDONE: NullValue exception
        case DBStatus.E_SIGNMISMATCH:
            return ODB.SignMismatch(expectedType);
        case DBStatus.E_DATAOVERFLOW:
            return ODB.DataOverflow(expectedType);
        case DBStatus.E_CANTCREATE:
            return ODB.CantCreate(expectedType);
        case DBStatus.E_UNAVAILABLE:
            return ODB.Unavailable(expectedType);
        default:
            return ODB.UnexpectedStatusValue(StatusValue());
    }
}
}
}
| |
using UnityEngine;
using System.Collections.Generic;
using System;
using Oculus.Avatar;
// Render component for a PBS-V2 skinned mesh avatar part: creates the skinned
// mesh and its materials, applies textures/material state from the native SDK,
// and pushes per-frame blend shape weights to the Unity mesh.
public class OvrAvatarSkinnedMeshPBSV2RenderComponent : OvrAvatarRenderComponent
{
    private OvrAvatarMaterialManager avatarMaterialManager;
    private bool previouslyActive = false;       // tracks activation edge for lazy material init
    private bool isCombinedMaterial = false;
    private ovrAvatarExpressiveParameters ExpressiveParameters;
    private bool EnableExpressive = false;
    private int blendShapeCount = 0;
    private ovrAvatarBlendShapeParams blendShapeParams;
    private const string MAIN_MATERIAL_NAME = "main_material";
    private const string EYE_MATERIAL_NAME = "eye_material";
    private const string DEFAULT_MATERIAL_NAME = "_material";

    // Builds the skinned mesh, selects the platform/feature-appropriate shader,
    // creates materials, and (for combined meshes) kicks off combined-material setup.
    internal void Initialize(
        IntPtr renderPart,
        ovrAvatarRenderPart_SkinnedMeshRenderPBS_V2 skinnedMeshRender,
        OvrAvatarMaterialManager materialManager,
        int thirdPersonLayer,
        int firstPersonLayer,
        bool combinedMesh,
        ovrAvatarAssetLevelOfDetail lod,
        bool assignExpressiveParams,
        OvrAvatar avatar,
        bool isControllerModel)
    {
        avatarMaterialManager = materialManager;
        isCombinedMaterial = combinedMesh;
        mesh = CreateSkinnedMesh(
            skinnedMeshRender.meshAssetID,
            skinnedMeshRender.visibilityMask,
            thirdPersonLayer,
            firstPersonLayer);
        EnableExpressive = assignExpressiveParams;
#if UNITY_ANDROID
        var singleComponentShader = EnableExpressive
            ? avatar.Skinshaded_Expressive_VertFrag_SingleComponent
            : avatar.Skinshaded_VertFrag_SingleComponent;
#else
        var singleComponentShader = EnableExpressive
            ? avatar.Skinshaded_Expressive_SurfaceShader_SingleComponent
            : avatar.Skinshaded_SurfaceShader_SingleComponent;
#endif
        var combinedComponentShader = EnableExpressive
            ? avatar.Skinshaded_Expressive_VertFrag_CombinedMesh
            : avatar.Skinshaded_VertFrag_CombinedMesh;
        var mainShader = isCombinedMaterial ? combinedComponentShader : singleComponentShader;
        if (isControllerModel)
        {
            mainShader = avatar.ControllerShader;
        }
        // BUGFIX: '+' binds tighter than '!=', so the original compared the
        // concatenated string (never null) against null — the condition was
        // always true and a null mainShader would throw on '.name'.
        AvatarLogger.Log("OvrAvatarSkinnedMeshPBSV2RenderComponent Shader is: "
            + (mainShader != null ? mainShader.name : "null"));
        if (EnableExpressive)
        {
            ExpressiveParameters = CAPI.ovrAvatar_GetExpressiveParameters(avatar.sdkAvatar);
            var eyeShader = avatar.EyeLens;
            Material[] matArray = new Material[2];
            matArray[0] = CreateAvatarMaterial(gameObject.name + MAIN_MATERIAL_NAME, mainShader);
            matArray[1] = CreateAvatarMaterial(gameObject.name + EYE_MATERIAL_NAME, eyeShader);
            if (avatar.UseTransparentRenderQueue)
            {
                SetMaterialTransparent(matArray[0]);
            }
            else
            {
                SetMaterialOpaque(matArray[0]);
            }
            // Eye lens shader queue is transparent and set from shader
            matArray[1].renderQueue = -1;
            mesh.materials = matArray;
        }
        else
        {
            mesh.sharedMaterial = CreateAvatarMaterial(gameObject.name + DEFAULT_MATERIAL_NAME, mainShader);
            if (avatar.UseTransparentRenderQueue && !isControllerModel)
            {
                SetMaterialTransparent(mesh.sharedMaterial);
            }
            else
            {
                SetMaterialOpaque(mesh.sharedMaterial);
            }
        }
        bones = mesh.bones;
        if (isCombinedMaterial)
        {
            avatarMaterialManager.SetRenderer(mesh);
            InitializeCombinedMaterial(renderPart, (int)lod);
            avatarMaterialManager.OnCombinedMeshReady();
        }
        blendShapeParams = new ovrAvatarBlendShapeParams();
        blendShapeParams.blendShapeParamCount = 0;
        // 64 matches the native struct's fixed parameter capacity — TODO confirm against SDK header
        blendShapeParams.blendShapeParams = new float[64];
        blendShapeCount = mesh.sharedMesh.blendShapeCount;
    }

    // Per-frame update: applies the SDK transform/visibility, lazily finishes
    // single-component material setup on first activation, and copies blend
    // shape weights from the SDK to the Unity mesh.
    public void UpdateSkinnedMeshRender(
        OvrAvatarComponent component,
        OvrAvatar avatar,
        IntPtr renderPart)
    {
        ovrAvatarVisibilityFlags visibilityMask
            = CAPI.ovrAvatarSkinnedMeshRenderPBSV2_GetVisibilityMask(renderPart);
        ovrAvatarTransform localTransform
            = CAPI.ovrAvatarSkinnedMeshRenderPBSV2_GetTransform(renderPart);
        UpdateSkinnedMesh(avatar, bones, localTransform, visibilityMask, renderPart);
        bool isActive = gameObject.activeSelf;
        // material state is only applied on the inactive -> active transition
        if (mesh != null && !previouslyActive && isActive)
        {
            if (!isCombinedMaterial)
            {
                InitializeSingleComponentMaterial(renderPart, (int)avatar.LevelOfDetail - 1);
            }
        }
        if (blendShapeCount > 0)
        {
            // SDK weights appear to be 0..1 while Unity weights are 0..100 — TODO confirm
            const float BLEND_MULTIPLIER = 100.0f;
            CAPI.ovrAvatarSkinnedMeshRender_GetBlendShapeParams(renderPart, ref blendShapeParams);
            for (uint i = 0; i < blendShapeParams.blendShapeParamCount && i < blendShapeCount; i++)
            {
                float value = blendShapeParams.blendShapeParams[i];
                mesh.SetBlendShapeWeight((int)i, value * BLEND_MULTIPLIER);
            }
        }
        previouslyActive = isActive;
    }

    // Applies the SDK PBS material state (textures, colors, intensities,
    // reflection probe cubemap) to the single-component material.
    private void InitializeSingleComponentMaterial(IntPtr renderPart, int lodIndex)
    {
        ovrAvatarPBSMaterialState materialState =
            CAPI.ovrAvatarSkinnedMeshRenderPBSV2_GetPBSMaterialState(renderPart);
        int componentType = (int)OvrAvatarMaterialManager.GetComponentType(gameObject.name);
        Texture2D diffuseTexture = OvrAvatarComponent.GetLoadedTexture(materialState.albedoTextureID);
        Texture2D normalTexture = OvrAvatarComponent.GetLoadedTexture(materialState.normalTextureID);
        Texture2D metallicTexture = OvrAvatarComponent.GetLoadedTexture(materialState.metallicnessTextureID);
        if (diffuseTexture != null)
        {
            avatarMaterialManager.AddTextureIDToTextureManager(materialState.albedoTextureID, true);
        }
        else
        {
            // texture not loaded yet: use the LOD-appropriate fallback
            diffuseTexture = OvrAvatarSDKManager.Instance.GetTextureCopyManager().FallbackTextureSets[lodIndex].DiffuseRoughness;
        }
        diffuseTexture.anisoLevel = 4;
        if (normalTexture != null)
        {
            avatarMaterialManager.AddTextureIDToTextureManager(materialState.normalTextureID, true);
        }
        else
        {
            normalTexture = OvrAvatarSDKManager.Instance.GetTextureCopyManager().FallbackTextureSets[lodIndex].Normal;
        }
        normalTexture.anisoLevel = 4;
        if (metallicTexture != null)
        {
            avatarMaterialManager.AddTextureIDToTextureManager(materialState.metallicnessTextureID, true);
        }
        else
        {
            metallicTexture = OvrAvatarSDKManager.Instance.GetTextureCopyManager().FallbackTextureSets[lodIndex].DiffuseRoughness;
        }
        metallicTexture.anisoLevel = 16;
        // PERF: Renderer.materials allocates a fresh array copy on every access;
        // fetch the instantiated material once and reuse it for all assignments.
        Material material = mesh.materials[0];
        material.SetTexture(OvrAvatarMaterialManager.AVATAR_SHADER_MAINTEX, diffuseTexture);
        material.SetTexture(OvrAvatarMaterialManager.AVATAR_SHADER_NORMALMAP, normalTexture);
        material.SetTexture(OvrAvatarMaterialManager.AVATAR_SHADER_ROUGHNESSMAP, metallicTexture);
        material.SetVector(OvrAvatarMaterialManager.AVATAR_SHADER_COLOR, materialState.albedoMultiplier);
        material.SetFloat(OvrAvatarMaterialManager.AVATAR_SHADER_DIFFUSEINTENSITY,
            OvrAvatarMaterialManager.DiffuseIntensities[componentType]);
        material.SetFloat(OvrAvatarMaterialManager.AVATAR_SHADER_RIMINTENSITY,
            OvrAvatarMaterialManager.RimIntensities[componentType]);
        material.SetFloat(OvrAvatarMaterialManager.AVATAR_SHADER_REFLECTIONINTENSITY,
            OvrAvatarMaterialManager.ReflectionIntensities[componentType]);
        mesh.GetClosestReflectionProbes(avatarMaterialManager.ReflectionProbes);
        if (avatarMaterialManager.ReflectionProbes != null &&
            avatarMaterialManager.ReflectionProbes.Count > 0)
        {
            material.SetTexture(OvrAvatarMaterialManager.AVATAR_SHADER_CUBEMAP,
                avatarMaterialManager.ReflectionProbes[0].probe.texture);
        }
        if (EnableExpressive)
        {
            ApplyExpressiveParameters(material);
        }
    }

    // Loads per-body-part PBS material states into the material manager's
    // local config and builds the combined-mesh texture arrays.
    private void InitializeCombinedMaterial(IntPtr renderPart, int lodIndex)
    {
        ovrAvatarPBSMaterialState[] materialStates = CAPI.ovrAvatar_GetBodyPBSMaterialStates(renderPart);
        if (materialStates.Length == (int)ovrAvatarBodyPartType.Count)
        {
            avatarMaterialManager.CreateTextureArrays();
            var localProperties = avatarMaterialManager.LocalAvatarConfig.ComponentMaterialProperties;
            AvatarLogger.Log("InitializeCombinedMaterial - Loading Material States");
            for (int i = 0; i < materialStates.Length; i++)
            {
                localProperties[i].TypeIndex = (ovrAvatarBodyPartType)i;
                localProperties[i].Color = materialStates[i].albedoMultiplier;
                localProperties[i].DiffuseIntensity = OvrAvatarMaterialManager.DiffuseIntensities[i];
                localProperties[i].RimIntensity = OvrAvatarMaterialManager.RimIntensities[i];
                localProperties[i].ReflectionIntensity = OvrAvatarMaterialManager.ReflectionIntensities[i];
                var diffuse = OvrAvatarComponent.GetLoadedTexture(materialStates[i].albedoTextureID);
                var normal = OvrAvatarComponent.GetLoadedTexture(materialStates[i].normalTextureID);
                var roughness = OvrAvatarComponent.GetLoadedTexture(materialStates[i].metallicnessTextureID);
                if (diffuse != null)
                {
                    localProperties[i].Textures[(int)OvrAvatarMaterialManager.TextureType.DiffuseTextures] = diffuse;
                    avatarMaterialManager.AddTextureIDToTextureManager(materialStates[i].albedoTextureID, false);
                }
                else
                {
                    localProperties[i].Textures[(int)OvrAvatarMaterialManager.TextureType.DiffuseTextures] =
                        OvrAvatarSDKManager.Instance.GetTextureCopyManager().FallbackTextureSets[lodIndex].DiffuseRoughness;
                }
                localProperties[i].Textures[(int)OvrAvatarMaterialManager.TextureType.DiffuseTextures].anisoLevel = 4;
                if (normal != null)
                {
                    localProperties[i].Textures[(int)OvrAvatarMaterialManager.TextureType.NormalMaps] = normal;
                    avatarMaterialManager.AddTextureIDToTextureManager(materialStates[i].normalTextureID, false);
                }
                else
                {
                    localProperties[i].Textures[(int)OvrAvatarMaterialManager.TextureType.NormalMaps] =
                        OvrAvatarSDKManager.Instance.GetTextureCopyManager().FallbackTextureSets[lodIndex].Normal;
                }
                localProperties[i].Textures[(int)OvrAvatarMaterialManager.TextureType.NormalMaps].anisoLevel = 4;
                if (roughness != null)
                {
                    localProperties[i].Textures[(int)OvrAvatarMaterialManager.TextureType.RoughnessMaps] = roughness;
                    avatarMaterialManager.AddTextureIDToTextureManager(materialStates[i].metallicnessTextureID, false);
                }
                else
                {
                    localProperties[i].Textures[(int)OvrAvatarMaterialManager.TextureType.RoughnessMaps] =
                        OvrAvatarSDKManager.Instance.GetTextureCopyManager().FallbackTextureSets[lodIndex].DiffuseRoughness;
                }
                localProperties[i].Textures[(int)OvrAvatarMaterialManager.TextureType.RoughnessMaps].anisoLevel = 16;
                AvatarLogger.Log(localProperties[i].TypeIndex.ToString());
                AvatarLogger.Log(AvatarLogger.Tab + "Diffuse: " + materialStates[i].albedoTextureID);
                AvatarLogger.Log(AvatarLogger.Tab + "Normal: " + materialStates[i].normalTextureID);
                AvatarLogger.Log(AvatarLogger.Tab + "Metallic: " + materialStates[i].metallicnessTextureID);
            }
            if (EnableExpressive)
            {
                ApplyExpressiveParameters(mesh.materials[0]);
            }
            avatarMaterialManager.ValidateTextures(materialStates);
        }
    }

    // Pushes the avatar's expressive (face) colors and lip smoothness into the
    // given material; shared by the single-component and combined paths.
    private void ApplyExpressiveParameters(Material material)
    {
        material.SetVector(OvrAvatarMaterialManager.AVATAR_SHADER_IRIS_COLOR,
            ExpressiveParameters.irisColor);
        material.SetVector(OvrAvatarMaterialManager.AVATAR_SHADER_LIP_COLOR,
            ExpressiveParameters.lipColor);
        material.SetVector(OvrAvatarMaterialManager.AVATAR_SHADER_BROW_COLOR,
            ExpressiveParameters.browColor);
        material.SetVector(OvrAvatarMaterialManager.AVATAR_SHADER_LASH_COLOR,
            ExpressiveParameters.lashColor);
        material.SetVector(OvrAvatarMaterialManager.AVATAR_SHADER_SCLERA_COLOR,
            ExpressiveParameters.scleraColor);
        material.SetVector(OvrAvatarMaterialManager.AVATAR_SHADER_GUM_COLOR,
            ExpressiveParameters.gumColor);
        material.SetVector(OvrAvatarMaterialManager.AVATAR_SHADER_TEETH_COLOR,
            ExpressiveParameters.teethColor);
        material.SetFloat(OvrAvatarMaterialManager.AVATAR_SHADER_LIP_SMOOTHNESS,
            ExpressiveParameters.lipSmoothness);
    }

    private void SetMaterialTransparent(Material mat)
    {
        // Initialize shader to use transparent render queue with alpha blending
        mat.SetOverrideTag("Queue", "Transparent");
        mat.SetOverrideTag("RenderType", "Transparent");
        mat.SetInt("_SrcBlend", (int)UnityEngine.Rendering.BlendMode.SrcAlpha);
        mat.SetInt("_DstBlend", (int)UnityEngine.Rendering.BlendMode.OneMinusSrcAlpha);
        mat.EnableKeyword("_ALPHATEST_ON");
        mat.EnableKeyword("_ALPHABLEND_ON");
        mat.EnableKeyword("_ALPHAPREMULTIPLY_ON");
        mat.renderQueue = (int)UnityEngine.Rendering.RenderQueue.Transparent;
    }

    private void SetMaterialOpaque(Material mat)
    {
        // Initialize shader to use geometry render queue with no blending
        mat.SetOverrideTag("Queue", "Geometry");
        mat.SetOverrideTag("RenderType", "Opaque");
        mat.SetInt("_SrcBlend", (int)UnityEngine.Rendering.BlendMode.One);
        mat.SetInt("_DstBlend", (int)UnityEngine.Rendering.BlendMode.Zero);
        mat.DisableKeyword("_ALPHATEST_ON");
        mat.DisableKeyword("_ALPHABLEND_ON");
        mat.DisableKeyword("_ALPHAPREMULTIPLY_ON");
        mat.renderQueue = (int)UnityEngine.Rendering.RenderQueue.Geometry;
    }
}
| |
#if !DISABLE_PLAYFABENTITY_API
using System;
using System.Collections.Generic;
using PlayFab.SharedModels;
namespace PlayFab.GroupsModels
{
/// <summary>
/// Accepts an outstanding invitation to join a group if the invited entity is not blocked by the group. Nothing is
/// returned in the case of success.
/// </summary>
[Serializable]
public class AcceptGroupApplicationRequest : PlayFabRequestCommon
{
    /// <summary>
    /// Optional. Type of the entity to accept as. If specified, must be the same entity as the claimant or an entity that is a
    /// child of the claimant entity. Defaults to the claimant entity.
    /// </summary>
    public EntityKey Entity;
    /// <summary>
    /// The identifier of the group.
    /// </summary>
    public EntityKey Group;
}
/// <summary>
/// Accepts an outstanding invitation to join the group if the invited entity is not blocked by the group. Only the invited
/// entity or a parent in its chain (e.g. title) may accept the invitation on the invited entity's behalf. Nothing is
/// returned in the case of success.
/// </summary>
[Serializable]
public class AcceptGroupInvitationRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The entity to perform this action on.
    /// </summary>
    public EntityKey Entity;
    /// <summary>
    /// The identifier of the group to join.
    /// </summary>
    public EntityKey Group;
}
/// <summary>
/// Adds members to a group or role. Existing members of the group will be added to roles within the group, but if the user
/// is not already a member of the group, only title claimants may add them to the group, and others must use the group
/// application or invite system to add new members to a group. Returns nothing if successful.
/// </summary>
[Serializable]
public class AddMembersRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The identifier of the group.
    /// </summary>
    public EntityKey Group;
    /// <summary>
    /// List of entities to add to the group. Only entities of type title_player_account and character may be added to groups.
    /// </summary>
    public List<EntityKey> Members;
    /// <summary>
    /// Optional: The ID of the existing role to add the entities to. If this is not specified, the default member role for the
    /// group will be used. Role IDs must be between 1 and 64 characters long.
    /// </summary>
    public string RoleId;
}
/// <summary>
/// Creates an application to join a group. Calling this while a group application already exists will return the same
/// application instead of an error and will not refresh the time before the application expires. By default, if the entity
/// has an invitation to join the group outstanding, this will accept the invitation to join the group instead and return an
/// error indicating such, rather than creating a duplicate application to join that will need to be cleaned up later.
/// Returns information about the application or an error indicating an invitation was accepted instead.
/// </summary>
[Serializable]
public class ApplyToGroupRequest : PlayFabRequestCommon
{
    /// <summary>
    /// Optional, default true. Automatically accept an outstanding invitation if one exists instead of creating an application.
    /// </summary>
    public bool? AutoAcceptOutstandingInvite;
    /// <summary>
    /// The entity to perform this action on.
    /// </summary>
    public EntityKey Entity;
    /// <summary>
    /// The identifier of the group.
    /// </summary>
    public EntityKey Group;
}
/// <summary>
/// Describes an application to join a group.
/// </summary>
[Serializable]
public class ApplyToGroupResponse : PlayFabResultCommon
{
    /// <summary>
    /// Type of entity that requested membership.
    /// </summary>
    public EntityWithLineage Entity;
    /// <summary>
    /// When the application to join will expire and be deleted.
    /// </summary>
    public DateTime Expires;
    /// <summary>
    /// ID of the group that the entity is requesting membership to.
    /// </summary>
    public EntityKey Group;
}
/// <summary>
/// Blocks a list of entities from joining a group. Blocked entities may not create new applications to join, be invited to
/// join, accept an invitation, or have an application accepted. Failure due to being blocked does not clean up existing
/// applications or invitations to the group. No data is returned in the case of success.
/// </summary>
[Serializable]
public class BlockEntityRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The entity to perform this action on (the entity being blocked).
    /// </summary>
    public EntityKey Entity;
    /// <summary>
    /// The identifier of the group.
    /// </summary>
    public EntityKey Group;
}
/// <summary>
/// Changes the role membership of a list of entities from one role to another in a single operation. The destination
/// role must already exist. This is equivalent to adding the entities to the destination role and removing from the origin
/// role. Returns nothing if successful.
/// </summary>
[Serializable]
public class ChangeMemberRoleRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The ID of the role that the entities will become a member of. This must be an existing role. Role IDs must be between 1
    /// and 64 characters long.
    /// </summary>
    public string DestinationRoleId;
    /// <summary>
    /// The identifier of the group.
    /// </summary>
    public EntityKey Group;
    /// <summary>
    /// List of entities to move between roles in the group. All entities in this list must be members of the group and origin
    /// role.
    /// </summary>
    public List<EntityKey> Members;
    /// <summary>
    /// The ID of the role that the entities currently are a member of. Role IDs must be between 1 and 64 characters long.
    /// </summary>
    public string OriginRoleId;
}
/// <summary>
/// Creates a new group, as well as administration and member roles, based off of a title's group template. Returns
/// information about the group that was created.
/// </summary>
[Serializable]
public class CreateGroupRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The entity to perform this action on.
    /// </summary>
    public EntityKey Entity;
    /// <summary>
    /// The name of the group. This is unique at the title level by default.
    /// </summary>
    public string GroupName;
}
/// <summary>
/// Result of a successful group creation: identifiers, default role IDs and the initial profile version.
/// </summary>
[Serializable]
public class CreateGroupResponse : PlayFabResultCommon
{
    /// <summary>
    /// The ID of the administrator role for the group.
    /// </summary>
    public string AdminRoleId;
    /// <summary>
    /// The server date and time the group was created.
    /// </summary>
    public DateTime Created;
    /// <summary>
    /// The identifier of the group.
    /// </summary>
    public EntityKey Group;
    /// <summary>
    /// The name of the group.
    /// </summary>
    public string GroupName;
    /// <summary>
    /// The ID of the default member role for the group.
    /// </summary>
    public string MemberRoleId;
    /// <summary>
    /// The current version of the profile, can be used for concurrency control during updates.
    /// </summary>
    public int ProfileVersion;
    /// <summary>
    /// The list of roles and names that belong to the group.
    /// </summary>
    public Dictionary<string,string> Roles;
}
/// <summary>
/// Creates a new role within an existing group, with no members. Both the role ID and role name must be unique within the
/// group, but the name can be the same as the ID. The role ID is set at creation and cannot be changed. Returns information
/// about the role that was created.
/// </summary>
[Serializable]
public class CreateGroupRoleRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The identifier of the group.
    /// </summary>
    public EntityKey Group;
    /// <summary>
    /// The ID of the role. This must be unique within the group and cannot be changed. Role IDs must be between 1 and 64
    /// characters long.
    /// </summary>
    public string RoleId;
    /// <summary>
    /// The name of the role. This must be unique within the group and can be changed later. Role names must be between 1 and
    /// 100 characters long.
    /// </summary>
    public string RoleName;
}
/// <summary>
/// Result of a successful role creation: the role's ID, name, and the group profile version after the change.
/// </summary>
[Serializable]
public class CreateGroupRoleResponse : PlayFabResultCommon
{
    /// <summary>
    /// The current version of the group profile, can be used for concurrency control during updates.
    /// </summary>
    public int ProfileVersion;
    /// <summary>
    /// ID for the role.
    /// </summary>
    public string RoleId;
    /// <summary>
    /// The name of the role.
    /// </summary>
    public string RoleName;
}
/// <summary>
/// Deletes a group and all roles, invitations, join requests, and blocks associated with it. Permission to delete is only
/// required on the group itself to execute this action. The group and data cannot be recovered once removed, but any
/// abuse reports about the group will remain. No data is returned in the case of success.
/// </summary>
[Serializable]
public class DeleteGroupRequest : PlayFabRequestCommon
{
    /// <summary>
    /// ID of the group or role to remove.
    /// </summary>
    public EntityKey Group;
}
/// <summary>
/// Request to delete a role within a group. (NOTE(review): the original generated summary read
/// "Returns information about the role", which appears to be a copy/paste slip — confirm against the Groups API docs.)
/// </summary>
[Serializable]
public class DeleteRoleRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The identifier of the group.
    /// </summary>
    public EntityKey Group;
    /// <summary>
    /// The ID of the role to delete. Role IDs must be between 1 and 64 characters long.
    /// </summary>
    public string RoleId;
}
/// <summary>
/// Result type for operations that return no data beyond success.
/// </summary>
[Serializable]
public class EmptyResponse : PlayFabResultCommon
{
}
/// <summary>
/// Combined entity type and ID structure which uniquely identifies a single entity.
/// </summary>
[Serializable]
public class EntityKey
{
    /// <summary>
    /// Unique ID of the entity.
    /// </summary>
    public string Id;
    /// <summary>
    /// Entity type. See https://api.playfab.com/docs/tutorials/entities/entitytypes
    /// </summary>
    public string Type;
}
/// <summary>
/// A role within a group together with the members currently holding it.
/// </summary>
[Serializable]
public class EntityMemberRole
{
    /// <summary>
    /// The list of members in the role.
    /// </summary>
    public List<EntityWithLineage> Members;
    /// <summary>
    /// The ID of the role.
    /// </summary>
    public string RoleId;
    /// <summary>
    /// The name of the role.
    /// </summary>
    public string RoleName;
}
/// <summary>
/// Entity wrapper class that contains the entity key and the entities that make up the lineage of the entity.
/// </summary>
[Serializable]
public class EntityWithLineage
{
    /// <summary>
    /// The entity key for the specified entity.
    /// </summary>
    public EntityKey Key;
    /// <summary>
    /// Dictionary of entity keys for related entities. Dictionary key is entity type.
    /// </summary>
    public Dictionary<string,EntityKey> Lineage;
}
/// <summary>
/// Returns the ID, name, role list and other non-membership related information about a group.
/// </summary>
[Serializable]
public class GetGroupRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The identifier of the group.
    /// </summary>
    public EntityKey Group;
    /// <summary>
    /// The full name of the group.
    /// </summary>
    public string GroupName;
}
/// <summary>
/// Non-membership information about a group: identifiers, default role IDs and the current profile version.
/// </summary>
[Serializable]
public class GetGroupResponse : PlayFabResultCommon
{
    /// <summary>
    /// The ID of the administrator role for the group.
    /// </summary>
    public string AdminRoleId;
    /// <summary>
    /// The server date and time the group was created.
    /// </summary>
    public DateTime Created;
    /// <summary>
    /// The identifier of the group.
    /// </summary>
    public EntityKey Group;
    /// <summary>
    /// The name of the group.
    /// </summary>
    public string GroupName;
    /// <summary>
    /// The ID of the default member role for the group.
    /// </summary>
    public string MemberRoleId;
    /// <summary>
    /// The current version of the profile, can be used for concurrency control during updates.
    /// </summary>
    public int ProfileVersion;
    /// <summary>
    /// The list of roles and names that belong to the group.
    /// </summary>
    public Dictionary<string,string> Roles;
}
/// <summary>
/// Describes an application to join a group.
/// </summary>
[Serializable]
public class GroupApplication
{
    /// <summary>
    /// Type of entity that requested membership.
    /// </summary>
    public EntityWithLineage Entity;
    /// <summary>
    /// When the application to join will expire and be deleted.
    /// </summary>
    public DateTime Expires;
    /// <summary>
    /// ID of the group that the entity is requesting membership to.
    /// </summary>
    public EntityKey Group;
}
/// <summary>
/// Describes an entity that is blocked from joining a group.
/// </summary>
[Serializable]
public class GroupBlock
{
    /// <summary>
    /// The entity that is blocked.
    /// </summary>
    public EntityWithLineage Entity;
    /// <summary>
    /// ID of the group that the entity is blocked from.
    /// </summary>
    public EntityKey Group;
}
/// <summary>
/// Describes an invitation to a group.
/// </summary>
[Serializable]
public class GroupInvitation
{
/// <summary>
/// When the invitation will expire and be deleted
/// </summary>
public DateTime Expires;
/// <summary>
/// The group that the entity invited to
/// </summary>
public EntityKey Group;
/// <summary>
/// The entity that created the invitation
/// </summary>
public EntityWithLineage InvitedByEntity;
/// <summary>
/// The entity that is invited
/// </summary>
public EntityWithLineage InvitedEntity;
/// <summary>
/// ID of the role in the group to assign the user to.
/// </summary>
public string RoleId;
}
/// <summary>
/// Describes a group role
/// </summary>
[Serializable]
public class GroupRole
{
/// <summary>
/// ID for the role
/// </summary>
public string RoleId;
/// <summary>
/// The name of the role
/// </summary>
public string RoleName;
}
/// <summary>
/// Describes a group and the roles that it contains
/// </summary>
[Serializable]
public class GroupWithRoles
{
/// <summary>
/// ID for the group
/// </summary>
public EntityKey Group;
/// <summary>
/// The name of the group
/// </summary>
public string GroupName;
/// <summary>
/// The current version of the profile, can be used for concurrency control during updates.
/// </summary>
public int ProfileVersion;
/// <summary>
/// The list of roles within the group
/// </summary>
public List<GroupRole> Roles;
}
/// <summary>
/// Invites a player to join a group, if they are not blocked by the group. An optional role can be provided to
/// automatically assign the player to the role if they accept the invitation. By default, if the entity has an application
/// to the group outstanding, this will accept the application instead and return an error indicating such, rather than
/// creating a duplicate invitation to join that will need to be cleaned up later. Returns information about the new
/// invitation or an error indicating an existing application to join was accepted.
/// </summary>
[Serializable]
public class InviteToGroupRequest : PlayFabRequestCommon
{
    /// <summary>
    /// Optional, default true. Automatically accept an application if one exists instead of creating an invitation
    /// </summary>
    public bool? AutoAcceptOutstandingApplication;
    /// <summary>
    /// The entity to perform this action on.
    /// </summary>
    public EntityKey Entity;
    /// <summary>
    /// The identifier of the group
    /// </summary>
    public EntityKey Group;
    /// <summary>
    /// Optional. ID of an existing a role in the group to assign the user to. The group's default member role is used if this
    /// is not specified. Role IDs must be between 1 and 64 characters long.
    /// </summary>
    public string RoleId;
}
/// <summary>
/// Describes an invitation to a group.
/// </summary>
[Serializable]
public class InviteToGroupResponse : PlayFabResultCommon
{
    /// <summary>
    /// When the invitation will expire and be deleted
    /// </summary>
    public DateTime Expires;
    /// <summary>
    /// The group that the entity is invited to
    /// </summary>
    public EntityKey Group;
    /// <summary>
    /// The entity that created the invitation
    /// </summary>
    public EntityWithLineage InvitedByEntity;
    /// <summary>
    /// The entity that is invited
    /// </summary>
    public EntityWithLineage InvitedEntity;
    /// <summary>
    /// ID of the role in the group to assign the user to.
    /// </summary>
    public string RoleId;
}
/// <summary>
/// Checks to see if an entity is a member of a group or role within the group. A result indicating if the entity is a
/// member of the group is returned, or a permission error if the caller does not have permission to read the group's member
/// list.
/// </summary>
[Serializable]
public class IsMemberRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The entity to perform this action on.
    /// </summary>
    public EntityKey Entity;
    /// <summary>
    /// The identifier of the group
    /// </summary>
    public EntityKey Group;
    /// <summary>
    /// Optional: ID of the role to check membership of. Defaults to any role (that is, check to see if the entity is a member
    /// of the group in any capacity) if not specified.
    /// </summary>
    public string RoleId;
}
/// <summary>
/// Result of IsMember: whether the entity belongs to the group (or the specified role).
/// </summary>
[Serializable]
public class IsMemberResponse : PlayFabResultCommon
{
    /// <summary>
    /// A value indicating whether or not the entity is a member.
    /// </summary>
    public bool IsMember;
}
/// <summary>
/// Lists all outstanding requests to join a group. Returns a list of all requests to join, as well as when the request will
/// expire. To get the group applications for a specific entity, use ListMembershipOpportunities.
/// </summary>
[Serializable]
public class ListGroupApplicationsRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The identifier of the group
    /// </summary>
    public EntityKey Group;
}
/// <summary>
/// Result of ListGroupApplications: the group's outstanding applications to join.
/// </summary>
[Serializable]
public class ListGroupApplicationsResponse : PlayFabResultCommon
{
    /// <summary>
    /// The requested list of applications to the group.
    /// </summary>
    public List<GroupApplication> Applications;
}
/// <summary>
/// Lists all entities blocked from joining a group. A list of blocked entities is returned
/// </summary>
[Serializable]
public class ListGroupBlocksRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The identifier of the group
    /// </summary>
    public EntityKey Group;
}
/// <summary>
/// Result of ListGroupBlocks: the entities blocked from joining the group.
/// </summary>
[Serializable]
public class ListGroupBlocksResponse : PlayFabResultCommon
{
    /// <summary>
    /// The requested list of blocked entities.
    /// </summary>
    public List<GroupBlock> BlockedEntities;
}
/// <summary>
/// Lists all outstanding invitations for a group. Returns a list of entities that have been invited, as well as when the
/// invitation will expire. To get the group invitations for a specific entity, use ListMembershipOpportunities.
/// </summary>
[Serializable]
public class ListGroupInvitationsRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The identifier of the group
    /// </summary>
    public EntityKey Group;
}
/// <summary>
/// Result of ListGroupInvitations: the group's outstanding invitations.
/// </summary>
[Serializable]
public class ListGroupInvitationsResponse : PlayFabResultCommon
{
    /// <summary>
    /// The requested list of group invitations.
    /// </summary>
    public List<GroupInvitation> Invitations;
}
/// <summary>
/// Gets a list of members and the roles they belong to within the group. If the caller does not have permission to view the
/// role, and the member is in no other role, the member is not displayed. Returns a list of entities that are members of
/// the group.
/// </summary>
[Serializable]
public class ListGroupMembersRequest : PlayFabRequestCommon
{
    /// <summary>
    /// ID of the group to list the members and roles for
    /// </summary>
    public EntityKey Group;
}
/// <summary>
/// Result of ListGroupMembers: the group's members grouped by the roles they belong to.
/// </summary>
[Serializable]
public class ListGroupMembersResponse : PlayFabResultCommon
{
    /// <summary>
    /// The requested list of roles and member entity IDs.
    /// </summary>
    public List<EntityMemberRole> Members;
}
/// <summary>
/// Lists all outstanding group applications and invitations for an entity. Anyone may call this for any entity, but data
/// will only be returned for the entity or a parent of that entity. To list invitations or applications for a group to
/// check if a player is trying to join, use ListGroupInvitations and ListGroupApplications.
/// </summary>
[Serializable]
public class ListMembershipOpportunitiesRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The entity to perform this action on.
    /// </summary>
    public EntityKey Entity;
}
/// <summary>
/// Result of ListMembershipOpportunities: the entity's outstanding applications and invitations.
/// </summary>
[Serializable]
public class ListMembershipOpportunitiesResponse : PlayFabResultCommon
{
    /// <summary>
    /// The requested list of group applications.
    /// </summary>
    public List<GroupApplication> Applications;
    /// <summary>
    /// The requested list of group invitations.
    /// </summary>
    public List<GroupInvitation> Invitations;
}
/// <summary>
/// Lists the groups and roles that an entity is a part of, checking to see if group and role metadata and memberships
/// should be visible to the caller. If the entity is not in any roles that are visible to the caller, the group is not
/// returned in the results, even if the caller otherwise has permission to see that the entity is a member of that group.
/// </summary>
[Serializable]
public class ListMembershipRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The entity to perform this action on.
    /// </summary>
    public EntityKey Entity;
}
/// <summary>
/// Result of ListMembership: the groups (and their roles) the entity belongs to.
/// </summary>
[Serializable]
public class ListMembershipResponse : PlayFabResultCommon
{
    /// <summary>
    /// The list of groups
    /// </summary>
    public List<GroupWithRoles> Groups;
}
/// <summary>
/// Indicates which operation a write request performed, as reported in update responses.
/// </summary>
public enum OperationTypes
{
    Created,
    Updated,
    Deleted,
    None
}
/// <summary>
/// Removes an existing application to join the group. This is used for both rejection of an application as well as
/// withdrawing an application. The applying entity or a parent in its chain (e.g. title) may withdraw the application, and
/// any caller with appropriate access in the group may reject an application. No data is returned in the case of success.
/// </summary>
[Serializable]
public class RemoveGroupApplicationRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The entity to perform this action on.
    /// </summary>
    public EntityKey Entity;
    /// <summary>
    /// The identifier of the group
    /// </summary>
    public EntityKey Group;
}
/// <summary>
/// Removes an existing invitation to join the group. This is used for both rejection of an invitation as well as rescinding
/// an invitation. The invited entity or a parent in its chain (e.g. title) may reject the invitation by calling this
/// method, and any caller with appropriate access in the group may rescind an invitation. No data is returned in the case
/// of success.
/// </summary>
[Serializable]
public class RemoveGroupInvitationRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The entity to perform this action on.
    /// </summary>
    public EntityKey Entity;
    /// <summary>
    /// The identifier of the group
    /// </summary>
    public EntityKey Group;
}
/// <summary>
/// Removes members from a group. A member can always remove themselves from a group, regardless of permissions. Returns
/// nothing if successful.
/// </summary>
[Serializable]
public class RemoveMembersRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The identifier of the group
    /// </summary>
    public EntityKey Group;
    /// <summary>
    /// List of entities to remove
    /// </summary>
    public List<EntityKey> Members;
    /// <summary>
    /// The ID of the role to remove the entities from.
    /// </summary>
    public string RoleId;
}
/// <summary>
/// Unblocks a list of entities from joining a group. No data is returned in the case of success.
/// </summary>
[Serializable]
public class UnblockEntityRequest : PlayFabRequestCommon
{
    /// <summary>
    /// The entity to perform this action on.
    /// </summary>
    public EntityKey Entity;
    /// <summary>
    /// The identifier of the group
    /// </summary>
    public EntityKey Group;
}
/// <summary>
/// Updates data about a group, such as the name or default member role. Returns information about whether the update was
/// successful. Only title claimants may modify the administration role for a group.
/// </summary>
[Serializable]
public class UpdateGroupRequest : PlayFabRequestCommon
{
    /// <summary>
    /// Optional: the ID of an existing role to set as the new administrator role for the group
    /// </summary>
    public string AdminRoleId;
    /// <summary>
    /// Optional field used for concurrency control. By specifying the previously returned value of ProfileVersion from the
    /// GetGroup API, you can ensure that the group data update will only be performed if the group has not been updated by any
    /// other clients since the version you last loaded.
    /// </summary>
    public int? ExpectedProfileVersion;
    /// <summary>
    /// The identifier of the group
    /// </summary>
    public EntityKey Group;
    /// <summary>
    /// Optional: the new name of the group
    /// </summary>
    public string GroupName;
    /// <summary>
    /// Optional: the ID of an existing role to set as the new member role for the group
    /// </summary>
    public string MemberRoleId;
}
/// <summary>
/// Result of UpdateGroup: the outcome of the write and the group's new profile version.
/// </summary>
[Serializable]
public class UpdateGroupResponse : PlayFabResultCommon
{
    /// <summary>
    /// Optional reason to explain why the operation was the result that it was.
    /// </summary>
    public string OperationReason;
    /// <summary>
    /// New version of the group data.
    /// </summary>
    public int ProfileVersion;
    /// <summary>
    /// Indicates which operation was completed, either Created, Updated, Deleted or None.
    /// </summary>
    public OperationTypes? SetResult;
}
/// <summary>
/// Updates the role name. Returns information about whether the update was successful.
/// </summary>
[Serializable]
public class UpdateGroupRoleRequest : PlayFabRequestCommon
{
    /// <summary>
    /// Optional field used for concurrency control. By specifying the previously returned value of ProfileVersion from the
    /// GetGroup API, you can ensure that the group data update will only be performed if the group has not been updated by any
    /// other clients since the version you last loaded.
    /// </summary>
    public int? ExpectedProfileVersion;
    /// <summary>
    /// The identifier of the group
    /// </summary>
    public EntityKey Group;
    /// <summary>
    /// ID of the role to update. Role IDs must be between 1 and 64 characters long.
    /// </summary>
    public string RoleId;
    /// <summary>
    /// The new name of the role
    /// </summary>
    public string RoleName;
}
/// <summary>
/// Result of UpdateGroupRole: the outcome of the write and the role's new profile version.
/// </summary>
[Serializable]
public class UpdateGroupRoleResponse : PlayFabResultCommon
{
    /// <summary>
    /// Optional reason to explain why the operation was the result that it was.
    /// </summary>
    public string OperationReason;
    /// <summary>
    /// New version of the role data.
    /// </summary>
    public int ProfileVersion;
    /// <summary>
    /// Indicates which operation was completed, either Created, Updated, Deleted or None.
    /// </summary>
    public OperationTypes? SetResult;
}
}
#endif
| |
using FluentAssertions;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Moq;
using Neo.Cryptography;
using Neo.IO;
using Neo.Ledger;
using Neo.Network.P2P.Payloads;
using Neo.Persistence;
using Neo.Plugins;
using Neo.SmartContract;
using Neo.SmartContract.Native;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Numerics;
namespace Neo.UnitTests.Ledger
{
/// <summary>
/// No-op observer plugin registered in the tests below so the MemoryPool has an
/// IMemoryPoolTxObserverPlugin instance to notify; it performs no work on any callback.
/// </summary>
internal class TestIMemoryPoolTxObserverPlugin : Plugin, IMemoryPoolTxObserverPlugin
{
    // All members are intentionally empty: only the plugin's presence matters to these tests.
    public override void Configure() { }
    public void TransactionAdded(Transaction tx) { }
    public void TransactionsRemoved(MemoryPoolTxRemovalReason reason, IEnumerable<Transaction> transactions) { }
}
[TestClass]
public class UT_MemoryPool
{
    // Storage key prefixes for the native Policy contract entries overridden in
    // TestUpdatePoolForBlockPersisted.
    private const byte Prefix_MaxTransactionsPerBlock = 23;
    private const byte Prefix_FeePerByte = 10;
    // Pool under test; recreated with capacity 100 in TestSetup before every test.
    private MemoryPool _unit;
    // Zero-capacity pool used to exercise the rejection path in TestTryAdd.
    private MemoryPool _unit2;
    private TestIMemoryPoolTxObserverPlugin plugin;

    /// <summary>
    /// Per-test setup: resets the time provider, initializes the mock NeoSystem and
    /// creates a fresh, empty 100-capacity pool plus a zero-capacity pool.
    /// </summary>
    [TestInitialize]
    public void TestSetup()
    {
        // protect against external changes on TimeProvider
        TimeProvider.ResetToDefault();
        TestBlockchain.InitializeMockNeoSystem();
        // Create a MemoryPool with capacity of 100
        _unit = new MemoryPool(TestBlockchain.TheNeoSystem, 100);
        _unit.LoadPolicy(Blockchain.Singleton.GetSnapshot());
        // Verify capacity equals the amount specified
        _unit.Capacity.Should().Be(100);
        _unit.VerifiedCount.Should().Be(0);
        _unit.UnVerifiedCount.Should().Be(0);
        _unit.Count.Should().Be(0);
        _unit2 = new MemoryPool(TestBlockchain.TheNeoSystem, 0);
        plugin = new TestIMemoryPoolTxObserverPlugin();
    }

    // Returns a positive pseudo-random long in [min, max); the modulo mapping is
    // slightly biased but adequate for generating test fees.
    long LongRandom(long min, long max, Random rand)
    {
        // Only returns positive random long values.
        long longRand = (long)rand.NextBigInteger(63);
        return longRand % (max - min) + min;
    }

    /// <summary>
    /// Builds a mock Transaction with a random 16-byte script (so each tx hashes
    /// uniquely), the given network fee, and Verify/Reverify stubbed to succeed.
    /// </summary>
    private Transaction CreateTransactionWithFee(long fee)
    {
        Random random = new Random();
        var randomBytes = new byte[16];
        random.NextBytes(randomBytes);
        Mock<Transaction> mock = new Mock<Transaction>();
        mock.Setup(p => p.Reverify(It.IsAny<StoreView>(), It.IsAny<BigInteger>())).Returns(true);
        mock.Setup(p => p.Verify(It.IsAny<StoreView>(), It.IsAny<BigInteger>())).Returns(true);
        mock.Object.Script = randomBytes;
        mock.Object.Sender = UInt160.Zero;
        mock.Object.NetworkFee = fee;
        mock.Object.Attributes = new TransactionAttribute[0];
        mock.Object.Cosigners = new Cosigner[0];
        mock.Object.Witnesses = new[]
        {
            new Witness
            {
                InvocationScript = new byte[0],
                VerificationScript = new byte[0]
            }
        };
        return mock.Object;
    }

    /// <summary>
    /// Like CreateTransactionWithFee, but Reverify actually checks that the sender's
    /// GAS balance covers the already-consumed amount plus this fee, so reverification
    /// can fail once the balance is drained.
    /// </summary>
    private Transaction CreateTransactionWithFeeAndBalanceVerify(long fee)
    {
        Random random = new Random();
        var randomBytes = new byte[16];
        random.NextBytes(randomBytes);
        Mock<Transaction> mock = new Mock<Transaction>();
        UInt160 sender = UInt160.Zero;
        mock.Setup(p => p.Reverify(It.IsAny<StoreView>(), It.IsAny<BigInteger>())).Returns(((StoreView snapshot, BigInteger amount) => NativeContract.GAS.BalanceOf(snapshot, sender) >= amount + fee));
        mock.Setup(p => p.Verify(It.IsAny<StoreView>(), It.IsAny<BigInteger>())).Returns(true);
        mock.Object.Script = randomBytes;
        mock.Object.Sender = sender;
        mock.Object.NetworkFee = fee;
        mock.Object.Attributes = new TransactionAttribute[0];
        mock.Object.Cosigners = new Cosigner[0];
        mock.Object.Witnesses = new[]
        {
            new Witness
            {
                InvocationScript = new byte[0],
                VerificationScript = new byte[0]
            }
        };
        return mock.Object;
    }

    // fee == -1 (the default) means "use a random fee in [100000, 100000000)".
    private Transaction CreateTransaction(long fee = -1)
    {
        if (fee != -1)
            return CreateTransactionWithFee(fee);
        return CreateTransactionWithFee(LongRandom(100000, 100000000, TestUtils.TestRandom));
    }

    // Adds `count` random-fee transactions to the pool under test.
    private void AddTransactions(int count)
    {
        for (int i = 0; i < count; i++)
        {
            var txToAdd = CreateTransaction();
            _unit.TryAdd(txToAdd.Hash, txToAdd);
        }
        Console.WriteLine($"created {count} tx");
    }

    private void AddTransaction(Transaction txToAdd)
    {
        _unit.TryAdd(txToAdd.Hash, txToAdd);
    }

    // Adds `count` balance-checking transactions (see CreateTransactionWithFeeAndBalanceVerify).
    private void AddTransactionsWithBalanceVerify(int count, long fee)
    {
        for (int i = 0; i < count; i++)
        {
            var txToAdd = CreateTransactionWithFeeAndBalanceVerify(fee);
            _unit.TryAdd(txToAdd.Hash, txToAdd);
        }
        Console.WriteLine($"created {count} tx");
    }

    [TestMethod]
    public void CapacityTest()
    {
        // Add over the capacity items, verify that the verified count increases each time
        AddTransactions(101);
        Console.WriteLine($"VerifiedCount: {_unit.VerifiedCount} Count {_unit.SortedTxCount}");
        _unit.SortedTxCount.Should().Be(100);
        _unit.VerifiedCount.Should().Be(100);
        _unit.UnVerifiedCount.Should().Be(0);
        _unit.Count.Should().Be(100);
    }

    /// <summary>
    /// After a block persists, remaining pool txs become unverified; each call to
    /// ReVerifyTopUnverifiedTransactionsIfNeeded moves up to the requested number back
    /// to the verified set. The block here includes 5 duplicate txs, so 70 - 10 unique = 60 remain.
    /// </summary>
    [TestMethod]
    public void BlockPersistMovesTxToUnverifiedAndReverification()
    {
        AddTransactions(70);
        _unit.SortedTxCount.Should().Be(70);
        var block = new Block
        {
            Transactions = _unit.GetSortedVerifiedTransactions().Take(10)
                .Concat(_unit.GetSortedVerifiedTransactions().Take(5)).ToArray()
        };
        _unit.UpdatePoolForBlockPersisted(block, Blockchain.Singleton.GetSnapshot());
        _unit.InvalidateVerifiedTransactions();
        _unit.SortedTxCount.Should().Be(0);
        _unit.UnverifiedSortedTxCount.Should().Be(60);
        _unit.ReVerifyTopUnverifiedTransactionsIfNeeded(10, Blockchain.Singleton.GetSnapshot());
        _unit.SortedTxCount.Should().Be(10);
        _unit.UnverifiedSortedTxCount.Should().Be(50);
        _unit.ReVerifyTopUnverifiedTransactionsIfNeeded(10, Blockchain.Singleton.GetSnapshot());
        _unit.SortedTxCount.Should().Be(20);
        _unit.UnverifiedSortedTxCount.Should().Be(40);
        _unit.ReVerifyTopUnverifiedTransactionsIfNeeded(10, Blockchain.Singleton.GetSnapshot());
        _unit.SortedTxCount.Should().Be(30);
        _unit.UnverifiedSortedTxCount.Should().Be(30);
        _unit.ReVerifyTopUnverifiedTransactionsIfNeeded(10, Blockchain.Singleton.GetSnapshot());
        _unit.SortedTxCount.Should().Be(40);
        _unit.UnverifiedSortedTxCount.Should().Be(20);
        _unit.ReVerifyTopUnverifiedTransactionsIfNeeded(10, Blockchain.Singleton.GetSnapshot());
        _unit.SortedTxCount.Should().Be(50);
        _unit.UnverifiedSortedTxCount.Should().Be(10);
        _unit.ReVerifyTopUnverifiedTransactionsIfNeeded(10, Blockchain.Singleton.GetSnapshot());
        _unit.SortedTxCount.Should().Be(60);
        _unit.UnverifiedSortedTxCount.Should().Be(0);
    }

    /// <summary>
    /// When the sender's GAS balance only covers 30 txs after a block persists,
    /// reverification must drop the other txs instead of keeping them in the pool.
    /// The balance is restored at the end so later tests are unaffected.
    /// </summary>
    [TestMethod]
    public void BlockPersistAndReverificationWillAbandonTxAsBalanceTransfered()
    {
        long txFee = 1;
        AddTransactionsWithBalanceVerify(70, txFee);
        _unit.SortedTxCount.Should().Be(70);
        var block = new Block
        {
            Transactions = _unit.GetSortedVerifiedTransactions().Take(10).ToArray()
        };
        // Simulate the transfer process in tx by burning the balance
        UInt160 sender = block.Transactions[0].Sender;
        SnapshotView snapshot = Blockchain.Singleton.GetSnapshot();
        BigInteger balance = NativeContract.GAS.BalanceOf(snapshot, sender);
        ApplicationEngine applicationEngine = new ApplicationEngine(TriggerType.All, block, snapshot, (long)balance);
        NativeContract.GAS.Burn(applicationEngine, sender, balance);
        NativeContract.GAS.Mint(applicationEngine, sender, txFee * 30); // Set the balance to meet 30 txs only
        // Persist block and reverify all the txs in mempool, but half of the txs will be discarded
        _unit.UpdatePoolForBlockPersisted(block, snapshot);
        _unit.SortedTxCount.Should().Be(30);
        _unit.UnverifiedSortedTxCount.Should().Be(0);
        // Revert the balance
        NativeContract.GAS.Burn(applicationEngine, sender, txFee * 30);
        NativeContract.GAS.Mint(applicationEngine, sender, balance);
    }

    // Asserts the pool's sort order: descending FeePerByte, then descending NetworkFee,
    // then ascending Hash as the final tie-breaker.
    private void VerifyTransactionsSortedDescending(IEnumerable<Transaction> transactions)
    {
        Transaction lastTransaction = null;
        foreach (var tx in transactions)
        {
            if (lastTransaction != null)
            {
                if (lastTransaction.FeePerByte == tx.FeePerByte)
                {
                    if (lastTransaction.NetworkFee == tx.NetworkFee)
                        lastTransaction.Hash.Should().BeLessThan(tx.Hash);
                    else
                        lastTransaction.NetworkFee.Should().BeGreaterThan(tx.NetworkFee);
                }
                else
                {
                    lastTransaction.FeePerByte.Should().BeGreaterThan(tx.FeePerByte);
                }
            }
            lastTransaction = tx;
        }
    }

    [TestMethod]
    public void VerifySortOrderAndThatHighetFeeTransactionsAreReverifiedFirst()
    {
        AddTransactions(100);
        var sortedVerifiedTxs = _unit.GetSortedVerifiedTransactions().ToList();
        // verify all 100 transactions are returned in sorted order
        sortedVerifiedTxs.Count.Should().Be(100);
        VerifyTransactionsSortedDescending(sortedVerifiedTxs);
        // move all to unverified
        var block = new Block { Transactions = new Transaction[0] };
        _unit.UpdatePoolForBlockPersisted(block, Blockchain.Singleton.GetSnapshot());
        _unit.InvalidateVerifiedTransactions();
        _unit.SortedTxCount.Should().Be(0);
        _unit.UnverifiedSortedTxCount.Should().Be(100);
        // We can verify the order they are re-verified by reverifying 2 at a time
        while (_unit.UnVerifiedCount > 0)
        {
            _unit.GetVerifiedAndUnverifiedTransactions(out var sortedVerifiedTransactions, out var sortedUnverifiedTransactions);
            sortedVerifiedTransactions.Count().Should().Be(0);
            var sortedUnverifiedArray = sortedUnverifiedTransactions.ToArray();
            VerifyTransactionsSortedDescending(sortedUnverifiedArray);
            var maxTransaction = sortedUnverifiedArray.First();
            var minTransaction = sortedUnverifiedArray.Last();
            // reverify 1 high priority and 1 low priority transaction
            _unit.ReVerifyTopUnverifiedTransactionsIfNeeded(1, Blockchain.Singleton.GetSnapshot());
            var verifiedTxs = _unit.GetSortedVerifiedTransactions().ToArray();
            verifiedTxs.Length.Should().Be(1);
            verifiedTxs[0].Should().BeEquivalentTo(maxTransaction);
            var blockWith2Tx = new Block { Transactions = new[] { maxTransaction, minTransaction } };
            // verify and remove the 2 transactions from the verified pool
            _unit.UpdatePoolForBlockPersisted(blockWith2Tx, Blockchain.Singleton.GetSnapshot());
            _unit.InvalidateVerifiedTransactions();
            _unit.SortedTxCount.Should().Be(0);
        }
        _unit.UnverifiedSortedTxCount.Should().Be(0);
    }

    // With the pool full, a tx paying less than the current cheapest tx must be
    // rejected and one paying more must be accepted.
    void VerifyCapacityThresholdForAttemptingToAddATransaction()
    {
        var sortedVerified = _unit.GetSortedVerifiedTransactions().ToArray();
        var txBarelyWontFit = CreateTransactionWithFee(sortedVerified.Last().NetworkFee - 1);
        _unit.CanTransactionFitInPool(txBarelyWontFit).Should().Be(false);
        var txBarelyFits = CreateTransactionWithFee(sortedVerified.Last().NetworkFee + 1);
        _unit.CanTransactionFitInPool(txBarelyFits).Should().Be(true);
    }

    [TestMethod]
    public void VerifyCanTransactionFitInPoolWorksAsIntended()
    {
        AddTransactions(100);
        VerifyCapacityThresholdForAttemptingToAddATransaction();
        AddTransactions(50);
        VerifyCapacityThresholdForAttemptingToAddATransaction();
        AddTransactions(50);
        VerifyCapacityThresholdForAttemptingToAddATransaction();
    }

    [TestMethod]
    public void CapacityTestWithUnverifiedHighProirtyTransactions()
    {
        // Verify that unverified high priority transactions will not be pushed out of the queue by incoming
        // low priority transactions
        // Fill pool with high priority transactions
        AddTransactions(99);
        // move all to unverified
        var block = new Block { Transactions = new Transaction[0] };
        _unit.UpdatePoolForBlockPersisted(block, Blockchain.Singleton.GetSnapshot());
        _unit.CanTransactionFitInPool(CreateTransaction()).Should().Be(true);
        AddTransactions(1);
        _unit.CanTransactionFitInPool(CreateTransactionWithFee(0)).Should().Be(false);
    }

    [TestMethod]
    public void TestInvalidateAll()
    {
        AddTransactions(30);
        _unit.UnverifiedSortedTxCount.Should().Be(0);
        _unit.SortedTxCount.Should().Be(30);
        _unit.InvalidateAllTransactions();
        _unit.UnverifiedSortedTxCount.Should().Be(30);
        _unit.SortedTxCount.Should().Be(0);
    }

    [TestMethod]
    public void TestContainsKey()
    {
        AddTransactions(10);
        var txToAdd = CreateTransaction();
        _unit.TryAdd(txToAdd.Hash, txToAdd);
        _unit.ContainsKey(txToAdd.Hash).Should().BeTrue();
        // ContainsKey must also find txs that were moved to the unverified set.
        _unit.InvalidateVerifiedTransactions();
        _unit.ContainsKey(txToAdd.Hash).Should().BeTrue();
    }

    [TestMethod]
    public void TestGetEnumerator()
    {
        AddTransactions(10);
        _unit.InvalidateVerifiedTransactions();
        IEnumerator<Transaction> enumerator = _unit.GetEnumerator();
        foreach (Transaction tx in _unit)
        {
            enumerator.MoveNext();
            enumerator.Current.Should().BeSameAs(tx);
        }
    }

    [TestMethod]
    public void TestIEnumerableGetEnumerator()
    {
        AddTransactions(10);
        _unit.InvalidateVerifiedTransactions();
        IEnumerable enumerable = _unit;
        var enumerator = enumerable.GetEnumerator();
        foreach (Transaction tx in _unit)
        {
            enumerator.MoveNext();
            enumerator.Current.Should().BeSameAs(tx);
        }
    }

    [TestMethod]
    public void TestGetVerifiedTransactions()
    {
        var tx1 = CreateTransaction();
        var tx2 = CreateTransaction();
        // tx1 is invalidated (moved to unverified) before tx2 is added, so only tx2 is verified.
        _unit.TryAdd(tx1.Hash, tx1);
        _unit.InvalidateVerifiedTransactions();
        _unit.TryAdd(tx2.Hash, tx2);
        IEnumerable<Transaction> enumerable = _unit.GetVerifiedTransactions();
        enumerable.Count().Should().Be(1);
        var enumerator = enumerable.GetEnumerator();
        enumerator.MoveNext();
        enumerator.Current.Should().BeSameAs(tx2);
    }

    /// <summary>
    /// Exercises the "if needed" part: reverification proceeds while there is still
    /// room beneath the per-block transaction limit (512) and returns false once
    /// further reverification is no longer needed.
    /// </summary>
    [TestMethod]
    public void TestReVerifyTopUnverifiedTransactionsIfNeeded()
    {
        _unit = new MemoryPool(TestBlockchain.TheNeoSystem, 600);
        _unit.LoadPolicy(Blockchain.Singleton.GetSnapshot());
        AddTransaction(CreateTransaction(100000001));
        AddTransaction(CreateTransaction(100000001));
        AddTransaction(CreateTransaction(100000001));
        AddTransaction(CreateTransaction(1));
        _unit.VerifiedCount.Should().Be(4);
        _unit.UnVerifiedCount.Should().Be(0);
        _unit.InvalidateVerifiedTransactions();
        _unit.VerifiedCount.Should().Be(0);
        _unit.UnVerifiedCount.Should().Be(4);
        AddTransactions(511); // Max per block currently is 512
        _unit.VerifiedCount.Should().Be(511);
        _unit.UnVerifiedCount.Should().Be(4);
        var result = _unit.ReVerifyTopUnverifiedTransactionsIfNeeded(1, Blockchain.Singleton.GetSnapshot());
        result.Should().BeTrue();
        _unit.VerifiedCount.Should().Be(512);
        _unit.UnVerifiedCount.Should().Be(3);
        result = _unit.ReVerifyTopUnverifiedTransactionsIfNeeded(2, Blockchain.Singleton.GetSnapshot());
        result.Should().BeTrue();
        _unit.VerifiedCount.Should().Be(514);
        _unit.UnVerifiedCount.Should().Be(1);
        result = _unit.ReVerifyTopUnverifiedTransactionsIfNeeded(3, Blockchain.Singleton.GetSnapshot());
        result.Should().BeFalse();
        _unit.VerifiedCount.Should().Be(515);
        _unit.UnVerifiedCount.Should().Be(0);
    }

    [TestMethod]
    public void TestTryAdd()
    {
        var tx1 = CreateTransaction();
        _unit.TryAdd(tx1.Hash, tx1).Should().BeTrue();
        // Duplicate hash is rejected; a zero-capacity pool rejects everything.
        _unit.TryAdd(tx1.Hash, tx1).Should().BeFalse();
        _unit2.TryAdd(tx1.Hash, tx1).Should().BeFalse();
    }

    [TestMethod]
    public void TestTryGetValue()
    {
        var tx1 = CreateTransaction();
        _unit.TryAdd(tx1.Hash, tx1);
        _unit.TryGetValue(tx1.Hash, out Transaction tx).Should().BeTrue();
        tx.Should().BeEquivalentTo(tx1);
        // Lookup must still succeed after the tx moves to the unverified set.
        _unit.InvalidateVerifiedTransactions();
        _unit.TryGetValue(tx1.Hash, out tx).Should().BeTrue();
        tx.Should().BeEquivalentTo(tx1);
        var tx2 = CreateTransaction();
        _unit.TryGetValue(tx2.Hash, out tx).Should().BeFalse();
    }

    /// <summary>
    /// Seeds the Policy contract's MaxTransactionsPerBlock and FeePerByte storage
    /// entries, then checks that persisting a block removes its txs from the pool.
    /// </summary>
    [TestMethod]
    public void TestUpdatePoolForBlockPersisted()
    {
        var snapshot = Blockchain.Singleton.GetSnapshot();
        byte[] transactionsPerBlock = { 0x18, 0x00, 0x00, 0x00 }; // 24
        byte[] feePerByte = { 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00 }; // 1048576
        StorageItem item1 = new StorageItem
        {
            Value = transactionsPerBlock
        };
        StorageItem item2 = new StorageItem
        {
            Value = feePerByte
        };
        var key1 = CreateStorageKey(Prefix_MaxTransactionsPerBlock);
        var key2 = CreateStorageKey(Prefix_FeePerByte);
        key1.ScriptHash = NativeContract.Policy.Hash;
        key2.ScriptHash = NativeContract.Policy.Hash;
        snapshot.Storages.Add(key1, item1);
        snapshot.Storages.Add(key2, item2);
        var tx1 = CreateTransaction();
        var tx2 = CreateTransaction();
        Transaction[] transactions = { tx1, tx2 };
        _unit.TryAdd(tx1.Hash, tx1);
        var block = new Block { Transactions = transactions };
        _unit.UnVerifiedCount.Should().Be(0);
        _unit.VerifiedCount.Should().Be(1);
        _unit.UpdatePoolForBlockPersisted(block, snapshot);
        _unit.UnVerifiedCount.Should().Be(0);
        _unit.VerifiedCount.Should().Be(0);
    }

    // Builds a StorageKey whose Key is the prefix byte followed by the optional key
    // bytes; ScriptHash is left null for the caller to fill in.
    public StorageKey CreateStorageKey(byte prefix, byte[] key = null)
    {
        StorageKey storageKey = new StorageKey
        {
            ScriptHash = null,
            Key = new byte[sizeof(byte) + (key?.Length ?? 0)]
        };
        storageKey.Key[0] = prefix;
        if (key != null)
            Buffer.BlockCopy(key, 0, storageKey.Key, 1, key.Length);
        return storageKey;
    }
}
}
| |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEditor;
using System.IO;
public class AndroidVideoEditorUtil
{
private static readonly string videoPlayerFileName = "Assets/Oculus/SampleFramework/Core/Video/Plugins/Android/java/com/oculus/videoplayer/NativeVideoPlayer.java";
private static readonly string disabledPlayerFileName = videoPlayerFileName + ".DISABLED";
#if !UNITY_2018_2_OR_NEWER
private static readonly string gradleSourceSetPath = "$projectDir/../../Assets/Oculus/SampleFramework/Core/Video/Plugins/Android/java";
#endif
private static readonly string audio360PluginPath = "Assets/Oculus/SampleFramework/Core/Video/Plugins/Android/Audio360/audio360.aar";
private static readonly string audio360Exo29PluginPath = "Assets/Oculus/SampleFramework/Core/Video/Plugins/Android/Audio360/audio360-exo29.aar";
private static readonly string gradleTemplatePath = "Assets/Plugins/Android/mainTemplate.gradle";
private static readonly string disabledGradleTemplatePath = gradleTemplatePath + ".DISABLED";
private static readonly string internalGradleTemplatePath = Path.Combine(Path.Combine(GetBuildToolsDirectory(BuildTarget.Android), "GradleTemplates"), "mainTemplate.gradle");
/// <summary>
/// Resolves Unity's build-tools directory for the given build target by invoking the
/// non-public static BuildPipeline.GetBuildToolsDirectory(BuildTarget) via reflection
/// (no public API exposes this path). NOTE(review): relies on a Unity internal method
/// name — may break in a future Unity version; confirm against the installed editor.
/// </summary>
private static string GetBuildToolsDirectory(BuildTarget bt)
{
    return (string)(typeof(BuildPipeline).GetMethod("GetBuildToolsDirectory", System.Reflection.BindingFlags.Static | System.Reflection.BindingFlags.NonPublic).Invoke(null, new object[] { bt }));
}
/// <summary>
/// Switches the project to the native Android video player: restores the
/// disabled Java player source, marks the Audio360 plugins as
/// Android-compatible, forces the Gradle build system, and patches
/// mainTemplate.gradle (creating it from Unity's internal template if the
/// project has none) with the ExoPlayer dependency, Java 1.8 compile
/// options, and — on pre-2018.2 Unity — a sourceSets entry for the Java
/// plugin sources.
/// </summary>
[MenuItem("Oculus/Video/Enable Native Android Video Player")]
public static void EnableNativeVideoPlayer()
{
    // rename NativeJavaPlayer.java.DISABLED to NativeJavaPlayer.java
    if (File.Exists(disabledPlayerFileName))
    {
        File.Move(disabledPlayerFileName, videoPlayerFileName);
        File.Move(disabledPlayerFileName + ".meta", videoPlayerFileName + ".meta");
    }
    AssetDatabase.ImportAsset(videoPlayerFileName);
    AssetDatabase.DeleteAsset(disabledPlayerFileName);
    // Enable audio plugins
    PluginImporter audio360 = (PluginImporter)AssetImporter.GetAtPath(audio360PluginPath);
    PluginImporter audio360exo29 = (PluginImporter)AssetImporter.GetAtPath(audio360Exo29PluginPath);
    if (audio360 != null && audio360exo29 != null)
    {
        audio360.SetCompatibleWithPlatform(BuildTarget.Android, true);
        audio360exo29.SetCompatibleWithPlatform(BuildTarget.Android, true);
        audio360.SaveAndReimport();
        audio360exo29.SaveAndReimport();
    }
    // Enable gradle build with exoplayer
    EditorUserBuildSettings.androidBuildSystem = AndroidBuildSystem.Gradle;
    if (!File.Exists(gradleTemplatePath))
    {
        // Consistency fix: use the disabledGradleTemplatePath constant here
        // instead of rebuilding the identical path via string concatenation.
        if (File.Exists(disabledGradleTemplatePath))
        {
            File.Move(disabledGradleTemplatePath, gradleTemplatePath);
            File.Move(disabledGradleTemplatePath + ".meta", gradleTemplatePath + ".meta");
        }
        else
        {
            // No template in the project at all: seed it from Unity's internal copy.
            File.Copy(internalGradleTemplatePath, gradleTemplatePath);
        }
        AssetDatabase.ImportAsset(gradleTemplatePath);
    }
    // parse the gradle file to check the current version:
    string currentFile = File.ReadAllText(gradleTemplatePath);
    List<string> lines = new List<string>(currentFile.Split('\n'));
    var gradleVersion = new System.Text.RegularExpressions.Regex("com.android.tools.build:gradle:([0-9]+\\.[0-9]+\\.[0-9]+)").Match(currentFile).Groups[1].Value;
    if (gradleVersion == "2.3.0")
    {
        // Old gradle-plugin templates lack the repositories that host
        // ExoPlayer; add google() to buildscript/repositories ...
        int buildscriptRepositories = GoToSection("buildscript.repositories", lines);
        if (FindInScope("google\\(\\)", buildscriptRepositories + 1, lines) == -1)
        {
            lines.Insert(GetScopeEnd(buildscriptRepositories + 1, lines), "\t\tgoogle()");
        }
        // ... and google() and jcenter() to allprojects/repositories.
        int allprojectsRepositories = GoToSection("allprojects.repositories", lines);
        if (FindInScope("google\\(\\)", allprojectsRepositories + 1, lines) == -1)
        {
            lines.Insert(GetScopeEnd(allprojectsRepositories + 1, lines), "\t\tgoogle()");
        }
        if (FindInScope("jcenter\\(\\)", allprojectsRepositories + 1, lines) == -1)
        {
            lines.Insert(GetScopeEnd(allprojectsRepositories + 1, lines), "\t\tjcenter()");
        }
    }
    // add "compile 'com.google.android.exoplayer:exoplayer:2.9.5'" to dependencies
    int dependencies = GoToSection("dependencies", lines);
    if (FindInScope("com\\.google\\.android\\.exoplayer:exoplayer", dependencies + 1, lines) == -1)
    {
        lines.Insert(GetScopeEnd(dependencies + 1, lines), "\tcompile 'com.google.android.exoplayer:exoplayer:2.9.5'");
    }
    int android = GoToSection("android", lines);
    // add compileOptions for Java 1.8 compatibility (required by ExoPlayer)
    if (FindInScope("compileOptions", android + 1, lines) == -1)
    {
        int compileOptionsIndex = GetScopeEnd(android + 1, lines);
        // Inserted in reverse order so each line lands at the same index.
        lines.Insert(compileOptionsIndex, "\t}");
        lines.Insert(compileOptionsIndex, "\t\ttargetCompatibility JavaVersion.VERSION_1_8");
        lines.Insert(compileOptionsIndex, "\t\tsourceCompatibility JavaVersion.VERSION_1_8");
        lines.Insert(compileOptionsIndex, "\tcompileOptions {");
    }
    // add sourceSets if Version < 2018.2
#if !UNITY_2018_2_OR_NEWER
    if (FindInScope("sourceSets\\.main\\.java\\.srcDir", android + 1, lines) == -1)
    {
        lines.Insert(GetScopeEnd(android + 1, lines), "\tsourceSets.main.java.srcDir \"" + gradleSourceSetPath + "\"");
    }
#endif
    File.WriteAllText(gradleTemplatePath, string.Join("\n", lines.ToArray()));
}
/// <summary>
/// Reverts EnableNativeVideoPlayer: renames the Java player source to its
/// ".DISABLED" form, marks the Audio360 plugins as not Android-compatible,
/// and removes the ExoPlayer dependency and sourceSets entry from
/// mainTemplate.gradle. Other gradle edits made by the enable step are left
/// in place on purpose (they are harmless).
/// </summary>
[MenuItem("Oculus/Video/Disable Native Android Video Player")]
public static void DisableNativeVideoPlayer()
{
// Rename NativeJavaPlayer.java (and its .meta) to the .DISABLED form so
// Unity stops importing/compiling it.
if (File.Exists(videoPlayerFileName))
{
File.Move(videoPlayerFileName, disabledPlayerFileName);
File.Move(videoPlayerFileName + ".meta", disabledPlayerFileName + ".meta");
}
AssetDatabase.ImportAsset(disabledPlayerFileName);
AssetDatabase.DeleteAsset(videoPlayerFileName);
// Disable audio plugins
PluginImporter audio360 = (PluginImporter)AssetImporter.GetAtPath(audio360PluginPath);
PluginImporter audio360exo29 = (PluginImporter)AssetImporter.GetAtPath(audio360Exo29PluginPath);
if (audio360 != null && audio360exo29 != null)
{
audio360.SetCompatibleWithPlatform(BuildTarget.Android, false);
audio360exo29.SetCompatibleWithPlatform(BuildTarget.Android, false);
audio360.SaveAndReimport();
audio360exo29.SaveAndReimport();
}
// remove exoplayer and sourcesets from gradle file (leave other parts since they are harmless).
if (File.Exists(gradleTemplatePath))
{
// parse the gradle file to check the current version:
string currentFile = File.ReadAllText(gradleTemplatePath);
List<string> lines = new List<string>(currentFile.Split('\n'));
// Drop the ExoPlayer line inserted into the dependencies section.
int dependencies = GoToSection("dependencies", lines);
int exoplayer = FindInScope("com\\.google\\.android\\.exoplayer:exoplayer", dependencies + 1, lines);
if (exoplayer != -1)
{
lines.RemoveAt(exoplayer);
}
// Drop the sourceSets line inserted into the android section.
int android = GoToSection("android", lines);
int sourceSets = FindInScope("sourceSets\\.main\\.java\\.srcDir", android + 1, lines);
if (sourceSets != -1)
{
lines.RemoveAt(sourceSets);
}
File.WriteAllText(gradleTemplatePath, string.Join("\n", lines.ToArray()));
}
}
/// <summary>
/// Convenience overload: finds a (possibly dotted) gradle section starting
/// the search from the top of the file. Returns the line index of the
/// section's opening line, or -1 if not found.
/// </summary>
private static int GoToSection(string section, List<string> lines)
{
    const int topOfFile = 0;
    return GoToSection(section, topOfFile, lines);
}
/// <summary>
/// Walks a dot-separated chain of nested gradle sections (e.g.
/// "allprojects.repositories"), matching each segment as "name {" within the
/// scope found for the previous segment. Returns the line index of the
/// innermost section's opening line, or -1 when a segment is not found.
/// </summary>
private static int GoToSection(string section, int start, List<string> lines)
{
    int position = start - 1;
    foreach (string name in section.Split('.'))
    {
        position = FindInScope("\\s*" + name + "\\s*\\{\\s*", position + 1, lines);
    }
    return position;
}
/// <summary>
/// Searches for a regex match on lines belonging to the current brace scope,
/// starting at <paramref name="start"/>. Lines inside nested "{...}" scopes
/// are skipped, and the search stops once the enclosing scope closes. The
/// match test runs before a line's braces are tallied, so a match on the
/// scope's own closing line is still found. Returns the matching line index,
/// or -1 if there is no match in scope.
/// </summary>
private static int FindInScope(string search, int start, List<string> lines)
{
    var pattern = new System.Text.RegularExpressions.Regex(search);
    int nesting = 0;
    for (int lineIndex = start; lineIndex < lines.Count; lineIndex++)
    {
        string line = lines[lineIndex];
        // Only consider lines at the current nesting level.
        if (nesting == 0 && pattern.IsMatch(line))
        {
            return lineIndex;
        }
        // Track brace balance; leaving the enclosing scope ends the search.
        if (line.Contains("{"))
        {
            nesting++;
        }
        if (line.Contains("}"))
        {
            nesting--;
        }
        if (nesting < 0)
        {
            break;
        }
    }
    return -1;
}
/// <summary>
/// Returns the index of the line that closes the scope containing
/// <paramref name="start"/> — the first line where the running brace balance
/// goes negative — or -1 if the scope never closes.
/// </summary>
private static int GetScopeEnd(int start, List<string> lines)
{
    int nesting = 0;
    for (int lineIndex = start; lineIndex < lines.Count; lineIndex++)
    {
        string line = lines[lineIndex];
        // Track brace balance on each line.
        if (line.Contains("{"))
        {
            nesting++;
        }
        if (line.Contains("}"))
        {
            nesting--;
        }
        // First line that closes more scopes than were opened ends ours.
        if (nesting < 0)
        {
            return lineIndex;
        }
    }
    return -1;
}
}
| |
/*******************************************************************************
* Copyright 2008-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and
* limitations under the License.
* *****************************************************************************
* __ _ _ ___
* ( )( \/\/ )/ __)
* /__\ \ / \__ \
* (_)(_) \/\/ (___/
*
* AWS SDK for .NET
* API Version: 2012-11-05
*/
using System;
using System.Net;
using Amazon.Util;
namespace Amazon.SQS
{
/// <summary>
/// Configuration for accessing Amazon SQS service
/// </summary>
public class AmazonSQSConfig
{
    private string _serviceVersion = "2012-11-05";
    private string _authRegion = null;
    private RegionEndpoint _regionEndpoint;
    private string _serviceUrl = "https://queue.amazonaws.com";
    private string _userAgent = Amazon.Util.AWSSDKUtils.SDKUserAgent;
    private string _signatureVersion = "4";
    private string _signatureMethod = "HmacSHA256";
    private string _proxyHost;
    private int _proxyPort = -1;
    private int _maxErrorRetry = 3;
    private bool _useSecureString = true;
    private string _proxyUsername;
    private string _proxyPassword;
    private int? _connectionLimit;
    private ICredentials _proxyCredentials;

    /// <summary>
    /// Gets the SQS API version this configuration targets.
    /// </summary>
    public string ServiceVersion
    {
        get { return _serviceVersion; }
    }

    /// <summary>
    /// Gets or sets the request signing algorithm name.
    /// </summary>
    public string SignatureMethod
    {
        get { return _signatureMethod; }
        set { _signatureMethod = value; }
    }

    /// <summary>
    /// Sets the SignatureMethod property and returns this instance
    /// for call chaining.
    /// </summary>
    /// <param name="signatureMethod">SignatureMethod property</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public AmazonSQSConfig WithSignatureMethod(string signatureMethod)
    {
        _signatureMethod = signatureMethod;
        return this;
    }

    /// <summary>
    /// Checks if SignatureMethod property is set
    /// </summary>
    /// <returns>true if SignatureMethod property is set</returns>
    public bool IsSetSignatureMethod()
    {
        return _signatureMethod != null;
    }

    /// <summary>
    /// Gets or sets the AWS signature version used to sign requests.
    /// </summary>
    public string SignatureVersion
    {
        get { return _signatureVersion; }
        set { _signatureVersion = value; }
    }

    /// <summary>
    /// Sets the SignatureVersion property and returns this instance
    /// for call chaining.
    /// </summary>
    /// <param name="signatureVersion">SignatureVersion property</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public AmazonSQSConfig WithSignatureVersion(string signatureVersion)
    {
        _signatureVersion = signatureVersion;
        return this;
    }

    /// <summary>
    /// Checks if SignatureVersion property is set
    /// </summary>
    /// <returns>true if SignatureVersion property is set</returns>
    public bool IsSetSignatureVersion()
    {
        return _signatureVersion != null;
    }

    /// <summary>
    /// Gets or sets the User-Agent string sent with requests.
    /// </summary>
    public string UserAgent
    {
        get { return _userAgent; }
        set { _userAgent = value; }
    }

    /// <summary>
    /// Sets the UserAgent property and returns this instance
    /// for call chaining.
    /// </summary>
    /// <param name="userAgent">UserAgent property</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public AmazonSQSConfig WithUserAgent(string userAgent)
    {
        _userAgent = userAgent;
        return this;
    }

    /// <summary>
    /// Checks if UserAgent property is set
    /// </summary>
    /// <returns>true if UserAgent property is set</returns>
    public bool IsSetUserAgent()
    {
        return _userAgent != null;
    }

    /// <summary>
    /// Gets or sets the RegionEndpoint property. The region constant that
    /// determines the endpoint to use. If this is not set
    /// then the client will fall back to the value of ServiceURL.
    /// </summary>
    public RegionEndpoint RegionEndpoint
    {
        get { return _regionEndpoint; }
        set { _regionEndpoint = value; }
    }

    /// <summary>
    /// The constant used to look up the endpoint in the region hash.
    /// </summary>
    internal string RegionEndpointServiceName
    {
        get { return "sqs"; }
    }

    /// <summary>
    /// Gets or sets the AuthenticationRegion property.
    /// Used in AWS4 request signing; this is an optional property —
    /// change it only if the region cannot be determined from the
    /// service endpoint.
    /// </summary>
    public string AuthenticationRegion
    {
        get { return _authRegion; }
        set { _authRegion = value; }
    }

    /// <summary>
    /// Gets or sets the service endpoint URL.
    /// This is optional; change it only to target a different
    /// endpoint or to switch between https and http.
    /// </summary>
    public string ServiceURL
    {
        get { return _serviceUrl; }
        set { _serviceUrl = value; }
    }

    /// <summary>
    /// Sets the ServiceURL property and returns this instance
    /// for call chaining.
    /// </summary>
    /// <param name="serviceURL">ServiceURL property</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public AmazonSQSConfig WithServiceURL(string serviceURL)
    {
        _serviceUrl = serviceURL;
        return this;
    }

    /// <summary>
    /// Checks if ServiceURL property is set
    /// </summary>
    /// <returns>true if ServiceURL property is set</returns>
    public bool IsSetServiceURL()
    {
        return _serviceUrl != null;
    }

    /// <summary>
    /// Gets or sets the host name of the proxy server, if any.
    /// </summary>
    public string ProxyHost
    {
        get { return _proxyHost; }
        set { _proxyHost = value; }
    }

    /// <summary>
    /// Sets the ProxyHost property and returns this instance
    /// for call chaining.
    /// </summary>
    /// <param name="proxyHost">ProxyHost property</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public AmazonSQSConfig WithProxyHost(string proxyHost)
    {
        _proxyHost = proxyHost;
        return this;
    }

    /// <summary>
    /// Checks if ProxyHost property is set
    /// </summary>
    /// <returns>true if ProxyHost property is set</returns>
    public bool IsSetProxyHost()
    {
        return _proxyHost != null;
    }

    /// <summary>
    /// Gets or sets the port of the proxy server (-1 means unset).
    /// </summary>
    public int ProxyPort
    {
        get { return _proxyPort; }
        set { _proxyPort = value; }
    }

    /// <summary>
    /// Sets the ProxyPort property and returns this instance
    /// for call chaining.
    /// </summary>
    /// <param name="proxyPort">ProxyPort property</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public AmazonSQSConfig WithProxyPort(int proxyPort)
    {
        _proxyPort = proxyPort;
        return this;
    }

    /// <summary>
    /// Checks if ProxyPort property is set
    /// </summary>
    /// <returns>true if ProxyPort property is set</returns>
    public bool IsSetProxyPort()
    {
        return _proxyPort >= 0;
    }

    /// <summary>
    /// Gets or sets the maximum number of retries on recoverable errors.
    /// </summary>
    public int MaxErrorRetry
    {
        get { return _maxErrorRetry; }
        set { _maxErrorRetry = value; }
    }

    /// <summary>
    /// Sets the MaxErrorRetry property and returns this instance
    /// for call chaining.
    /// </summary>
    /// <param name="maxErrorRetry">MaxErrorRetry property</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public AmazonSQSConfig WithMaxErrorRetry(int maxErrorRetry)
    {
        _maxErrorRetry = maxErrorRetry;
        return this;
    }

    /// <summary>
    /// Checks if MaxErrorRetry property is set
    /// </summary>
    /// <returns>true if MaxErrorRetry property is set</returns>
    public bool IsSetMaxErrorRetry()
    {
        return _maxErrorRetry >= 0;
    }

    /// <summary>
    /// Gets or sets the UseSecureStringForAwsSecretKey property.
    /// By default (true), the AWS Secret Access Key is stored in a
    /// SecureString — one of the secure ways the .NET Framework provides
    /// to hold a secret. SecureStrings are not supported in Medium Trust
    /// Windows hosting environments, however; if you are building an
    /// ASP.NET application that must run with Medium Trust, set this to
    /// false and the client will not keep your AWS Secret Key in a secure
    /// string. Setting this to false can leave the Secret Key vulnerable;
    /// use the property judiciously.
    /// </summary>
    /// <remarks>Storing the AWS Secret Access Key is not
    /// recommended unless absolutely necessary.
    /// </remarks>
    /// <seealso cref="T:System.Security.SecureString"/>
    public bool UseSecureStringForAwsSecretKey
    {
        get { return _useSecureString; }
        set { _useSecureString = value; }
    }

    /// <summary>
    /// Sets the UseSecureStringForAwsSecretKey property and returns this
    /// instance for call chaining. See the property documentation for the
    /// Medium Trust caveats around SecureString usage.
    /// </summary>
    /// <param name="fSecure">
    /// Whether a secure string should be used or not.
    /// </param>
    /// <returns>The Config object with the property set</returns>
    /// <remarks>Storing the AWS Secret Access Key is not
    /// recommended unless absolutely necessary.
    /// </remarks>
    /// <seealso cref="T:System.Security.SecureString"/>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public AmazonSQSConfig WithUseSecureStringForAwsSecretKey(bool fSecure)
    {
        _useSecureString = fSecure;
        return this;
    }

    /// <summary>
    /// Gets or sets the ProxyUsername property, used together with
    /// ProxyPassword to authenticate against the configured proxy server.
    /// </summary>
    [Obsolete("Use ProxyCredentials instead")]
    public string ProxyUsername
    {
        get { return _proxyUsername; }
        set { _proxyUsername = value; }
    }

    /// <summary>
    /// Sets the ProxyUsername property and returns this instance
    /// for call chaining.
    /// </summary>
    /// <param name="userName">Value for the ProxyUsername property</param>
    /// <returns>this instance</returns>
    [Obsolete("Use WithProxyCredentials instead")]
    public AmazonSQSConfig WithProxyUsername(string userName)
    {
        _proxyUsername = userName;
        return this;
    }

    /// <summary>
    /// Checks if ProxyUsername property is set
    /// </summary>
    /// <returns>true if ProxyUsername property is set</returns>
    internal bool IsSetProxyUsername()
    {
        return !string.IsNullOrEmpty(_proxyUsername);
    }

    /// <summary>
    /// Gets or sets the ProxyPassword property, used together with
    /// ProxyUsername to authenticate against the configured proxy server.
    /// </summary>
    /// <remarks>
    /// If this property isn't set, String.Empty is used as
    /// the proxy password. This property isn't
    /// used if ProxyUsername is null or empty.
    /// </remarks>
    [Obsolete("Use ProxyCredentials instead")]
    public string ProxyPassword
    {
        get { return _proxyPassword; }
        set { _proxyPassword = value; }
    }

    /// <summary>
    /// Sets the ProxyPassword property and returns this instance
    /// for call chaining.
    /// </summary>
    /// <remarks>
    /// If this property isn't set, String.Empty is used as
    /// the proxy password. This property isn't
    /// used if ProxyUsername is null or empty.
    /// </remarks>
    /// <param name="password">ProxyPassword property</param>
    /// <returns>this instance</returns>
    [Obsolete("Use WithProxyCredentials instead")]
    public AmazonSQSConfig WithProxyPassword(string password)
    {
        _proxyPassword = password;
        return this;
    }

    /// <summary>
    /// Checks if ProxyPassword property is set
    /// </summary>
    /// <returns>true if ProxyPassword property is set</returns>
    internal bool IsSetProxyPassword()
    {
        return !string.IsNullOrEmpty(_proxyPassword);
    }

    /// <summary>
    /// Credentials to use with a proxy.
    /// </summary>
    public ICredentials ProxyCredentials
    {
        get
        {
            // Fall back to the (obsolete) username/password pair when no
            // ICredentials instance was supplied directly.
            ICredentials credentials = _proxyCredentials;
            if (credentials == null && IsSetProxyUsername())
            {
                credentials = new NetworkCredential(_proxyUsername, _proxyPassword ?? String.Empty);
            }
            return credentials;
        }
        set { _proxyCredentials = value; }
    }

    /// <summary>
    /// Sets the ProxyCredentials property and returns this instance
    /// for call chaining.
    /// </summary>
    /// <param name="proxyCredentials">ProxyCredentials property</param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public AmazonSQSConfig WithProxyCredentials(ICredentials proxyCredentials)
    {
        _proxyCredentials = proxyCredentials;
        return this;
    }

    /// <summary>
    /// Checks if ProxyCredentials property is set
    /// </summary>
    /// <returns>true if ProxyCredentials property is set</returns>
    internal bool IsSetProxyCredentials()
    {
        return this.ProxyCredentials != null;
    }

    /// <summary>
    /// Gets or sets the connection limit applied to the ServicePoint for the
    /// WebRequest. The default is 50 connections unless
    /// ServicePointManager.DefaultConnectionLimit is set, in which case that
    /// value is used as the default.
    /// </summary>
    public int ConnectionLimit
    {
        get { return AWSSDKUtils.GetConnectionLimit(_connectionLimit); }
        set { _connectionLimit = value; }
    }
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Sandboxable.Microsoft.Azure.KeyVault.Internal;
namespace Sandboxable.Microsoft.Azure.KeyVault.Internal
{
/// <summary>
/// Tool-generated convenience wrappers over ISecretOperations. Each
/// operation has a synchronous variant (which starts the async call via
/// Task.Factory.StartNew on TaskScheduler.Default, unwraps the inner task
/// and blocks on its result) and a cancellation-free async variant that
/// forwards to the cancellable overload with CancellationToken.None.
/// Do not hand-edit the generated bodies; they will be lost on regeneration.
/// </summary>
internal static partial class SecretOperationsExtensions
{
/// <summary>
/// Delete the specified secret
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='secretIdentifier'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static SecretResponseMessageWithRawJsonContent Delete(this ISecretOperations operations, string secretIdentifier)
{
// Blocking wrapper: queues the async call to TaskScheduler.Default and
// waits for its result.
return Task.Factory.StartNew((object s) =>
{
return ((ISecretOperations)s).DeleteAsync(secretIdentifier);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Delete the specified secret
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='secretIdentifier'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static Task<SecretResponseMessageWithRawJsonContent> DeleteAsync(this ISecretOperations operations, string secretIdentifier)
{
// Forwards to the cancellable overload with no cancellation.
return operations.DeleteAsync(secretIdentifier, CancellationToken.None);
}
/// <summary>
/// Gets a secret
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='secretIdentifier'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static SecretResponseMessageWithRawJsonContent Get(this ISecretOperations operations, string secretIdentifier)
{
// Blocking wrapper: queues the async call to TaskScheduler.Default and
// waits for its result.
return Task.Factory.StartNew((object s) =>
{
return ((ISecretOperations)s).GetAsync(secretIdentifier);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets a secret
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='secretIdentifier'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static Task<SecretResponseMessageWithRawJsonContent> GetAsync(this ISecretOperations operations, string secretIdentifier)
{
// Forwards to the cancellable overload with no cancellation.
return operations.GetAsync(secretIdentifier, CancellationToken.None);
}
/// <summary>
/// List the secrets in the specified vault
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='vault'>
/// Required.
/// </param>
/// <param name='top'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static SecretResponseMessageWithRawJsonContent List(this ISecretOperations operations, string vault, int? top)
{
// Blocking wrapper: queues the async call to TaskScheduler.Default and
// waits for its result.
return Task.Factory.StartNew((object s) =>
{
return ((ISecretOperations)s).ListAsync(vault, top);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// List the secrets in the specified vault
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='vault'>
/// Required.
/// </param>
/// <param name='top'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static Task<SecretResponseMessageWithRawJsonContent> ListAsync(this ISecretOperations operations, string vault, int? top)
{
// Forwards to the cancellable overload with no cancellation.
return operations.ListAsync(vault, top, CancellationToken.None);
}
/// <summary>
/// List the next page of secrets in the specified vault
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='nextLink'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static SecretResponseMessageWithRawJsonContent ListNext(this ISecretOperations operations, string nextLink)
{
// Blocking wrapper: queues the async call to TaskScheduler.Default and
// waits for its result.
return Task.Factory.StartNew((object s) =>
{
return ((ISecretOperations)s).ListNextAsync(nextLink);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// List the next page of secrets in the specified vault
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='nextLink'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static Task<SecretResponseMessageWithRawJsonContent> ListNextAsync(this ISecretOperations operations, string nextLink)
{
// Forwards to the cancellable overload with no cancellation.
return operations.ListNextAsync(nextLink, CancellationToken.None);
}
/// <summary>
/// List the versions of a secret in the specified vault
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='vault'>
/// Required.
/// </param>
/// <param name='secretName'>
/// Required.
/// </param>
/// <param name='top'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static SecretResponseMessageWithRawJsonContent ListVersions(this ISecretOperations operations, string vault, string secretName, int? top)
{
// Blocking wrapper: queues the async call to TaskScheduler.Default and
// waits for its result.
return Task.Factory.StartNew((object s) =>
{
return ((ISecretOperations)s).ListVersionsAsync(vault, secretName, top);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// List the versions of a secret in the specified vault
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='vault'>
/// Required.
/// </param>
/// <param name='secretName'>
/// Required.
/// </param>
/// <param name='top'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static Task<SecretResponseMessageWithRawJsonContent> ListVersionsAsync(this ISecretOperations operations, string vault, string secretName, int? top)
{
// Forwards to the cancellable overload with no cancellation.
return operations.ListVersionsAsync(vault, secretName, top, CancellationToken.None);
}
/// <summary>
/// List the next page of versions of a secret in the specified vault
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='nextLink'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static SecretResponseMessageWithRawJsonContent ListVersionsNext(this ISecretOperations operations, string nextLink)
{
// Blocking wrapper: queues the async call to TaskScheduler.Default and
// waits for its result.
return Task.Factory.StartNew((object s) =>
{
return ((ISecretOperations)s).ListVersionsNextAsync(nextLink);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// List the next page of versions of a secret in the specified vault
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='nextLink'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static Task<SecretResponseMessageWithRawJsonContent> ListVersionsNextAsync(this ISecretOperations operations, string nextLink)
{
// Forwards to the cancellable overload with no cancellation.
return operations.ListVersionsNextAsync(nextLink, CancellationToken.None);
}
/// <summary>
/// Sets a secret in the specified vault.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='secretIdentifier'>
/// Required.
/// </param>
/// <param name='request'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static SecretResponseMessageWithRawJsonContent Set(this ISecretOperations operations, string secretIdentifier, SecretRequestMessageWithRawJsonContent request)
{
// Blocking wrapper: queues the async call to TaskScheduler.Default and
// waits for its result.
return Task.Factory.StartNew((object s) =>
{
return ((ISecretOperations)s).SetAsync(secretIdentifier, request);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Sets a secret in the specified vault.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='secretIdentifier'>
/// Required.
/// </param>
/// <param name='request'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static Task<SecretResponseMessageWithRawJsonContent> SetAsync(this ISecretOperations operations, string secretIdentifier, SecretRequestMessageWithRawJsonContent request)
{
// Forwards to the cancellable overload with no cancellation.
return operations.SetAsync(secretIdentifier, request, CancellationToken.None);
}
/// <summary>
/// Update the specified secret
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='secretIdentifier'>
/// Required.
/// </param>
/// <param name='request'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static SecretResponseMessageWithRawJsonContent Update(this ISecretOperations operations, string secretIdentifier, SecretRequestMessageWithRawJsonContent request)
{
// Blocking wrapper: queues the async call to TaskScheduler.Default and
// waits for its result.
return Task.Factory.StartNew((object s) =>
{
return ((ISecretOperations)s).UpdateAsync(secretIdentifier, request);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Update the specified secret
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.KeyVault.Internal.ISecretOperations.
/// </param>
/// <param name='secretIdentifier'>
/// Required.
/// </param>
/// <param name='request'>
/// Required.
/// </param>
/// <returns>
/// Represents the response to a secret operation request.
/// </returns>
public static Task<SecretResponseMessageWithRawJsonContent> UpdateAsync(this ISecretOperations operations, string secretIdentifier, SecretRequestMessageWithRawJsonContent request)
{
// Forwards to the cancellable overload with no cancellation.
return operations.UpdateAsync(secretIdentifier, request, CancellationToken.None);
}
}
}
| |
/*
The MIT License (MIT)
Copyright (c) 2016 Maksim Volkau
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
namespace DryIoc
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
/// <summary>Helpers for immutable-style array manipulation plus general array conveniences.</summary>
public static class ArrayTools
{
    /// <summary>Checks whether the array is null or holds no items.</summary> <typeparam name="T">Type of array item.</typeparam>
    /// <param name="source">Array under test.</param> <returns>True when null or zero-length, false otherwise.</returns>
    public static bool IsNullOrEmpty<T>(this T[] source)
    {
        if (source == null)
            return true;
        return source.Length == 0;
    }
    /// <summary>Substitutes the shared empty array for null; passes a non-null source through.</summary> <typeparam name="T">Type of array item.</typeparam>
    /// <param name="source">Source array.</param> <returns>Source array, or the empty singleton when source is null.</returns>
    public static T[] EmptyIfNull<T>(this T[] source)
    {
        return source != null ? source : Empty<T>();
    }
    /// <summary>Returns the enumerable itself when it already is an array, otherwise materializes it into one.</summary>
    /// <typeparam name="T">Array item type.</typeparam>
    /// <param name="source">Source enumerable.</param>
    /// <returns>The source array, or a freshly created array copy.</returns>
    public static T[] ToArrayOrSelf<T>(this IEnumerable<T> source)
    {
        var asArray = source as T[];
        return asArray != null ? asArray : source.ToArray();
    }
    /// <summary>Concatenates two arrays into a new one: all source items first, then all added items.
    /// A null/empty source yields added; a null/empty added yields source.</summary>
    /// <typeparam name="T">Array item type.</typeparam>
    /// <param name="source">Array supplying the leading items.</param>
    /// <param name="added">Array supplying the trailing items.</param>
    /// <returns>New combined array, or one of the inputs when the other is empty.</returns>
    public static T[] Append<T>(this T[] source, params T[] added)
    {
        if (added == null || added.Length == 0)
            return source;
        if (source == null || source.Length == 0)
            return added;
        var combined = new T[source.Length + added.Length];
        Array.Copy(source, 0, combined, 0, source.Length);
        // Single trailing item is assigned directly to skip the Array.Copy overhead.
        if (added.Length == 1)
            combined[source.Length] = added[0];
        else
            Array.Copy(added, 0, combined, source.Length, added.Length);
        return combined;
    }
    /// <summary>Returns a new array with <paramref name="value"/> appended,
    /// or placed at <paramref name="index"/> when one is specified.
    /// A null or empty source always produces a single-item array regardless of index.</summary>
    /// <typeparam name="T">Array item type.</typeparam>
    /// <param name="source">Array to append to or update.</param>
    /// <param name="value">Value to store.</param>
    /// <param name="index">(optional) Position to overwrite; negative means append.</param>
    /// <returns>New array with the appended or updated value.</returns>
    public static T[] AppendOrUpdate<T>(this T[] source, T value, int index = -1)
    {
        if (source == null || source.Length == 0)
            return new[] { value };
        var count = source.Length;
        var target = index < 0 ? count : index;
        // Grow by one slot only when writing past the current last item.
        var copy = new T[target < count ? count : count + 1];
        Array.Copy(source, copy, count);
        copy[target] = value;
        return copy;
    }
    /// <summary>Evaluates the predicate over items left to right and reports the first matching position.</summary>
    /// <typeparam name="T">Type of array items.</typeparam>
    /// <param name="source">Source array; null or empty yields -1.</param>
    /// <param name="predicate">Condition tested against each item.</param>
    /// <returns>Index of the first item satisfying the predicate, or -1 when none does.</returns>
    public static int IndexOf<T>(this T[] source, Func<T, bool> predicate)
    {
        if (source == null)
            return -1;
        for (var i = 0; i < source.Length; ++i)
            if (predicate(source[i]))
                return i;
        return -1;
    }
    /// <summary>Finds the position of an item equal to <paramref name="value"/>, checking reference identity first.</summary>
    /// <typeparam name="T">Type of array items.</typeparam>
    /// <param name="source">Source array; null or empty yields -1.</param>
    /// <param name="value">Value to search for.</param>
    /// <returns>Index of the equal item, or -1 when absent.</returns>
    public static int IndexOf<T>(this T[] source, T value)
    {
        if (source == null)
            return -1;
        for (var i = 0; i < source.Length; ++i)
        {
            var candidate = source[i];
            if (ReferenceEquals(candidate, value) || Equals(candidate, value))
                return i;
        }
        return -1;
    }
    /// <summary>Produces a copy of the array without the item at <paramref name="index"/>.
    /// The original array comes back unchanged when the index is out of bounds or source is null/empty.</summary>
    /// <typeparam name="T">Type of array item.</typeparam>
    /// <param name="source">Input array.</param> <param name="index">Position of the item to drop.</param>
    /// <returns>Shrunk copy, or the input array when index is not inside it.</returns>
    public static T[] RemoveAt<T>(this T[] source, int index)
    {
        // An empty source is covered too: any index >= 0 fails the range check below.
        if (source == null || index < 0 || index >= source.Length)
            return source;
        if (source.Length == 1)
            return new T[0];
        var shrunk = new T[source.Length - 1];
        if (index > 0)
            Array.Copy(source, 0, shrunk, 0, index);
        if (index < shrunk.Length)
            Array.Copy(source, index + 1, shrunk, index, shrunk.Length - index);
        return shrunk;
    }
    /// <summary>Removes the first item equal to <paramref name="value"/>, returning the original array when no item matches.</summary>
    /// <typeparam name="T">Type of array item.</typeparam>
    /// <param name="source">Input array.</param> <param name="value">Value to locate and remove.</param>
    /// <returns>Copy without the value, or the original array when the value is absent.</returns>
    public static T[] Remove<T>(this T[] source, T value)
    {
        var foundAt = source.IndexOf(value);
        return source.RemoveAt(foundAt);
    }
    /// <summary>Provides the per-type singleton empty array.</summary>
    /// <typeparam name="T">Array item type.</typeparam> <returns>Shared empty array.</returns>
    public static T[] Empty<T>()
    {
        return EmptyArray<T>.Value;
    }
    // One cached empty array per item type.
    private static class EmptyArray<T>
    {
        public static readonly T[] Value = new T[0];
    }
}
/// <summary>Wrapper that provides optimistic-concurrency Swap operation implemented using <see cref="Ref.Swap{T}"/>.</summary>
/// <typeparam name="T">Type of object to wrap.</typeparam>
public sealed class Ref<T> where T : class
{
    private T _value;

    /// <summary>Currently held object.</summary>
    public T Value { get { return _value; } }

    /// <summary>Constructs the wrapper, optionally seeding it with an initial value.</summary>
    /// <param name="initialValue">(optional) Starting value.</param>
    public Ref(T initialValue = default(T))
    {
        _value = initialValue;
    }

    /// <summary>Atomically replaces the held object with one produced by <paramref name="getNewValue"/> - see <see cref="Ref.Swap{T}"/> for details.</summary>
    /// <param name="getNewValue">Produces the replacement from the current value.</param>
    /// <returns>The previous value, mirroring <see cref="Interlocked.Exchange(ref int,int)"/>.</returns>
    /// <remarks>Important: <paramref name="getNewValue"/> may run more than once when other threads interleave their own updates.</remarks>
    public T Swap(Func<T, T> getNewValue)
    {
        return Ref.Swap(ref _value, getNewValue);
    }

    /// <summary>Stores the new value unconditionally, disregarding any concurrent changes.</summary>
    /// <param name="newValue"></param> <returns>old value</returns>
    public T Swap(T newValue)
    {
        return Interlocked.Exchange(ref _value, newValue);
    }

    /// <summary>Replaces the held value with <paramref name="newValue"/> only while it still equals <paramref name="currentValue"/>.</summary>
    /// <param name="currentValue"></param> <param name="newValue"></param>
    /// <returns>True when the swap happened; false when another party already changed the value.</returns>
    /// <example><c>[!CDATA[
    /// var value = SomeRef.Value;
    /// if (!SomeRef.TrySwapIfStillCurrent(value, Update(value))
    ///     SomeRef.Swap(v => Update(v)); // fallback to normal Swap with delegate allocation
    /// ]]</c></example>
    public bool TrySwapIfStillCurrent(T currentValue, T newValue)
    {
        var witnessed = Interlocked.CompareExchange(ref _value, newValue, currentValue);
        return witnessed == currentValue;
    }
}
/// <summary>Provides optimistic-concurrency consistent <see cref="Swap{T}"/> operation.</summary>
public static class Ref
{
    // Number of failed compare-exchange attempts tolerated before giving up.
    private const int RETRY_COUNT_UNTIL_THROW = 50;
    private static readonly string _errorRetryCountExceeded =
        "Ref retried to Update for " + RETRY_COUNT_UNTIL_THROW + " times But there is always someone else intervened.";

    /// <summary>Creates a <see cref="Ref{T}"/>, inferring the value type.</summary>
    /// <typeparam name="T">Type of value to wrap.</typeparam>
    /// <param name="value">Initial value to wrap.</param>
    /// <returns>New ref.</returns>
    public static Ref<T> Of<T>(T value) where T : class
    {
        return new Ref<T>(value);
    }

    /// <summary>Makes a fresh ref pointing at the current value of an existing ref.</summary> <typeparam name="T">Ref value type.</typeparam>
    /// <param name="original">Original ref.</param> <returns>New ref to original value.</returns>
    public static Ref<T> NewRef<T>(this Ref<T> original) where T : class
    {
        return Of(original.Value);
    }

    /// <summary>Computes a new value via <paramref name="getNewValue"/>, then publishes it only if the
    /// original was not changed meanwhile; otherwise the computation is retried from the fresh value.
    /// Returns the value that was replaced (the one fed to <paramref name="getNewValue"/>).</summary>
    /// <typeparam name="T">Type of value to swap.</typeparam>
    /// <param name="value">Reference to change to new value</param>
    /// <param name="getNewValue">Delegate to get value from old one.</param>
    /// <returns>Old/original value. By analogy with <see cref="Interlocked.Exchange(ref int,int)"/>.</returns>
    /// <remarks>Important: <paramref name="getNewValue"/> may be invoked repeatedly when concurrent writers keep winning the race.</remarks>
    public static T Swap<T>(ref T value, Func<T, T> getNewValue) where T : class
    {
        for (var attempt = 0; ; ++attempt)
        {
            var current = value;
            var replacement = getNewValue(current);
            if (Interlocked.CompareExchange(ref value, replacement, current) == current)
                return current;
            // Give up after the configured number of lost races rather than spinning forever.
            if (attempt + 1 > RETRY_COUNT_UNTIL_THROW)
                throw new InvalidOperationException(_errorRetryCountExceeded);
        }
    }
}
/// <summary>Immutable Key-Value pair. It is reference type (could be check for null),
/// which is different from System value type <see cref="KeyValuePair{TKey,TValue}"/>.
/// In addition provides <see cref="Equals"/> and <see cref="GetHashCode"/> implementations.</summary>
/// <typeparam name="K">Type of Key.</typeparam><typeparam name="V">Type of Value.</typeparam>
public sealed class KV<K, V>
{
    /// <summary>Key.</summary>
    public readonly K Key;
    /// <summary>Value.</summary>
    public readonly V Value;
    /// <summary>Creates Key-Value object by providing key and value. Does Not check either one for null.</summary>
    /// <param name="key">key.</param><param name="value">value.</param>
    public KV(K key, V value)
    {
        Key = key;
        Value = value;
    }
    /// <summary>Creates nice string view in the form "{Key,Value}".</summary><returns>String representation.</returns>
    public override string ToString()
    {
        // FIX: previously '{' was passed to the StringBuilder constructor, where the implicit
        // char->int conversion selected the Int32 *capacity* overload, so the opening brace was
        // silently dropped and ToString produced "Key,Value}". Append the brace explicitly.
        var s = new StringBuilder().Append('{');
        if (Key != null)
            s.Append(Key);
        s.Append(',');
        if (Value != null)
            s.Append(Value);
        s.Append('}');
        return s.ToString();
    }
    /// <summary>Returns true if both key and value are equal to corresponding key-value of other object.</summary>
    /// <param name="obj">Object to check equality with.</param> <returns>True if equal.</returns>
    public override bool Equals(object obj)
    {
        var other = obj as KV<K, V>;
        return other != null
            && (ReferenceEquals(other.Key, Key) || Equals(other.Key, Key))
            && (ReferenceEquals(other.Value, Value) || Equals(other.Value, Value));
    }
    /// <summary>Combines key and value hash code. R# generated default implementation.</summary>
    /// <returns>Combined hash code for key-value.</returns>
    public override int GetHashCode()
    {
        unchecked
        {
            // Kept exactly as generated so hash codes stay stable for existing consumers.
            return ((object)Key == null ? 0 : Key.GetHashCode() * 397)
                ^ ((object)Value == null ? 0 : Value.GetHashCode());
        }
    }
}
/// <summary>Delegate for changing value from old one to some new based on provided new value.</summary>
/// <typeparam name="V">Type of values.</typeparam>
/// <param name="oldValue">Existing value.</param>
/// <param name="newValue">New value passed to Update.. method.</param>
/// <returns>Changed value, i.e. the value that will actually be stored.</returns>
public delegate V Update<V>(V oldValue, V newValue);
// todo: V3: Rename to ImTree
/// <summary>Simple immutable AVL tree with integer keys and object values.
/// Every Add/Update returns a new tree sharing unchanged sub-trees with the original,
/// so existing references are never mutated.</summary>
public sealed class ImTreeMapIntToObj
{
    /// <summary>Empty tree to start with.</summary>
    public static readonly ImTreeMapIntToObj Empty = new ImTreeMapIntToObj();
    /// <summary>Key.</summary>
    public readonly int Key;
    /// <summary>Value.</summary>
    public readonly object Value;
    /// <summary>Left sub-tree/branch, or empty.</summary>
    public readonly ImTreeMapIntToObj Left;
    /// <summary>Right sub-tree/branch, or empty.</summary>
    public readonly ImTreeMapIntToObj Right;
    /// <summary>Height of longest sub-tree/branch plus 1. It is 0 for empty tree, and 1 for single node tree.</summary>
    public readonly int Height;
    /// <summary>Returns true if tree is empty.</summary>
    public bool IsEmpty { get { return Height == 0; } }
    /// <summary>Returns new tree with added or updated value for specified key.</summary>
    /// <param name="key"></param> <param name="value"></param>
    /// <returns>New tree.</returns>
    public ImTreeMapIntToObj AddOrUpdate(int key, object value)
    {
        return AddOrUpdate(key, value, false, null);
    }
    /// <summary>Delegate to calculate new value from and old and a new value.</summary>
    /// <param name="oldValue">Old</param> <param name="newValue">New</param> <returns>Calculated result.</returns>
    public delegate object UpdateValue(object oldValue, object newValue);
    /// <summary>Returns new tree with added or updated value for specified key.</summary>
    /// <param name="key">Key</param> <param name="value">Value</param>
    /// <param name="updateValue">(optional) Delegate to calculate new value from and old and a new value.</param>
    /// <returns>New tree.</returns>
    public ImTreeMapIntToObj AddOrUpdate(int key, object value, UpdateValue updateValue)
    {
        return AddOrUpdate(key, value, false, updateValue);
    }
    /// <summary>Returns new tree with updated value for the key, Or the same tree if key was not found.</summary>
    /// <param name="key"></param> <param name="value"></param>
    /// <returns>New tree if key is found, or the same tree otherwise.</returns>
    public ImTreeMapIntToObj Update(int key, object value)
    {
        return AddOrUpdate(key, value, true, null);
    }
    /// <summary>Get value for found key or null otherwise.</summary>
    /// <param name="key"></param> <returns>Found value or null.</returns>
    public object GetValueOrDefault(int key)
    {
        // Plain iterative binary search over the sorted tree; no allocation.
        var tree = this;
        while (tree.Height != 0 && tree.Key != key)
            tree = key < tree.Key ? tree.Left : tree.Right;
        return tree.Height != 0 ? tree.Value : null;
    }
    /// <summary>Returns all sub-trees enumerated from left to right (in-order, i.e. ascending by key).</summary>
    /// <returns>Enumerated sub-trees or empty if tree is empty.</returns>
    public IEnumerable<ImTreeMapIntToObj> Enumerate()
    {
        if (Height == 0)
            yield break;
        // Explicit parent stack sized by Height instead of recursion or Stack<T>:
        // an AVL tree can never be deeper than its Height, so the array never overflows.
        var parents = new ImTreeMapIntToObj[Height];
        var tree = this;
        var parentCount = -1;
        while (tree.Height != 0 || parentCount != -1)
        {
            if (tree.Height != 0)
            {
                // Descend left, remembering the path.
                parents[++parentCount] = tree;
                tree = tree.Left;
            }
            else
            {
                // Pop, yield the node, then walk its right branch.
                tree = parents[parentCount--];
                yield return tree;
                tree = tree.Right;
            }
        }
    }
    #region Implementation
    // Empty-tree constructor: all fields keep defaults, Height stays 0.
    private ImTreeMapIntToObj() { }
    private ImTreeMapIntToObj(int key, object value, ImTreeMapIntToObj left, ImTreeMapIntToObj right)
    {
        Key = key;
        Value = value;
        Left = left;
        Right = right;
        Height = 1 + (left.Height > right.Height ? left.Height : right.Height);
    }
    // Single worker behind AddOrUpdate/Update: when updateOnly is true a missing key
    // leaves the tree unchanged; 'update' (when given) merges old and new values.
    private ImTreeMapIntToObj AddOrUpdate(int key, object value, bool updateOnly, UpdateValue update)
    {
        return Height == 0 ? // tree is empty
            (updateOnly ? this : new ImTreeMapIntToObj(key, value, Empty, Empty))
            : (key == Key ? // actual update
                new ImTreeMapIntToObj(key, update == null ? value : update(Value, value), Left, Right)
                : (key < Key // try update on left or right sub-tree
                    ? With(Left.AddOrUpdate(key, value, updateOnly, update), Right)
                    : With(Left, Right.AddOrUpdate(key, value, updateOnly, update))).KeepBalanced());
    }
    // Restores the AVL invariant after an insert: when one side is 2+ levels taller,
    // rotate toward the shorter side, pre-rotating the child for the zig-zag case.
    private ImTreeMapIntToObj KeepBalanced()
    {
        var delta = Left.Height - Right.Height;
        return delta >= 2 ? With(Left.Right.Height - Left.Left.Height == 1 ? Left.RotateLeft() : Left, Right).RotateRight()
            : (delta <= -2 ? With(Left, Right.Left.Height - Right.Right.Height == 1 ? Right.RotateRight() : Right).RotateLeft()
            : this);
    }
    // Left child becomes the new root; current node moves to its right.
    private ImTreeMapIntToObj RotateRight()
    {
        return Left.With(Left.Left, With(Left.Right, Right));
    }
    // Right child becomes the new root; current node moves to its left.
    private ImTreeMapIntToObj RotateLeft()
    {
        return Right.With(With(Left, Right.Left), Right.Right);
    }
    // Rebuilds this node with new branches, reusing 'this' when nothing changed.
    private ImTreeMapIntToObj With(ImTreeMapIntToObj left, ImTreeMapIntToObj right)
    {
        return left == Left && right == Right ? this : new ImTreeMapIntToObj(Key, Value, left, right);
    }
    #endregion
}
// todo: V3: Rename to ImHashTree
/// <summary>Immutable http://en.wikipedia.org/wiki/AVL_tree where actual node key is hash code of <typeparamref name="K"/>.
/// Keys whose hashes collide are kept in the node's <see cref="Conflicts"/> list and resolved by <see cref="object.Equals(object)"/>.</summary>
public sealed class ImTreeMap<K, V>
{
    /// <summary>Empty tree to start with.</summary>
    public static readonly ImTreeMap<K, V> Empty = new ImTreeMap<K, V>();
    /// <summary>Key of type K that should support <see cref="object.Equals(object)"/> and <see cref="object.GetHashCode"/>.</summary>
    public readonly K Key;
    /// <summary>Value of any type V.</summary>
    public readonly V Value;
    /// <summary>Calculated key hash.</summary>
    public readonly int Hash;
    /// <summary>In case of <see cref="Hash"/> conflicts for different keys contains conflicted keys with their values.</summary>
    public readonly KV<K, V>[] Conflicts;
    /// <summary>Left sub-tree/branch, or empty.</summary>
    public readonly ImTreeMap<K, V> Left;
    /// <summary>Right sub-tree/branch, or empty.</summary>
    public readonly ImTreeMap<K, V> Right;
    /// <summary>Height of longest sub-tree/branch plus 1. It is 0 for empty tree, and 1 for single node tree.</summary>
    public readonly int Height;
    /// <summary>Returns true if tree is empty.</summary>
    public bool IsEmpty { get { return Height == 0; } }
    /// <summary>Returns new tree with added key-value. If value with the same key is exist, then
    /// if <paramref name="update"/> is not specified: then existing value will be replaced by <paramref name="value"/>;
    /// if <paramref name="update"/> is specified: then update delegate will decide what value to keep.</summary>
    /// <param name="key">Key to add.</param><param name="value">Value to add.</param>
    /// <param name="update">(optional) Delegate to decide what value to keep: old or new one.</param>
    /// <returns>New tree with added or updated key-value.</returns>
    public ImTreeMap<K, V> AddOrUpdate(K key, V value, Update<V> update = null)
    {
        return AddOrUpdate(key.GetHashCode(), key, value, update, updateOnly: false);
    }
    /// <summary>Looks for <paramref name="key"/> and replaces its value with new <paramref name="value"/>, or
    /// runs custom update handler (<paramref name="update"/>) with old and new value to get the updated result.</summary>
    /// <param name="key">Key to look for.</param>
    /// <param name="value">New value to replace key value with.</param>
    /// <param name="update">(optional) Delegate for custom update logic, it gets old and new <paramref name="value"/>
    /// as inputs and should return updated value as output.</param>
    /// <returns>New tree with updated value or the SAME tree if no key found.</returns>
    public ImTreeMap<K, V> Update(K key, V value, Update<V> update = null)
    {
        return AddOrUpdate(key.GetHashCode(), key, value, update, updateOnly: true);
    }
    /// <summary>Looks for key in a tree and returns the key value if found, or <paramref name="defaultValue"/> otherwise.</summary>
    /// <param name="key">Key to look for.</param> <param name="defaultValue">(optional) Value to return if key is not found.</param>
    /// <returns>Found value or <paramref name="defaultValue"/>.</returns>
    public V GetValueOrDefault(K key, V defaultValue = default(V))
    {
        // Binary-search by hash first; only fall back to Equals for the final candidate
        // or its conflict list.
        var t = this;
        var hash = key.GetHashCode();
        while (t.Height != 0 && t.Hash != hash)
            t = hash < t.Hash ? t.Left : t.Right;
        return t.Height != 0 && (ReferenceEquals(key, t.Key) || key.Equals(t.Key))
            ? t.Value : t.GetConflictedValueOrDefault(key, defaultValue);
    }
    /// <summary>Depth-first in-order traversal as described in http://en.wikipedia.org/wiki/Tree_traversal
    /// The only difference is using fixed size array instead of stack for speed-up (~20% faster than stack).</summary>
    /// <returns>Sequence of enumerated key value pairs.</returns>
    public IEnumerable<KV<K, V>> Enumerate()
    {
        if (Height == 0)
            yield break;
        var parents = new ImTreeMap<K, V>[Height];
        var tree = this;
        var parentCount = -1;
        while (tree.Height != 0 || parentCount != -1)
        {
            if (tree.Height != 0)
            {
                parents[++parentCount] = tree;
                tree = tree.Left;
            }
            else
            {
                tree = parents[parentCount--];
                yield return new KV<K, V>(tree.Key, tree.Value);
                // Conflicted keys share the node's hash, so they are emitted alongside it.
                if (tree.Conflicts != null)
                    for (var i = 0; i < tree.Conflicts.Length; i++)
                        yield return tree.Conflicts[i];
                tree = tree.Right;
            }
        }
    }
    #region Implementation
    // Empty-tree constructor: Height stays 0.
    private ImTreeMap() { }
    private ImTreeMap(int hash, K key, V value, KV<K, V>[] conflicts, ImTreeMap<K, V> left, ImTreeMap<K, V> right)
    {
        Hash = hash;
        Key = key;
        Value = value;
        Conflicts = conflicts;
        Left = left;
        Right = right;
        Height = 1 + (left.Height > right.Height ? left.Height : right.Height);
    }
    // Shared worker: navigates by hash, delegating equal-hash nodes to conflict resolution.
    private ImTreeMap<K, V> AddOrUpdate(int hash, K key, V value, Update<V> update, bool updateOnly)
    {
        return Height == 0 ? (updateOnly ? this : new ImTreeMap<K, V>(hash, key, value, null, Empty, Empty))
            : (hash == Hash ? UpdateValueAndResolveConflicts(key, value, update, updateOnly)
            : (hash < Hash
                ? With(Left.AddOrUpdate(hash, key, value, update, updateOnly), Right)
                : With(Left, Right.AddOrUpdate(hash, key, value, update, updateOnly))).KeepBalanced());
    }
    // Handles a hash hit: either the node's own key matches, or the key lives (or will live)
    // in the Conflicts list.
    private ImTreeMap<K, V> UpdateValueAndResolveConflicts(K key, V value, Update<V> update, bool updateOnly)
    {
        if (ReferenceEquals(Key, key) || Key.Equals(key))
            return new ImTreeMap<K, V>(Hash, key, update == null ? value : update(Value, value), Conflicts, Left, Right);
        if (Conflicts == null) // add only if updateOnly is false.
            return updateOnly ? this
                : new ImTreeMap<K, V>(Hash, Key, Value, new[] { new KV<K, V>(key, value) }, Left, Right);
        // BUGFIX: search the conflicts for the incoming 'key'. The previous code compared
        // against the node's own 'Key', which is never stored in Conflicts, so the search
        // always failed: updates of conflicted keys appended duplicates instead of replacing,
        // and updateOnly never updated a conflicted entry.
        var found = Conflicts.Length - 1;
        while (found >= 0 && !Equals(Conflicts[found].Key, key)) --found;
        if (found == -1)
        {
            if (updateOnly) return this;
            var newConflicts = new KV<K, V>[Conflicts.Length + 1];
            Array.Copy(Conflicts, 0, newConflicts, 0, Conflicts.Length);
            newConflicts[Conflicts.Length] = new KV<K, V>(key, value);
            return new ImTreeMap<K, V>(Hash, Key, Value, newConflicts, Left, Right);
        }
        var conflicts = new KV<K, V>[Conflicts.Length];
        Array.Copy(Conflicts, 0, conflicts, 0, Conflicts.Length);
        conflicts[found] = new KV<K, V>(key, update == null ? value : update(Conflicts[found].Value, value));
        return new ImTreeMap<K, V>(Hash, Key, Value, conflicts, Left, Right);
    }
    // Linear scan of the (normally tiny) conflict list using Equals.
    private V GetConflictedValueOrDefault(K key, V defaultValue)
    {
        if (Conflicts != null)
            for (var i = 0; i < Conflicts.Length; i++)
                if (Equals(Conflicts[i].Key, key))
                    return Conflicts[i].Value;
        return defaultValue;
    }
    // Restores the AVL invariant after an insert; pre-rotates the taller child in the zig-zag case.
    private ImTreeMap<K, V> KeepBalanced()
    {
        var delta = Left.Height - Right.Height;
        return delta >= 2 ? With(Left.Right.Height - Left.Left.Height == 1 ? Left.RotateLeft() : Left, Right).RotateRight()
            : (delta <= -2 ? With(Left, Right.Left.Height - Right.Right.Height == 1 ? Right.RotateRight() : Right).RotateLeft()
            : this);
    }
    private ImTreeMap<K, V> RotateRight()
    {
        return Left.With(Left.Left, With(Left.Right, Right));
    }
    private ImTreeMap<K, V> RotateLeft()
    {
        return Right.With(With(Left, Right.Left), Right.Right);
    }
    // Rebuilds this node with new branches, reusing 'this' when nothing changed.
    private ImTreeMap<K, V> With(ImTreeMap<K, V> left, ImTreeMap<K, V> right)
    {
        return left == Left && right == Right ? this : new ImTreeMap<K, V>(Hash, Key, Value, Conflicts, left, right);
    }
    #endregion
}
}
| |
namespace Khnumdev.TwitBot.Data.DWH.Migrations
{
using System;
using System.Data.Entity.Migrations;
// Auto-generated EF6 code-first migration: builds the DWH star schema (two fact tables,
// seven dimension tables). Generated code — keep edits to comments only.
public partial class Initial : DbMigration
{
    public override void Up()
    {
        // Fact table at message grain: one row per conversation message, keyed to every dimension.
        CreateTable(
            "DWH.FactConversation",
            c => new
            {
                Id = c.Int(nullable: false, identity: true),
                ChannelId = c.Int(nullable: false),
                ConversationTrackId = c.Int(nullable: false),
                DateId = c.Int(nullable: false),
                MessageId = c.Int(nullable: false),
                MessageSourceId = c.Int(nullable: false),
                MessageTypeId = c.Int(nullable: false),
                FromUserId = c.Int(nullable: false),
                ToUserId = c.Int(nullable: false),
                Sentiment = c.Single(), // nullable: sentiment score may be absent
            })
            .PrimaryKey(t => t.Id)
            // NOTE(review): the two DimUser FKs intentionally omit cascadeDelete — a cascade from
            // both FromUserId and ToUserId would create multiple cascade paths on SQL Server.
            .ForeignKey("DWH.DimChannel", t => t.ChannelId, cascadeDelete: true)
            .ForeignKey("DWH.DimConversation", t => t.ConversationTrackId, cascadeDelete: true)
            .ForeignKey("DWH.DimDate", t => t.DateId, cascadeDelete: true)
            .ForeignKey("DWH.DimUser", t => t.FromUserId)
            .ForeignKey("DWH.DimMessage", t => t.MessageId, cascadeDelete: true)
            .ForeignKey("DWH.DimMessageSource", t => t.MessageSourceId, cascadeDelete: true)
            .ForeignKey("DWH.DimMessageType", t => t.MessageTypeId, cascadeDelete: true)
            .ForeignKey("DWH.DimUser", t => t.ToUserId)
            .Index(t => t.ChannelId)
            .Index(t => t.ConversationTrackId)
            .Index(t => t.DateId)
            .Index(t => t.MessageId)
            .Index(t => t.MessageSourceId)
            .Index(t => t.MessageTypeId)
            .Index(t => t.FromUserId)
            .Index(t => t.ToUserId);
        CreateTable(
            "DWH.DimChannel",
            c => new
            {
                Id = c.Int(nullable: false, identity: true),
                Name = c.String(maxLength: 20),
            })
            .PrimaryKey(t => t.Id);
        CreateTable(
            "DWH.DimConversation",
            c => new
            {
                Id = c.Int(nullable: false, identity: true),
                ConversationId = c.String(maxLength: 20),
            })
            .PrimaryKey(t => t.Id);
        // Date dimension: Id is supplied by the loader (no identity), presumably a smart date key
        // — TODO confirm against the ETL that populates it.
        CreateTable(
            "DWH.DimDate",
            c => new
            {
                Id = c.Int(nullable: false),
                Year = c.String(maxLength: 4, fixedLength: true, unicode: false),
                Quarter = c.DateTime(nullable: false, precision: 7, storeType: "datetime2"),
                QuarterName = c.String(maxLength: 20),
                Month = c.DateTime(nullable: false, precision: 7, storeType: "datetime2"),
                MonthName = c.String(maxLength: 20),
                Day = c.Byte(nullable: false),
                Hour = c.Byte(nullable: false),
                Date = c.DateTime(nullable: false, precision: 7, storeType: "datetime2"),
            })
            .PrimaryKey(t => t.Id);
        CreateTable(
            "DWH.DimUser",
            c => new
            {
                Id = c.Int(nullable: false, identity: true),
                Name = c.String(maxLength: 50),
            })
            .PrimaryKey(t => t.Id);
        CreateTable(
            "DWH.DimMessage",
            c => new
            {
                Id = c.Int(nullable: false, identity: true),
                Content = c.String(maxLength: 150),
                ConversationId = c.String(maxLength: 20),
                ChannelId = c.String(maxLength: 20),
                User = c.String(maxLength: 50),
                Date = c.DateTime(nullable: false),
                LoadedOn = c.DateTime(nullable: false),
            })
            .PrimaryKey(t => t.Id);
        CreateTable(
            "DWH.DimMessageSource",
            c => new
            {
                Id = c.Int(nullable: false, identity: true),
                Source = c.String(maxLength: 20),
            })
            .PrimaryKey(t => t.Id);
        CreateTable(
            "DWH.DimMessageType",
            c => new
            {
                Id = c.Int(nullable: false, identity: true),
                Name = c.String(maxLength: 20),
            })
            .PrimaryKey(t => t.Id);
        // Fact table at word grain: one row per word occurrence, sharing the same dimensions.
        CreateTable(
            "DWH.FactWord",
            c => new
            {
                Id = c.Int(nullable: false, identity: true),
                ChannelId = c.Int(nullable: false),
                ConversationTrackId = c.Int(nullable: false),
                DateId = c.Int(nullable: false),
                MessageId = c.Int(nullable: false),
                MessageSourceId = c.Int(nullable: false),
                MessageTypeId = c.Int(nullable: false),
                FromUserId = c.Int(nullable: false),
                ToUserId = c.Int(nullable: false),
                Content = c.String(maxLength: 20),
            })
            .PrimaryKey(t => t.Id)
            .ForeignKey("DWH.DimChannel", t => t.ChannelId, cascadeDelete: true)
            .ForeignKey("DWH.DimConversation", t => t.ConversationTrackId, cascadeDelete: true)
            .ForeignKey("DWH.DimDate", t => t.DateId, cascadeDelete: true)
            .ForeignKey("DWH.DimUser", t => t.FromUserId)
            .ForeignKey("DWH.DimMessage", t => t.MessageId, cascadeDelete: true)
            .ForeignKey("DWH.DimMessageSource", t => t.MessageSourceId, cascadeDelete: true)
            .ForeignKey("DWH.DimMessageType", t => t.MessageTypeId, cascadeDelete: true)
            .ForeignKey("DWH.DimUser", t => t.ToUserId)
            .Index(t => t.ChannelId)
            .Index(t => t.ConversationTrackId)
            .Index(t => t.DateId)
            .Index(t => t.MessageId)
            .Index(t => t.MessageSourceId)
            .Index(t => t.MessageTypeId)
            .Index(t => t.FromUserId)
            .Index(t => t.ToUserId);
        // NOTE(review): DimLanguage is created but not referenced by any FK in this migration.
        CreateTable(
            "DWH.DimLanguage",
            c => new
            {
                Id = c.Int(nullable: false, identity: true),
                Name = c.String(maxLength: 20),
            })
            .PrimaryKey(t => t.Id);
    }
    public override void Down()
    {
        // Reverse order of Up: drop constraints and indexes first, then tables
        // (facts before the dimensions they reference).
        DropForeignKey("DWH.FactWord", "ToUserId", "DWH.DimUser");
        DropForeignKey("DWH.FactWord", "MessageTypeId", "DWH.DimMessageType");
        DropForeignKey("DWH.FactWord", "MessageSourceId", "DWH.DimMessageSource");
        DropForeignKey("DWH.FactWord", "MessageId", "DWH.DimMessage");
        DropForeignKey("DWH.FactWord", "FromUserId", "DWH.DimUser");
        DropForeignKey("DWH.FactWord", "DateId", "DWH.DimDate");
        DropForeignKey("DWH.FactWord", "ConversationTrackId", "DWH.DimConversation");
        DropForeignKey("DWH.FactWord", "ChannelId", "DWH.DimChannel");
        DropForeignKey("DWH.FactConversation", "ToUserId", "DWH.DimUser");
        DropForeignKey("DWH.FactConversation", "MessageTypeId", "DWH.DimMessageType");
        DropForeignKey("DWH.FactConversation", "MessageSourceId", "DWH.DimMessageSource");
        DropForeignKey("DWH.FactConversation", "MessageId", "DWH.DimMessage");
        DropForeignKey("DWH.FactConversation", "FromUserId", "DWH.DimUser");
        DropForeignKey("DWH.FactConversation", "DateId", "DWH.DimDate");
        DropForeignKey("DWH.FactConversation", "ConversationTrackId", "DWH.DimConversation");
        DropForeignKey("DWH.FactConversation", "ChannelId", "DWH.DimChannel");
        DropIndex("DWH.FactWord", new[] { "ToUserId" });
        DropIndex("DWH.FactWord", new[] { "FromUserId" });
        DropIndex("DWH.FactWord", new[] { "MessageTypeId" });
        DropIndex("DWH.FactWord", new[] { "MessageSourceId" });
        DropIndex("DWH.FactWord", new[] { "MessageId" });
        DropIndex("DWH.FactWord", new[] { "DateId" });
        DropIndex("DWH.FactWord", new[] { "ConversationTrackId" });
        DropIndex("DWH.FactWord", new[] { "ChannelId" });
        DropIndex("DWH.FactConversation", new[] { "ToUserId" });
        DropIndex("DWH.FactConversation", new[] { "FromUserId" });
        DropIndex("DWH.FactConversation", new[] { "MessageTypeId" });
        DropIndex("DWH.FactConversation", new[] { "MessageSourceId" });
        DropIndex("DWH.FactConversation", new[] { "MessageId" });
        DropIndex("DWH.FactConversation", new[] { "DateId" });
        DropIndex("DWH.FactConversation", new[] { "ConversationTrackId" });
        DropIndex("DWH.FactConversation", new[] { "ChannelId" });
        DropTable("DWH.DimLanguage");
        DropTable("DWH.FactWord");
        DropTable("DWH.DimMessageType");
        DropTable("DWH.DimMessageSource");
        DropTable("DWH.DimMessage");
        DropTable("DWH.DimUser");
        DropTable("DWH.DimDate");
        DropTable("DWH.DimConversation");
        DropTable("DWH.DimChannel");
        DropTable("DWH.FactConversation");
    }
}
}
| |
// Copyright (c) Microsoft Open Technologies, Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.CSharp.Syntax.InternalSyntax
{
internal partial class LanguageParser : SyntaxParser
{
// Parses a single type-parameter constraint. In this fork every constraint is parsed as a
// plain type constraint: the 'new()' constructor-constraint and 'class'/'struct' branches
// below are commented out, so isFirst and isStruct are currently unread here —
// NOTE(review): presumably kept for parity with the original Roslyn parser; confirm before removing.
private TypeParameterConstraintSyntax ParseTypeParameterConstraint(bool isFirst, ref bool isStruct)
{
    switch (this.CurrentToken.Kind)
    {
        //case SyntaxKind.NewKeyword:
        //    var newToken = this.EatToken();
        //    if (isStruct)
        //    {
        //        newToken = this.AddError(newToken, ErrorCode.ERR_NewBoundWithVal);
        //    }
        //    var open = this.EatToken(SyntaxKind.OpenParenToken);
        //    var close = this.EatToken(SyntaxKind.CloseParenToken);
        //    if (this.CurrentToken.Kind == SyntaxKind.CommaToken)
        //    {
        //        newToken = this.AddError(newToken, ErrorCode.ERR_NewBoundMustBeLast);
        //    }
        //    return _syntaxFactory.ConstructorConstraint(newToken, open, close);
        //case SyntaxKind.ClassKeyword:
        //    var token = this.EatToken();
        //    if (!isFirst)
        //    {
        //        token = this.AddError(token, ErrorCode.ERR_RefValBoundMustBeFirst);
        //    }
        //    return _syntaxFactory.ClassOrStructConstraint(isStruct ? SyntaxKind.StructConstraint : SyntaxKind.ClassConstraint, token);
        default:
            // isConstraint: true — ParseDeclarationType reports ERR_BadConstraintType for
            // anything that is not a predefined type or a name.
            var type = this.ParseDeclarationType(true, false);
            return _syntaxFactory.TypeConstraint(type);
    }
}
/// <summary>
/// Speculatively determines whether the upcoming tokens form an on-demand import
/// (a type name followed by ".*"). The token stream is always rewound in the
/// finally block, so this lookahead never changes the parser position.
/// </summary>
private bool IsPossibleOnDemandImport()
{
    //skip @interface define
    if (this.CurrentToken.Kind == SyntaxKind.AtToken && this.PeekToken(1).Kind == SyntaxKind.InterfaceKeyword)
    {
        return false;
    }
    var resetPoint = this.GetResetPoint();
    try
    {
        // Parse a type only to see what follows it; the parsed node itself is discarded.
        TypeSyntax type = this.ParseType(false);
        if (this.CurrentToken.Kind == SyntaxKind.DotToken && this.PeekToken(1).Kind == SyntaxKind.AsteriskToken)
        {
            return true;
        }
    }
    finally
    {
        // Rewind regardless of outcome: this is a lookahead, not a consuming parse.
        this.Reset(ref resetPoint);
        this.Release(ref resetPoint);
    }
    return false;
}
/// <summary>
/// Parses a type that appears in a declaration position (a constraint or a base type)
/// and reports a context-appropriate diagnostic when the parsed node is neither a
/// predefined type nor a name.
/// </summary>
/// <param name="isConstraint">True when parsing a type-parameter constraint; selects the error code.</param>
/// <param name="parentIsParameter">Passed through to <c>ParseType</c>.</param>
private TypeSyntax ParseDeclarationType(bool isConstraint, bool parentIsParameter)
{
    var parsed = this.ParseType(parentIsParameter);
    if (parsed.Kind == SyntaxKind.PredefinedType || SyntaxKindFacts.IsName(parsed.Kind))
    {
        return parsed;
    }
    // Not a legal declaration type: attach the diagnostic that matches the context.
    var errorCode = isConstraint ? ErrorCode.ERR_BadConstraintType : ErrorCode.ERR_BadBaseType;
    return this.AddError(parsed, errorCode);
}
/// <summary>
/// Parses an array rank specifier "[ ... ]", allowing omitted sizes (bare commas)
/// and explicit size expressions. Reports errors when explicit sizes appear where
/// they are not expected, and guarantees the result never mixes omitted and
/// non-omitted sizes.
/// </summary>
/// <param name="isArrayCreation">Selects the error reported for unexpected sizes.</param>
/// <param name="expectSizes">True when explicit size expressions are legal here.</param>
/// <param name="sawNonOmittedSize">Set to true when at least one explicit size expression was parsed.</param>
private ArrayRankSpecifierSyntax ParseArrayRankSpecifier(bool isArrayCreation, bool expectSizes, out bool sawNonOmittedSize)
{
    sawNonOmittedSize = false;
    bool sawOmittedSize = false;
    var open = this.EatToken(SyntaxKind.OpenBracketToken);
    var list = this._pool.AllocateSeparated<ExpressionSyntax>();
    try
    {
        // A single shared node suffices for every omitted size slot.
        var omittedArraySizeExpressionInstance = _syntaxFactory.OmittedArraySizeExpression(SyntaxFactory.Token(SyntaxKind.OmittedArraySizeExpressionToken));
        while (this.CurrentToken.Kind != SyntaxKind.CloseBracketToken)
        {
            if (this.CurrentToken.Kind == SyntaxKind.CommaToken)
            {
                // NOTE: trivia will be attached to comma, not omitted array size
                sawOmittedSize = true;
                list.Add(omittedArraySizeExpressionInstance);
                list.AddSeparator(this.EatToken());
            }
            else if (this.IsPossibleExpression())
            {
                var size = this.ParseExpression();
                sawNonOmittedSize = true;
                if (!expectSizes)
                {
                    size = this.AddError(size, isArrayCreation ? ErrorCode.ERR_InvalidArray : ErrorCode.ERR_ArraySizeInDeclaration);
                }
                list.Add(size);
                if (this.CurrentToken.Kind != SyntaxKind.CloseBracketToken)
                {
                    list.AddSeparator(this.EatToken(SyntaxKind.CommaToken));
                }
            }
            else if (this.SkipBadArrayRankSpecifierTokens(ref open, list, SyntaxKind.CommaToken) == PostSkipAction.Abort)
            {
                break;
            }
        }
        // Don't end on a comma.
        // If the omitted size would be the only element, then skip it unless sizes were expected.
        // An even element count means the list is empty or ends on a separator, so an
        // implied omitted size must be appended to balance it.
        if (((list.Count & 1) == 0))
        {
            sawOmittedSize = true;
            list.Add(omittedArraySizeExpressionInstance);
        }
        // Never mix omitted and non-omitted array sizes. If there were non-omitted array sizes,
        // then convert all of the omitted array sizes to missing identifiers.
        if (sawOmittedSize && sawNonOmittedSize)
        {
            for (int i = 0; i < list.Count; i++)
            {
                if (list[i].Kind == SyntaxKind.OmittedArraySizeExpression)
                {
                    int width = list[i].Width;
                    int offset = list[i].GetLeadingTriviaWidth();
                    list[i] = this.AddError(this.CreateMissingIdentifierName(), offset, width, ErrorCode.ERR_ValueExpected);
                }
            }
        }
        // Eat the close brace and we're done.
        var close = this.EatToken(SyntaxKind.CloseBracketToken);
        return _syntaxFactory.ArrayRankSpecifier(open, list, close);
    }
    finally
    {
        this._pool.Free(list);
    }
}
/// <summary>
/// Parses an "identifier =" prefix of a named assignment. The caller must have
/// already verified the lookahead via <c>IsNamedAssignment</c>.
/// </summary>
/// <param name="warnOnGlobal">Currently unused; kept for caller compatibility.</param>
private NameEqualsSyntax ParseNameEquals(bool warnOnGlobal = false)
{
    Debug.Assert(this.IsNamedAssignment());
    // Consume the name first, then the '=' token (order matters for the token stream).
    var identifier = _syntaxFactory.IdentifierName(this.ParseIdentifierToken());
    var equalsToken = this.EatToken(SyntaxKind.EqualsToken);
    return _syntaxFactory.NameEquals(identifier, equalsToken);
}
/// <summary>
/// Parses a single variable declarator: the name, optional C-style array rank
/// specifiers, an (erroneous) constructor-style argument list, and an optional
/// "= initializer" clause. The declaration's type has already been parsed by the
/// caller and is passed in only for lookahead heuristics.
/// </summary>
/// <param name="parentType">The already-parsed declaration type; consulted for the newline heuristic below.</param>
/// <param name="flags">Modifier flags (Local/Final/...) that influence initializer parsing.</param>
/// <param name="isFirst">True for the first declarator after the type; later declarators starting with a type name get ERR_MultiTypeInDeclaration.</param>
/// <param name="isExpressionContext">True when parsing inside an expression, which disables the newline lookahead heuristic.</param>
private VariableDeclaratorSyntax ParseVariableDeclarator(TypeSyntax parentType, VariableFlags flags, bool isFirst, bool isExpressionContext = false)
{
    // Incremental parsing fast path: reuse the previously built declarator node.
    if (this.IsIncrementalAndFactoryContextMatches && CanReuseVariableDeclarator(this.CurrentNode as CSharp.Syntax.VariableDeclaratorSyntax, flags, isFirst))
    {
        return (VariableDeclaratorSyntax)this.EatNode();
    }
    if (!isExpressionContext)
    {
        // Check for the common pattern of:
        //
        // C //<-- here
        // Console.WriteLine();
        //
        // Standard greedy parsing will assume that this should be parsed as a variable
        // declaration: "C Console". We want to avoid that as it can confuse parts of the
        // system further up. So, if we see certain things following the identifier, then we can
        // assume it's not the actual name.
        //
        // So, if we're after a newline and we see a name followed by the list below, then we
        // assume that we're accidentally consuming too far into the next statement.
        //
        // <dot>, <arrow>, any binary operator (except =), <question>. None of these characters
        // are allowed in a normal variable declaration. This also provides a more useful error
        // message to the user. Instead of telling them that a semicolon is expected after the
        // following token, then instead get a useful message about an identifier being missing.
        // The above list prevents:
        //
        // C //<-- here
        // Console.WriteLine();
        //
        // C //<-- here
        // Console->WriteLine();
        //
        // C
        // A + B; // etc.
        //
        // C
        // A ? B : D;
        var resetPoint = this.GetResetPoint();
        try
        {
            var currentTokenKind = this.CurrentToken.Kind;
            if (currentTokenKind == SyntaxKind.IdentifierToken && !parentType.IsMissing)
            {
                // The heuristic only applies when the type and the candidate name sit on different lines.
                var isAfterNewLine = parentType.GetLastToken().TrailingTrivia.Any(SyntaxKind.EndOfLineTrivia);
                if (isAfterNewLine)
                {
                    int offset, width;
                    this.GetDiagnosticSpanForMissingToken(out offset, out width);
                    this.EatToken();
                    currentTokenKind = this.CurrentToken.Kind;
                    var isNonEqualsBinaryToken =
                        currentTokenKind != SyntaxKind.EqualsToken &&
                        SyntaxKindFacts.IsBinaryExpressionOperatorToken(currentTokenKind);
                    if (currentTokenKind == SyntaxKind.DotToken ||
                        currentTokenKind == SyntaxKind.MinusGreaterThanToken ||
                        isNonEqualsBinaryToken)
                    {
                        // Treat the identifier as the start of the next statement and
                        // synthesize a missing declarator name instead.
                        var missingIdentifier = CreateMissingIdentifierToken();
                        missingIdentifier = this.AddError(missingIdentifier, offset, width, ErrorCode.ERR_IdentifierExpected);
                        return _syntaxFactory.VariableDeclarator(missingIdentifier, default(SyntaxList<ArrayRankSpecifierSyntax>), null,
                            null);
                    }
                }
            }
        }
        finally
        {
            // The lookahead above always rewinds; only the synthesized-missing-name
            // early return (built before the rewind) escapes with consumed tokens undone.
            this.Reset(ref resetPoint);
            this.Release(ref resetPoint);
        }
    }
    //if (this.IsPossibleRankAndDimensionSpecifier())
    //{
    //    var ranks = this._pool.Allocate<ArrayRankSpecifierSyntax>();
    //    try
    //    {
    //        while (this.IsPossibleRankAndDimensionSpecifier())
    //        {
    //            bool unused;
    //            var rank = this.ParseArrayRankSpecifier(isArrayCreation, expectSizes, out unused);
    //            ranks.Add(rank);
    //            expectSizes = false;
    //        }
    //        type = _syntaxFactory.ArrayType(type, ranks);
    //    }
    //    finally
    //    {
    //        this._pool.Free(ranks);
    //    }
    //}
    // NOTE: Diverges from Dev10.
    //
    // When we see parse an identifier and we see the partial contextual keyword, we check
    // to see whether it is already attached to a partial class or partial method
    // declaration. However, in the specific case of variable declarators, Dev10
    // specifically treats it as a variable name, even if it could be interpreted as a
    // keyword.
    var name = this.ParseIdentifierToken();
    BracketedArgumentListSyntax argumentList = null;
    EqualsValueClauseSyntax initializer = null;
    TerminatorState saveTerm = this._termState;
    SyntaxList<ArrayRankSpecifierSyntax> ranges = default(SyntaxList<ArrayRankSpecifierSyntax>);
    bool isLocal = (flags & VariableFlags.Local) != 0;
    bool isFinal = (flags & VariableFlags.Final) != 0;
    // Give better error message in the case where the user did something like:
    //
    // X x = 1, Y y = 2;
    // using (X x = expr1, Y y = expr2) ...
    //
    // The superfluous type name is treated as variable (it is an identifier) and a missing ',' is injected after it.
    if (!isFirst && this.IsTrueIdentifier())
    {
        name = this.AddError(name, ErrorCode.ERR_MultiTypeInDeclaration);
    }
    switch (this.CurrentToken.Kind)
    {
        case SyntaxKind.EqualsToken:
            var equals = this.EatToken();
            var init = this.ParseVariableInitializer(isLocal);
            initializer = _syntaxFactory.EqualsValueClause(equals, init);
            break;
        case SyntaxKind.OpenParenToken:
            // Special case for accidental use of C-style constructors
            // Fake up something to hold the arguments.
            this._termState |= TerminatorState.IsPossibleEndOfVariableDeclaration;
            argumentList = this.ParseBracketedArgumentList();
            this._termState = saveTerm;
            argumentList = this.AddError(argumentList, ErrorCode.ERR_BadVarDecl);
            break;
        case SyntaxKind.OpenBracketToken:
            bool sawNonOmittedSize;
            if (this.IsPossibleRankAndDimensionSpecifier())
            {
                this._termState |= TerminatorState.IsPossibleEndOfVariableDeclaration;
                var ranks = this._pool.Allocate<ArrayRankSpecifierSyntax>();
                try
                {
                    while (this.IsPossibleRankAndDimensionSpecifier())
                    {
                        var rank = this.ParseArrayRankSpecifier(false, false, out sawNonOmittedSize);
                        ranks.Add(rank);
                    }
                }
                finally
                {
                    // Capture the ranks before releasing the pooled builder.
                    ranges = ranks.ToList();
                    this._pool.Free(ranks);
                }
                this._termState = saveTerm;
            }
            // A C-style "int x[] = ..." still gets its initializer parsed.
            if (this.CurrentToken.Kind == SyntaxKind.EqualsToken)
            {
                goto case SyntaxKind.EqualsToken;
            }
            //this._termState |= TerminatorState.IsPossibleEndOfVariableDeclaration;
            //var specifier = this.ParseArrayRankSpecifier(isArrayCreation: false, expectSizes: false, sawNonOmittedSize: out sawNonOmittedSize);
            //this._termState = saveTerm;
            //var open = specifier.OpenBracketToken;
            //var sizes = specifier.Sizes;
            //var close = specifier.CloseBracketToken;
            //if (!sawNonOmittedSize)
            //{
            //    close = this.AddError(close, ErrorCode.ERR_ValueExpected);
            //}
            //var args = this._pool.AllocateSeparated<ArgumentSyntax>();
            //try
            //{
            //    var withSeps = sizes.GetWithSeparators();
            //    foreach (var item in withSeps)
            //    {
            //        var expression = item as ExpressionSyntax;
            //        if (expression != null)
            //        {
            //            args.Add(_syntaxFactory.Argument(null, expression));
            //        }
            //        else
            //        {
            //            args.AddSeparator((SyntaxToken)item);
            //        }
            //    }
            //    argumentList = _syntaxFactory.BracketedArgumentList(open, args, close);
            //    {
            //        argumentList = this.AddError(argumentList, ErrorCode.ERR_CStyleArray);
            //        // If we have "int x[] = new int[10];" then parse the initializer.
            //        if (this.CurrentToken.Kind == SyntaxKind.EqualsToken)
            //        {
            //            goto case SyntaxKind.EqualsToken;
            //        }
            //    }
            //}
            //finally
            //{
            //    this._pool.Free(args);
            //}
            break;
        default:
            //if (isFixed)
            //{
            //    if (parentType.Kind == SyntaxKind.ArrayType)
            //    {
            //        // They accidentally put the array before the identifier
            //        name = this.AddError(name, ErrorCode.ERR_FixedDimsRequired);
            //    }
            //    else
            //    {
            //        goto case SyntaxKind.OpenBracketToken;
            //    }
            //}
            break;
    }
    return _syntaxFactory.VariableDeclarator(name, ranges, argumentList, initializer);
}
/// <summary>
/// Parses a (possibly qualified, possibly open generic) name.
/// This is public and parses open types — you probably don't want to use it.
/// </summary>
public NameSyntax ParseName() => this.ParseQualifiedName();
/// <summary>
/// Parses a simple identifier name, reusing the node from a previous parse when
/// incremental parsing applies.
/// </summary>
private IdentifierNameSyntax ParseIdentifierName()
{
    // Fast path: take the already-built node straight off the old tree.
    bool canReuseNode = this.IsIncrementalAndFactoryContextMatches
        && this.CurrentNodeKind == SyntaxKind.IdentifierName;
    if (canReuseNode)
    {
        return (IdentifierNameSyntax)this.EatNode();
    }
    return SyntaxFactory.IdentifierName(ParseIdentifierToken());
}
/// <summary>
/// Consumes the current token when it is an identifier; otherwise synthesizes a
/// missing identifier token carrying an ERR_IdentifierExpected diagnostic.
/// </summary>
private SyntaxToken ParseIdentifierToken()
{
    if (this.CurrentToken.Kind != SyntaxKind.IdentifierToken)
    {
        // Not at an identifier: fabricate one so the tree stays well-formed.
        var missing = CreateMissingIdentifierToken();
        return this.AddError(missing, ErrorCode.ERR_IdentifierExpected);
    }
    // Error tolerance for IntelliSense. Consider the following case: [EditorBrowsable( partial class Foo {
    // } Because we're parsing an _annotation argument we'll end up consuming the "partial" identifier and
    // we'll eventually end up in a pretty confused state. Because of that it becomes very difficult to
    // show the correct parameter help in this case. So, when we see "partial" we check if it's being used
    // as an identifier or as a contextual keyword. If it's the latter then we bail out. See
    // Bug: vswhidbey/542125
    return this.EatToken();
}
}
}
| |
//-----------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//-----------------------------------------------------------------------------
namespace System.Activities.DurableInstancing
{
using System.Collections.Generic;
using System.Data.SqlClient;
using System.Globalization;
using System.Runtime;
using System.Runtime.DurableInstancing;
using System.Text;
using System.Xml.Linq;
static class StoreUtilities
{
public static readonly Version Version40 = new Version(4, 0, 0, 0);
public static readonly Version Version45 = new Version(4, 5, 0, 0);
/// <summary>
/// Walks every remaining result set on the reader and returns the first error
/// reported by any of them, or null when all remaining sets report success.
/// </summary>
public static Exception CheckRemainingResultSetForErrors(XName commandName, SqlDataReader reader)
{
    Exception error;
    while (true)
    {
        error = StoreUtilities.GetNextResultSet(commandName, reader);
        // Stop at the first error, or when there are no more result sets.
        if (error != null || !reader.NextResult())
        {
            break;
        }
    }
    return error;
}
/// <summary>
/// Reads the sproc status code from column 0 of the current row and converts a
/// non-success value into the matching exception; returns null on success.
/// </summary>
public static Exception CheckResult(XName commandName, SqlDataReader reader)
{
    var commandResult = (CommandResult)reader.GetInt32(0);
    return commandResult == CommandResult.Success
        ? null
        : StoreUtilities.GetError(commandName, commandResult, reader);
}
/// <summary>
/// Creates and opens a SQL connection for the given connection string.
/// The caller owns the returned connection and is responsible for disposing it.
/// </summary>
/// <param name="connectionString">The connection string to open.</param>
/// <returns>An open <see cref="SqlConnection"/>.</returns>
public static SqlConnection CreateConnection(string connectionString)
{
    SqlConnection connection = new SqlConnection(connectionString);
    try
    {
        connection.Open();
    }
    catch
    {
        // BUGFIX: previously a failed Open() leaked the connection object.
        // Dispose it and rethrow the original exception unchanged.
        connection.Dispose();
        throw;
    }
    return connection;
}
/// <summary>
/// Maps a stored-procedure result code to the corresponding persistence exception.
/// Returns null when the result is Success. Additional error details (instance ids,
/// keys, lock metadata) are pulled from the current row of the reader.
/// </summary>
public static Exception GetError(XName commandName, CommandResult result, SqlDataReader reader)
{
    if (result == CommandResult.Success)
    {
        return null;
    }
    switch (result)
    {
        case CommandResult.InstanceAlreadyExists:
            return new InstanceCollisionException(commandName, reader.GetGuid(1));
        case CommandResult.InstanceLockNotAcquired:
            return new InstanceLockedException(commandName, reader.GetGuid(1), reader.GetGuid(2), ReadLockOwnerMetadata(reader));
        case CommandResult.InstanceNotFound:
            return new InstanceNotReadyException(commandName, reader.GetGuid(1));
        case CommandResult.KeyAlreadyExists:
            return new InstanceKeyCollisionException(commandName, Guid.Empty,
                new InstanceKey(reader.GetGuid(1)), Guid.Empty);
        case CommandResult.KeyNotFound:
            return new InstanceKeyNotReadyException(commandName, new InstanceKey(reader.GetGuid(1)));
        // InstanceLockLost and StaleInstanceVersion intentionally map to the same exception.
        case CommandResult.InstanceLockLost:
        case CommandResult.StaleInstanceVersion:
            return new InstanceLockLostException(commandName, reader.GetGuid(1));
        case CommandResult.InstanceCompleted:
            return new InstanceCompleteException(commandName, reader.GetGuid(1));
        case CommandResult.KeyDisassociated:
            return new InstanceKeyCompleteException(commandName, new InstanceKey(reader.GetGuid(1)));
        case CommandResult.HostLockExpired:
            return new InstancePersistenceException(SR.HostLockExpired);
        case CommandResult.HostLockNotFound:
            return new InstancePersistenceException(SR.HostLockNotFound);
        case CommandResult.CleanupInProgress:
            return new InstancePersistenceCommandException(SR.CleanupInProgress);
        case CommandResult.InstanceAlreadyLockedToOwner:
            return new InstanceAlreadyLockedToOwnerException(commandName, reader.GetGuid(1), reader.GetInt64(2));
        default:
            return new InstancePersistenceCommandException(SR.UnknownSprocResult(result));
    }
}
/// <summary>
/// Scans result sets (advancing the reader) until a row whose first column is
/// named "Result" is found, then converts that row's status code into an
/// exception (or null for success). Returns null when no "Result" column appears
/// in any remaining result set.
/// </summary>
public static Exception GetNextResultSet(XName commandName, SqlDataReader reader)
{
    do
    {
        if (reader.Read())
        {
            do
            {
                // Skip rows with no columns at all.
                if (reader.FieldCount == 0)
                {
                    continue;
                }
                string columnName = reader.GetName(0);
                if (string.Compare("Result", columnName, StringComparison.Ordinal) == 0)
                {
                    return StoreUtilities.CheckResult(commandName, reader);
                }
            }
            while (reader.Read());
        }
    }
    while (reader.NextResult());
    return null;
}
/// <summary>
/// Emits a start/end ETW trace describing the SQL command text and its parameter
/// values, when the corresponding trace events are enabled.
/// </summary>
/// <param name="command">The command to describe; ignored when null.</param>
/// <param name="isStarting">True for the "start execute" event, false for the "end execute" event.</param>
public static void TraceSqlCommand(SqlCommand command, bool isStarting)
{
    if (((isStarting && TD.StartSqlCommandExecuteIsEnabled()) ||
        (!isStarting && TD.EndSqlCommandExecuteIsEnabled())) && command != null)
    {
        StringBuilder traceString = new StringBuilder(SqlWorkflowInstanceStoreConstants.DefaultStringBuilderCapacity);
        // BUGFIX: the command text used to be appended inside the parameter loop,
        // duplicating it once per parameter. Append it exactly once, up front.
        traceString.AppendLine(command.CommandText);
        // BUGFIX: firstItem was initialized to false, so the first parameter was
        // always rendered with a spurious leading ", ".
        bool firstItem = true;
        foreach (SqlParameter sqlParameter in command.Parameters)
        {
            string value;
            if ((sqlParameter.Value == DBNull.Value) || (sqlParameter.Value == null))
            {
                value = "Null";
            }
            else if (sqlParameter.DbType == System.Data.DbType.Binary)
            {
                // Don't dump raw binary payloads into the trace.
                value = "Binary";
            }
            else
            {
                value = sqlParameter.Value.ToString();
            }
            if (firstItem)
            {
                traceString.AppendFormat(CultureInfo.InvariantCulture, "{0}='{1}'", sqlParameter.ParameterName, value);
                firstItem = false;
            }
            else
            {
                traceString.AppendFormat(CultureInfo.InvariantCulture, ", {0}='{1}'", sqlParameter.ParameterName, value);
            }
        }
        if (isStarting)
        {
            TD.StartSqlCommandExecute(traceString.ToString());
        }
        else
        {
            TD.EndSqlCommandExecute(traceString.ToString());
        }
    }
}
/// <summary>
/// Reconstructs the lock owner's metadata dictionary from the current row:
/// column 3 holds the encoding option, column 4 the serialized primitive
/// properties, column 5 the serialized complex property bag. Duplicate keys
/// across the two bags throw, as in Dictionary.Add.
/// </summary>
static Dictionary<XName, object> ReadLockOwnerMetadata(SqlDataReader reader)
{
    // Columns are read in ascending order (3, 4, 5) to stay compatible with
    // sequential-access readers.
    InstanceEncodingOption encoding = (InstanceEncodingOption)reader.GetByte(3);
    byte[] primitiveBlob = reader.IsDBNull(4) ? null : (byte[])reader.GetValue(4);
    byte[] complexBlob = reader.IsDBNull(5) ? null : (byte[])reader.GetValue(5);
    IObjectSerializer serializer = ObjectSerializerFactory.GetObjectSerializer(encoding);
    Dictionary<XName, object> merged = new Dictionary<XName, object>();
    if (primitiveBlob != null)
    {
        var primitiveBag = (Dictionary<XName, object>)serializer.DeserializeValue(primitiveBlob);
        foreach (KeyValuePair<XName, object> pair in primitiveBag)
        {
            merged.Add(pair.Key, pair.Value);
        }
    }
    if (complexBlob != null)
    {
        var complexBag = serializer.DeserializePropertyBag(complexBlob);
        foreach (KeyValuePair<XName, object> pair in complexBag)
        {
            merged.Add(pair.Key, pair.Value);
        }
    }
    return merged;
}
}
}
| |
/*
* CID002f.cs - mk culture handler.
*
* Copyright (c) 2003 Southern Storm Software, Pty Ltd
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
// Generated from "mk.txt".
namespace I18N.Other
{
using System;
using System.Globalization;
using I18N.Common;
/// <summary>
/// Culture handler for the Macedonian ("mk") culture, LCID 0x002F.
/// This is generated data (see the "Generated from mk.txt" note above);
/// do not hand-edit the values — regenerate them instead.
/// </summary>
public class CID002f : RootCulture
{
	public CID002f() : base(0x002F) {}
	public CID002f(int culture) : base(culture) {}
	// Culture name identifiers.
	public override String Name
	{
		get
		{
			return "mk";
		}
	}
	public override String ThreeLetterISOLanguageName
	{
		get
		{
			return "mkd";
		}
	}
	public override String ThreeLetterWindowsLanguageName
	{
		get
		{
			return "MKI";
		}
	}
	public override String TwoLetterISOLanguageName
	{
		get
		{
			return "mk";
		}
	}
	// Date/time formatting data for "mk" (day/month names are Cyrillic, escaped as \uXXXX).
	public override DateTimeFormatInfo DateTimeFormat
	{
		get
		{
			DateTimeFormatInfo dfi = base.DateTimeFormat;
			dfi.AbbreviatedDayNames = new String[] {"\u043D\u0435\u0434.", "\u043F\u043E\u043D.", "\u0432\u0442.", "\u0441\u0440\u0435.", "\u0447\u0435\u0442.", "\u043F\u0435\u0442.", "\u0441\u0430\u0431."};
			dfi.DayNames = new String[] {"\u043D\u0435\u0434\u0435\u043B\u0430", "\u043F\u043E\u043D\u0435\u0434\u0435\u043B\u043D\u0438\u043A", "\u0432\u0442\u043E\u0440\u043D\u0438\u043A", "\u0441\u0440\u0435\u0434\u0430", "\u0447\u0435\u0442\u0432\u0440\u0442\u043E\u043A", "\u043F\u0435\u0442\u043E\u043A", "\u0441\u0430\u0431\u043E\u0442\u0430"};
			dfi.AbbreviatedMonthNames = new String[] {"\u0458\u0430\u043D.", "\u0444\u0435\u0432.", "\u043C\u0430\u0440.", "\u0430\u043F\u0440.", "\u043C\u0430\u0458.", "\u0458\u0443\u043D.", "\u0458\u0443\u043B.", "\u0430\u0432\u0433.", "\u0441\u0435\u043F\u0442.", "\u043E\u043A\u0442.", "\u043D\u043E\u0435\u043C.", "\u0434\u0435\u043A\u0435\u043C.", ""};
			dfi.MonthNames = new String[] {"\u0458\u0430\u043D\u0443\u0430\u0440\u0438", "\u0444\u0435\u0432\u0440\u0443\u0430\u0440\u0438", "\u043C\u0430\u0440\u0442", "\u0430\u043F\u0440\u0438\u043B", "\u043C\u0430\u0458", "\u0458\u0443\u043D\u0438", "\u0458\u0443\u043B\u0438", "\u0430\u0432\u0433\u0443\u0441\u0442", "\u0441\u0435\u043F\u0442\u0435\u043C\u0432\u0440\u0438", "\u043E\u043A\u0442\u043E\u043C\u0432\u0440\u0438", "\u043D\u043E\u0435\u043C\u0432\u0440\u0438", "\u0434\u0435\u043A\u0435\u043C\u0432\u0440\u0438", ""};
			dfi.DateSeparator = ".";
			dfi.TimeSeparator = ":";
			dfi.LongDatePattern = "d, MMMM yyyy";
			dfi.LongTimePattern = "HH:mm:ss z";
			dfi.ShortDatePattern = "d.M.yy";
			dfi.ShortTimePattern = "HH:mm";
			dfi.FullDateTimePattern = "dddd, d, MMMM yyyy HH:mm:ss z";
			// NOTE(review): the generated table below contains repeated keys
			// (e.g. several "f:", "g:", "t:" entries); presumably the I18N layer
			// treats later entries as fallbacks — verify against the generator.
			dfi.I18NSetDateTimePatterns(new String[] {
				"d:d.M.yy",
				"D:dddd, d, MMMM yyyy",
				"f:dddd, d, MMMM yyyy HH:mm:ss z",
				"f:dddd, d, MMMM yyyy HH:mm:ss z",
				"f:dddd, d, MMMM yyyy HH:mm:ss",
				"f:dddd, d, MMMM yyyy HH:mm",
				"F:dddd, d, MMMM yyyy HH:mm:ss",
				"g:d.M.yy HH:mm:ss z",
				"g:d.M.yy HH:mm:ss z",
				"g:d.M.yy HH:mm:ss",
				"g:d.M.yy HH:mm",
				"G:d.M.yy HH:mm:ss",
				"m:MMMM dd",
				"M:MMMM dd",
				"r:ddd, dd MMM yyyy HH':'mm':'ss 'GMT'",
				"R:ddd, dd MMM yyyy HH':'mm':'ss 'GMT'",
				"s:yyyy'-'MM'-'dd'T'HH':'mm':'ss",
				"t:HH:mm:ss z",
				"t:HH:mm:ss z",
				"t:HH:mm:ss",
				"t:HH:mm",
				"T:HH:mm:ss",
				"u:yyyy'-'MM'-'dd HH':'mm':'ss'Z'",
				"U:dddd, dd MMMM yyyy HH:mm:ss",
				"y:yyyy MMMM",
				"Y:yyyy MMMM",
			});
			return dfi;
		}
		set
		{
			base.DateTimeFormat = value; // not used
		}
	}
	// Number formatting data: comma decimal separator, dot group separator.
	public override NumberFormatInfo NumberFormat
	{
		get
		{
			NumberFormatInfo nfi = base.NumberFormat;
			nfi.CurrencyDecimalSeparator = ",";
			nfi.CurrencyGroupSeparator = ".";
			nfi.NumberGroupSeparator = ".";
			nfi.PercentGroupSeparator = ".";
			nfi.NegativeSign = "-";
			nfi.NumberDecimalSeparator = ",";
			nfi.PercentDecimalSeparator = ",";
			nfi.PercentSymbol = "%";
			nfi.PerMilleSymbol = "\u2030";
			return nfi;
		}
		set
		{
			base.NumberFormat = value; // not used
		}
	}
	// Returns the localized display name for a language code, falling back to the root culture.
	public override String ResolveLanguage(String name)
	{
		switch(name)
		{
			case "mk": return "\u043C\u0430\u043A\u0435\u0434\u043E\u043D\u0441\u043A\u0438";
		}
		return base.ResolveLanguage(name);
	}
	// Returns the localized display name for a country code, falling back to the root culture.
	public override String ResolveCountry(String name)
	{
		switch(name)
		{
			case "MK": return "\u041C\u0430\u043A\u0435\u0434\u043E\u043D\u0438\u0458\u0430";
		}
		return base.ResolveCountry(name);
	}
	// Text metadata (code pages, list separator) for this culture.
	private class PrivateTextInfo : _I18NTextInfo
	{
		public PrivateTextInfo(int culture) : base(culture) {}
		public override int ANSICodePage
		{
			get
			{
				return 1251;
			}
		}
		public override int EBCDICCodePage
		{
			get
			{
				return 500;
			}
		}
		public override int MacCodePage
		{
			get
			{
				return 10007;
			}
		}
		public override int OEMCodePage
		{
			get
			{
				return 866;
			}
		}
		public override String ListSeparator
		{
			get
			{
				return ";";
			}
		}
	}; // class PrivateTextInfo
	public override TextInfo TextInfo
	{
		get
		{
			// A fresh instance per call; callers are expected to cache if needed.
			return new PrivateTextInfo(LCID);
		}
	}
}; // class CID002f
/// <summary>Alias culture class exposing CID002f under the neutral culture name "mk".</summary>
public class CNmk : CID002f
{
	public CNmk() : base() {}
}; // class CNmk
}; // namespace I18N.Other
| |
/*
* Copyright 2010-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Xml.Serialization;
using Amazon.ElasticTranscoder.Model;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
using Amazon.Runtime.Internal.Transform;
using Amazon.Runtime.Internal.Util;
using ThirdParty.Json.LitJson;
namespace Amazon.ElasticTranscoder.Model.Internal.MarshallTransformations
{
/// <summary>
/// Create Pipeline Request Marshaller
/// </summary>
internal class CreatePipelineRequestMarshaller : IMarshaller<IRequest, CreatePipelineRequest>
{
    /// <summary>
    /// Marshals a CreatePipelineRequest into an HTTP POST with an
    /// application/x-amz-json-1.0 body targeting EtsCustomerService.CreatePipeline.
    /// The JSON property order matches the original generated marshaller.
    /// </summary>
    /// <param name="createPipelineRequest">The request to marshal; optional fields are emitted only when set.</param>
    /// <returns>The populated <see cref="IRequest"/>.</returns>
    public IRequest Marshall(CreatePipelineRequest createPipelineRequest)
    {
        IRequest request = new DefaultRequest(createPipelineRequest, "AmazonElasticTranscoder");
        string target = "EtsCustomerService.CreatePipeline";
        request.Headers["X-Amz-Target"] = target;
        request.Headers["Content-Type"] = "application/x-amz-json-1.0";
        request.HttpMethod = "POST";

        // This operation's resource path has no query string; the splitting logic
        // is retained for consistency with the other generated marshallers.
        string uriResourcePath = "2012-09-25/pipelines";
        if (uriResourcePath.Contains("?"))
        {
            string queryString = uriResourcePath.Substring(uriResourcePath.IndexOf("?") + 1);
            uriResourcePath = uriResourcePath.Substring(0, uriResourcePath.IndexOf("?"));
            foreach (string s in queryString.Split('&', ';'))
            {
                string[] nameValuePair = s.Split('=');
                if (nameValuePair.Length == 2 && nameValuePair[1].Length > 0)
                {
                    request.Parameters.Add(nameValuePair[0], nameValuePair[1]);
                }
                else
                {
                    request.Parameters.Add(nameValuePair[0], null);
                }
            }
        }
        request.ResourcePath = uriResourcePath;

        using (StringWriter stringWriter = new StringWriter())
        {
            JsonWriter writer = new JsonWriter(stringWriter);
            writer.WriteObjectStart();
            if (createPipelineRequest != null && createPipelineRequest.IsSetName())
            {
                writer.WritePropertyName("Name");
                writer.Write(createPipelineRequest.Name);
            }
            if (createPipelineRequest != null && createPipelineRequest.IsSetInputBucket())
            {
                writer.WritePropertyName("InputBucket");
                writer.Write(createPipelineRequest.InputBucket);
            }
            if (createPipelineRequest != null && createPipelineRequest.IsSetOutputBucket())
            {
                writer.WritePropertyName("OutputBucket");
                writer.Write(createPipelineRequest.OutputBucket);
            }
            if (createPipelineRequest != null && createPipelineRequest.IsSetRole())
            {
                writer.WritePropertyName("Role");
                writer.Write(createPipelineRequest.Role);
            }
            if (createPipelineRequest != null)
            {
                WriteNotifications(writer, createPipelineRequest.Notifications);
                // ContentConfig and ThumbnailConfig share the same shape; the two
                // previously duplicated blocks are factored into one helper.
                WritePipelineOutputConfig(writer, "ContentConfig", createPipelineRequest.ContentConfig);
                WritePipelineOutputConfig(writer, "ThumbnailConfig", createPipelineRequest.ThumbnailConfig);
            }
            writer.WriteObjectEnd();
            string snippet = stringWriter.ToString();
            request.Content = System.Text.Encoding.UTF8.GetBytes(snippet);
        }
        return request;
    }

    // Writes the "Notifications" JSON object when notifications are present.
    private static void WriteNotifications(JsonWriter writer, Notifications notifications)
    {
        if (notifications == null)
        {
            return;
        }
        writer.WritePropertyName("Notifications");
        writer.WriteObjectStart();
        if (notifications.IsSetProgressing())
        {
            writer.WritePropertyName("Progressing");
            writer.Write(notifications.Progressing);
        }
        if (notifications.IsSetCompleted())
        {
            writer.WritePropertyName("Completed");
            writer.Write(notifications.Completed);
        }
        if (notifications.IsSetWarning())
        {
            writer.WritePropertyName("Warning");
            writer.Write(notifications.Warning);
        }
        if (notifications.IsSetError())
        {
            writer.WritePropertyName("Error");
            writer.Write(notifications.Error);
        }
        writer.WriteObjectEnd();
    }

    // Writes a PipelineOutputConfig (Bucket, StorageClass, Permissions) under the
    // given property name when the config is present.
    private static void WritePipelineOutputConfig(JsonWriter writer, string propertyName, PipelineOutputConfig config)
    {
        if (config == null)
        {
            return;
        }
        writer.WritePropertyName(propertyName);
        writer.WriteObjectStart();
        if (config.IsSetBucket())
        {
            writer.WritePropertyName("Bucket");
            writer.Write(config.Bucket);
        }
        if (config.IsSetStorageClass())
        {
            writer.WritePropertyName("StorageClass");
            writer.Write(config.StorageClass);
        }
        if (config.Permissions != null && config.Permissions.Count > 0)
        {
            writer.WritePropertyName("Permissions");
            writer.WriteArrayStart();
            foreach (Permission permission in config.Permissions)
            {
                WritePermission(writer, permission);
            }
            writer.WriteArrayEnd();
        }
        writer.WriteObjectEnd();
    }

    // Writes a single Permission object. Like the original code, a null entry
    // still produces an empty JSON object in the array.
    private static void WritePermission(JsonWriter writer, Permission permission)
    {
        writer.WriteObjectStart();
        if (permission != null && permission.IsSetGranteeType())
        {
            writer.WritePropertyName("GranteeType");
            writer.Write(permission.GranteeType);
        }
        if (permission != null && permission.IsSetGrantee())
        {
            writer.WritePropertyName("Grantee");
            writer.Write(permission.Grantee);
        }
        if (permission != null && permission.Access != null && permission.Access.Count > 0)
        {
            writer.WritePropertyName("Access");
            writer.WriteArrayStart();
            foreach (string accessValue in permission.Access)
            {
                writer.Write(StringUtils.FromString(accessValue));
            }
            writer.WriteArrayEnd();
        }
        writer.WriteObjectEnd();
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Diagnostics;
#if !ES_BUILD_AGAINST_DOTNET_V35
using Contract = System.Diagnostics.Contracts.Contract;
#else
using Contract = Microsoft.Diagnostics.Contracts.Internal.Contract;
#endif
#if ES_BUILD_STANDALONE
namespace Microsoft.Diagnostics.Tracing
#else
namespace System.Diagnostics.Tracing
#endif
{
    /// <summary>
    /// Holds property values of any type. For common value types, we have inline storage so that we don't need
    /// to box the values. For all other types, we store the value in a single object reference field.
    ///
    /// To get the value of a property quickly, use a delegate produced by <see cref="PropertyValue.GetPropertyGetter(PropertyInfo)"/>.
    /// </summary>
#if ES_BUILD_PN
    [CLSCompliant(false)]
    public
#else
    internal
#endif
    unsafe readonly struct PropertyValue
    {
        /// <summary>
        /// Union of well-known value types, to avoid boxing those types.
        /// All fields share offset 0, so at most one of them is meaningful at a time;
        /// <see cref="_scalarLength"/> records which size of payload was stored.
        /// </summary>
        [StructLayout(LayoutKind.Explicit)]
        public struct Scalar
        {
            [FieldOffset(0)]
            public bool AsBoolean;
            [FieldOffset(0)]
            public byte AsByte;
            [FieldOffset(0)]
            public sbyte AsSByte;
            [FieldOffset(0)]
            public char AsChar;
            [FieldOffset(0)]
            public short AsInt16;
            [FieldOffset(0)]
            public ushort AsUInt16;
            [FieldOffset(0)]
            public int AsInt32;
            [FieldOffset(0)]
            public uint AsUInt32;
            [FieldOffset(0)]
            public long AsInt64;
            [FieldOffset(0)]
            public ulong AsUInt64;
            [FieldOffset(0)]
            public IntPtr AsIntPtr;
            [FieldOffset(0)]
            public UIntPtr AsUIntPtr;
            [FieldOffset(0)]
            public float AsSingle;
            [FieldOffset(0)]
            public double AsDouble;
            [FieldOffset(0)]
            public Guid AsGuid;
            [FieldOffset(0)]
            public DateTime AsDateTime;
            [FieldOffset(0)]
            public DateTimeOffset AsDateTimeOffset;
            [FieldOffset(0)]
            public TimeSpan AsTimeSpan;
            [FieldOffset(0)]
            public decimal AsDecimal;
        }

        // Anything not covered by the Scalar union gets stored in this reference.
        readonly object? _reference;
        readonly Scalar _scalar;
        // 0 means the value lives in _reference (reference type or boxed value type);
        // a positive value is the byte size of the scalar stored in _scalar.
        readonly int _scalarLength;

        // Wraps a reference-type or boxed value; _scalar stays unused.
        private PropertyValue(object? value)
        {
            _reference = value;
            _scalar = default;
            _scalarLength = 0;
        }

        // Wraps an unboxed scalar of the given byte size; _reference stays null.
        private PropertyValue(Scalar scalar, int scalarLength)
        {
            _reference = null;
            _scalar = scalar;
            _scalarLength = scalarLength;
        }

        // One boxing-free constructor per supported scalar type. sizeof(T) on the
        // non-primitive types (Guid, DateTime, ...) is why the struct is 'unsafe'.
        private PropertyValue(bool value) : this(new Scalar() { AsBoolean = value }, sizeof(bool)) { }
        private PropertyValue(byte value) : this(new Scalar() { AsByte = value }, sizeof(byte)) { }
        private PropertyValue(sbyte value) : this(new Scalar() { AsSByte = value }, sizeof(sbyte)) { }
        private PropertyValue(char value) : this(new Scalar() { AsChar = value }, sizeof(char)) { }
        private PropertyValue(short value) : this(new Scalar() { AsInt16 = value }, sizeof(short)) { }
        private PropertyValue(ushort value) : this(new Scalar() { AsUInt16 = value }, sizeof(ushort)) { }
        private PropertyValue(int value) : this(new Scalar() { AsInt32 = value }, sizeof(int)) { }
        private PropertyValue(uint value) : this(new Scalar() { AsUInt32 = value }, sizeof(uint)) { }
        private PropertyValue(long value) : this(new Scalar() { AsInt64 = value }, sizeof(long)) { }
        private PropertyValue(ulong value) : this(new Scalar() { AsUInt64 = value }, sizeof(ulong)) { }
        private PropertyValue(IntPtr value) : this(new Scalar() { AsIntPtr = value }, sizeof(IntPtr)) { }
        private PropertyValue(UIntPtr value) : this(new Scalar() { AsUIntPtr = value }, sizeof(UIntPtr)) { }
        private PropertyValue(float value) : this(new Scalar() { AsSingle = value }, sizeof(float)) { }
        private PropertyValue(double value) : this(new Scalar() { AsDouble = value }, sizeof(double)) { }
        private PropertyValue(Guid value) : this(new Scalar() { AsGuid = value }, sizeof(Guid)) { }
        private PropertyValue(DateTime value) : this(new Scalar() { AsDateTime = value }, sizeof(DateTime)) { }
        private PropertyValue(DateTimeOffset value) : this(new Scalar() { AsDateTimeOffset = value }, sizeof(DateTimeOffset)) { }
        private PropertyValue(TimeSpan value) : this(new Scalar() { AsTimeSpan = value }, sizeof(TimeSpan)) { }
        private PropertyValue(decimal value) : this(new Scalar() { AsDecimal = value }, sizeof(decimal)) { }

        /// <summary>
        /// Returns a factory that wraps a (possibly boxed) value of <paramref name="type"/> in a
        /// PropertyValue. Known scalar types are unboxed into the Scalar union; anything else is
        /// stored as the object reference itself.
        /// </summary>
        public static Func<object?, PropertyValue> GetFactory(Type type)
        {
            if (type == typeof(bool)) return value => new PropertyValue((bool)value!);
            if (type == typeof(byte)) return value => new PropertyValue((byte)value!);
            if (type == typeof(sbyte)) return value => new PropertyValue((sbyte)value!);
            if (type == typeof(char)) return value => new PropertyValue((char)value!);
            if (type == typeof(short)) return value => new PropertyValue((short)value!);
            if (type == typeof(ushort)) return value => new PropertyValue((ushort)value!);
            if (type == typeof(int)) return value => new PropertyValue((int)value!);
            if (type == typeof(uint)) return value => new PropertyValue((uint)value!);
            if (type == typeof(long)) return value => new PropertyValue((long)value!);
            if (type == typeof(ulong)) return value => new PropertyValue((ulong)value!);
            if (type == typeof(IntPtr)) return value => new PropertyValue((IntPtr)value!);
            if (type == typeof(UIntPtr)) return value => new PropertyValue((UIntPtr)value!);
            if (type == typeof(float)) return value => new PropertyValue((float)value!);
            if (type == typeof(double)) return value => new PropertyValue((double)value!);
            if (type == typeof(Guid)) return value => new PropertyValue((Guid)value!);
            if (type == typeof(DateTime)) return value => new PropertyValue((DateTime)value!);
            if (type == typeof(DateTimeOffset)) return value => new PropertyValue((DateTimeOffset)value!);
            if (type == typeof(TimeSpan)) return value => new PropertyValue((TimeSpan)value!);
            if (type == typeof(decimal)) return value => new PropertyValue((decimal)value!);
            return value => new PropertyValue(value);
        }

        /// <summary>The stored object reference. Only valid when this instance does not hold an unboxed scalar.</summary>
        public object? ReferenceValue
        {
            get
            {
                Debug.Assert(_scalarLength == 0, "This ReflectedValue refers to an unboxed value type, not a reference type or boxed value type.");
                return _reference;
            }
        }

        /// <summary>The stored scalar union. Only valid when this instance holds an unboxed scalar.</summary>
        public Scalar ScalarValue
        {
            get
            {
                Debug.Assert(_scalarLength > 0, "This ReflectedValue refers to a reference type or boxed value type, not an unboxed value type");
                return _scalar;
            }
        }

        /// <summary>Byte size of the stored scalar. Only valid when this instance holds an unboxed scalar.</summary>
        public int ScalarLength
        {
            get
            {
                Debug.Assert(_scalarLength > 0, "This ReflectedValue refers to a reference type or boxed value type, not an unboxed value type");
                return _scalarLength;
            }
        }

        /// <summary>
        /// Gets a delegate that gets the value of a given property.
        /// </summary>
        public static Func<PropertyValue, PropertyValue> GetPropertyGetter(PropertyInfo property)
        {
            if (property.DeclaringType!.GetTypeInfo().IsValueType)
                return GetBoxedValueTypePropertyGetter(property);
            else
                return GetReferenceTypePropertyGetter(property);
        }

        /// <summary>
        /// Gets a delegate that gets the value of a property of a value type. We unfortunately cannot avoid boxing the value type,
        /// without making this generic over the value type. That would result in a large number of generic instantiations, and furthermore
        /// does not work correctly on .NET Native (we cannot express the needed instantiations in an rd.xml file). We expect that user-defined
        /// value types will be rare, and in any case the boxing only happens for events that are actually enabled.
        /// </summary>
        private static Func<PropertyValue, PropertyValue> GetBoxedValueTypePropertyGetter(PropertyInfo property)
        {
            var type = property.PropertyType;
            // Enums are stored as their underlying integral type.
            if (type.GetTypeInfo().IsEnum)
                type = Enum.GetUnderlyingType(type);
            var factory = GetFactory(type);
            return container => factory(property.GetValue(container.ReferenceValue));
        }

        /// <summary>
        /// For properties of reference types, we use a generic helper class to get the value. This enables us to use MethodInfo.CreateDelegate
        /// to build a fast getter. We can get away with this on .NET Native, because we really only need one runtime instantiation of the
        /// generic type, since it's only instantiated over reference types (and thus all instances are shared).
        /// </summary>
        /// <param name="property"></param>
        /// <returns></returns>
        private static Func<PropertyValue, PropertyValue> GetReferenceTypePropertyGetter(PropertyInfo property)
        {
            var helper = (TypeHelper)Activator.CreateInstance(typeof(ReferenceTypeHelper<>).MakeGenericType(property.DeclaringType!))!;
            return helper.GetPropertyGetter(property);
        }

#if ES_BUILD_PN
        public
#else
        private
#endif
        // Non-generic base so GetReferenceTypePropertyGetter can call the generic helper
        // through a common type after Activator.CreateInstance.
        abstract class TypeHelper
        {
            public abstract Func<PropertyValue, PropertyValue> GetPropertyGetter(PropertyInfo property);

            // Builds a strongly-typed Func<TContainer, TProperty> delegate from the property's getter.
            protected Delegate GetGetMethod(PropertyInfo property, Type propertyType)
            {
                return property.GetMethod!.CreateDelegate(typeof(Func<,>).MakeGenericType(property.DeclaringType!, propertyType));
            }
        }

#if ES_BUILD_PN
        public
#else
        private
#endif
        sealed class ReferenceTypeHelper<TContainer> : TypeHelper where TContainer : class?
        {
            public override Func<PropertyValue, PropertyValue> GetPropertyGetter(PropertyInfo property)
            {
                var type = property.PropertyType;
                if (!Statics.IsValueType(type))
                {
                    // Reference-typed property: the value is stored as-is.
                    var getter = (Func<TContainer, object?>)GetGetMethod(property, type);
                    return container => new PropertyValue(getter((TContainer)container.ReferenceValue!));
                }
                else
                {
                    if (type.GetTypeInfo().IsEnum)
                        type = Enum.GetUnderlyingType(type);
                    // One branch per supported scalar type so the value never gets boxed.
                    if (type == typeof(bool)) { var f = (Func<TContainer, bool>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(byte)) { var f = (Func<TContainer, byte>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(sbyte)) { var f = (Func<TContainer, sbyte>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(char)) { var f = (Func<TContainer, char>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(short)) { var f = (Func<TContainer, short>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(ushort)) { var f = (Func<TContainer, ushort>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(int)) { var f = (Func<TContainer, int>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(uint)) { var f = (Func<TContainer, uint>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(long)) { var f = (Func<TContainer, long>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(ulong)) { var f = (Func<TContainer, ulong>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(IntPtr)) { var f = (Func<TContainer, IntPtr>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(UIntPtr)) { var f = (Func<TContainer, UIntPtr>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(float)) { var f = (Func<TContainer, float>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(double)) { var f = (Func<TContainer, double>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(Guid)) { var f = (Func<TContainer, Guid>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(DateTime)) { var f = (Func<TContainer, DateTime>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(DateTimeOffset)) { var f = (Func<TContainer, DateTimeOffset>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(TimeSpan)) { var f = (Func<TContainer, TimeSpan>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    if (type == typeof(decimal)) { var f = (Func<TContainer, decimal>)GetGetMethod(property, type); return container => new PropertyValue(f((TContainer)container.ReferenceValue!)); }
                    // Unknown value type: fall back to reflection, which boxes the value.
                    return container => new PropertyValue(property.GetValue(container.ReferenceValue));
                }
            }
        }
    }
}
| |
using System.Runtime.Remoting;
using NUnit.Framework;
using System;
using System.Collections.Generic;
using System.Linq;
using Umbraco.Core;
using Umbraco.Core.Models;
using Umbraco.Core.Models.Rdbms;
using Umbraco.Tests.CodeFirst.TestModels.Composition;
using Umbraco.Tests.TestHelpers;
using Umbraco.Tests.TestHelpers.Entities;
namespace Umbraco.Tests.Services
{
[DatabaseTestBehavior(DatabaseBehavior.NewDbFileAndSchemaPerTest)]
[TestFixture, RequiresSTA]
public class ContentTypeServiceTests : BaseServiceTest
{
[SetUp]
public override void Initialize()
{
base.Initialize();
}
[TearDown]
public override void TearDown()
{
base.TearDown();
}
        // Verifies that removing a property type from a saved content type also removes the
        // corresponding property type and property value from existing content of that type.
        [Test]
        public void Deleting_PropertyType_Removes_The_Property_From_Content()
        {
            IContentType contentType1 = MockedContentTypes.CreateTextpageContentType("test1", "Test1");
            ServiceContext.ContentTypeService.Save(contentType1);
            IContent contentItem = MockedContent.CreateTextpageContent(contentType1, "Testing", -1);
            ServiceContext.ContentService.SaveAndPublishWithStatus(contentItem);
            // Snapshot counts before the property type is removed, for comparison below.
            var initProps = contentItem.Properties.Count;
            var initPropTypes = contentItem.PropertyTypes.Count();
            //remove a property
            contentType1.RemovePropertyType(contentType1.PropertyTypes.First().Alias);
            ServiceContext.ContentTypeService.Save(contentType1);
            //re-load it from the db
            contentItem = ServiceContext.ContentService.GetById(contentItem.Id);
            Assert.AreEqual(initPropTypes - 1, contentItem.PropertyTypes.Count());
            Assert.AreEqual(initProps - 1, contentItem.Properties.Count);
        }
        // Verifies that changing a content type's alias rebuilds the cached content XML for all
        // content of that type (root element renamed), while content of other types is untouched.
        [Test]
        public void Rebuild_Content_Xml_On_Alias_Change()
        {
            var contentType1 = MockedContentTypes.CreateTextpageContentType("test1", "Test1");
            var contentType2 = MockedContentTypes.CreateTextpageContentType("test2", "Test2");
            ServiceContext.ContentTypeService.Save(contentType1);
            ServiceContext.ContentTypeService.Save(contentType2);
            var contentItems1 = MockedContent.CreateTextpageContent(contentType1, -1, 10).ToArray();
            contentItems1.ForEach(x => ServiceContext.ContentService.SaveAndPublishWithStatus(x));
            var contentItems2 = MockedContent.CreateTextpageContent(contentType2, -1, 5).ToArray();
            contentItems2.ForEach(x => ServiceContext.ContentService.SaveAndPublishWithStatus(x));
            //only update the contentType1 alias which will force an xml rebuild for all content of that type
            contentType1.Alias = "newAlias";
            ServiceContext.ContentTypeService.Save(contentType1);
            foreach (var c in contentItems1)
            {
                // The cached XML row for each item must now use the new alias as its root element.
                var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id });
                Assert.IsNotNull(xml);
                Assert.IsTrue(xml.Xml.StartsWith("<newAlias"));
            }
            foreach (var c in contentItems2)
            {
                var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id });
                Assert.IsNotNull(xml);
                Assert.IsTrue(xml.Xml.StartsWith("<test2")); //should remain the same
            }
        }
[Test]
public void Rebuild_Content_Xml_On_Property_Removal()
{
var contentType1 = MockedContentTypes.CreateTextpageContentType("test1", "Test1");
ServiceContext.ContentTypeService.Save(contentType1);
var contentItems1 = MockedContent.CreateTextpageContent(contentType1, -1, 10).ToArray();
contentItems1.ForEach(x => ServiceContext.ContentService.SaveAndPublishWithStatus(x));
var alias = contentType1.PropertyTypes.First().Alias;
var elementToMatch = "<" + alias + ">";
foreach (var c in contentItems1)
{
var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id });
Assert.IsNotNull(xml);
Assert.IsTrue(xml.Xml.Contains(elementToMatch)); //verify that it is there before we remove the property
}
//remove a property
contentType1.RemovePropertyType(contentType1.PropertyTypes.First().Alias);
ServiceContext.ContentTypeService.Save(contentType1);
var reQueried = ServiceContext.ContentTypeService.GetContentType(contentType1.Id);
var reContent = ServiceContext.ContentService.GetById(contentItems1.First().Id);
foreach (var c in contentItems1)
{
var xml = DatabaseContext.Database.FirstOrDefault<ContentXmlDto>("WHERE nodeId = @Id", new { Id = c.Id });
Assert.IsNotNull(xml);
Assert.IsFalse(xml.Xml.Contains(elementToMatch)); //verify that it is no longer there
}
}
[Test]
public void Get_Descendants()
{
// Arrange
var contentTypeService = ServiceContext.ContentTypeService;
var hierarchy = CreateContentTypeHierarchy();
contentTypeService.Save(hierarchy, 0); //ensure they are saved!
var master = hierarchy.First();
//Act
var descendants = master.Descendants();
//Assert
Assert.AreEqual(10, descendants.Count());
}
[Test]
public void Get_Descendants_And_Self()
{
// Arrange
var contentTypeService = ServiceContext.ContentTypeService;
var hierarchy = CreateContentTypeHierarchy();
contentTypeService.Save(hierarchy, 0); //ensure they are saved!
var master = hierarchy.First();
//Act
var descendants = master.DescendantsAndSelf();
//Assert
Assert.AreEqual(11, descendants.Count());
}
        // Verifies that looking up a type by a GUID that does not exist returns null rather than throwing.
        // NOTE(review): this ContentTypeService test calls GetMediaType, not GetContentType —
        // presumably intentional since the service exposes both lookups; confirm.
        [Test]
        public void Get_With_Missing_Guid()
        {
            // Arrange
            var contentTypeService = ServiceContext.ContentTypeService;
            //Act
            var result = contentTypeService.GetMediaType(Guid.NewGuid());
            //Assert
            Assert.IsNull(result);
        }
[Test]
public void Can_Bulk_Save_New_Hierarchy_Content_Types()
{
// Arrange
var contentTypeService = ServiceContext.ContentTypeService;
var hierarchy = CreateContentTypeHierarchy();
// Act
contentTypeService.Save(hierarchy, 0);
Assert.That(hierarchy.Any(), Is.True);
Assert.That(hierarchy.Any(x => x.HasIdentity == false), Is.False);
//all parent id's should be ok, they are lazy and if they equal zero an exception will be thrown
Assert.DoesNotThrow(() => hierarchy.Any(x => x.ParentId != 0));
for (var i = 0; i < hierarchy.Count(); i++)
{
if (i == 0) continue;
Assert.AreEqual(hierarchy.ElementAt(i).ParentId, hierarchy.ElementAt(i - 1).Id);
}
}
        // Verifies that a base content type plus a derived type can be saved, and that content
        // created from the derived type's alias persists with the expected content type id.
        [Test]
        public void Can_Save_ContentType_Structure_And_Create_Content_Based_On_It()
        {
            // Arrange
            var cs = ServiceContext.ContentService;
            var cts = ServiceContext.ContentTypeService;
            var dtdYesNo = ServiceContext.DataTypeService.GetDataTypeDefinitionById(-49);
            var ctBase = new ContentType(-1) { Name = "Base", Alias = "Base", Icon = "folder.gif", Thumbnail = "folder.png" };
            ctBase.AddPropertyType(new PropertyType(dtdYesNo, Constants.Conventions.Content.NaviHide)
            {
                Name = "Hide From Navigation",
            }
            /*,"Navigation"*/);
            cts.Save(ctBase);
            const string contentTypeAlias = "HomePage";
            // Derived type inherits from ctBase and is allowed at the content root.
            var ctHomePage = new ContentType(ctBase, contentTypeAlias)
            {
                Name = "Home Page",
                Alias = contentTypeAlias,
                Icon = "settingDomain.gif",
                Thumbnail = "folder.png",
                AllowedAsRoot = true
            };
            ctHomePage.AddPropertyType(new PropertyType(dtdYesNo, "someProperty") { Name = "Some property" }
            /*,"Navigation"*/);
            cts.Save(ctHomePage);
            // Act
            var homeDoc = cs.CreateContent("Home Page", -1, contentTypeAlias);
            cs.SaveAndPublishWithStatus(homeDoc);
            // Assert
            Assert.That(ctBase.HasIdentity, Is.True);
            Assert.That(ctHomePage.HasIdentity, Is.True);
            Assert.That(homeDoc.HasIdentity, Is.True);
            Assert.That(homeDoc.ContentTypeId, Is.EqualTo(ctHomePage.Id));
        }
        // Verifies that saving a content type assigns sequential sort orders (0, 1, 2)
        // to its property types in the order they were added.
        [Test]
        public void Create_Content_Type_Ensures_Sort_Orders()
        {
            var service = ServiceContext.ContentTypeService;
            var contentType = new ContentType(-1)
            {
                Alias = "test",
                Name = "Test",
                Description = "ContentType used for simple text pages",
                Icon = ".sprTreeDoc3",
                Thumbnail = "doc2.png",
                SortOrder = 1,
                CreatorId = 0,
                Trashed = false
            };
            contentType.AddPropertyType(new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title") { Name = "Title", Description = "", Mandatory = false, DataTypeDefinitionId = -88 });
            contentType.AddPropertyType(new PropertyType(Constants.PropertyEditors.TinyMCEAlias, DataTypeDatabaseType.Ntext, "bodyText") { Name = "Body Text", Description = "", Mandatory = false, DataTypeDefinitionId = -87 });
            contentType.AddPropertyType(new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author") { Name = "Author", Description = "Name of the author", Mandatory = false, DataTypeDefinitionId = -88 });
            service.Save(contentType);
            // Each of the sort orders 0, 1 and 2 must occur exactly once.
            var sortOrders = contentType.PropertyTypes.Select(x => x.SortOrder).ToArray();
            Assert.AreEqual(1, sortOrders.Count(x => x == 0));
            Assert.AreEqual(1, sortOrders.Count(x => x == 1));
            Assert.AreEqual(1, sortOrders.Count(x => x == 2));
        }
        // Verifies composition rules across a small type graph: adding 'component' as a
        // composition of 'category' is expected to be rejected (AddContentType returns false).
        [Test]
        public void Can_Create_And_Save_ContentType_Composition()
        {
            /*
             * Global
             * - Components
             * - Category
             */
            var service = ServiceContext.ContentTypeService;
            var global = MockedContentTypes.CreateSimpleContentType("global", "Global");
            service.Save(global);
            var components = MockedContentTypes.CreateSimpleContentType("components", "Components", global, true);
            service.Save(components);
            var component = MockedContentTypes.CreateSimpleContentType("component", "Component", components, true);
            service.Save(component);
            var category = MockedContentTypes.CreateSimpleContentType("category", "Category", global, true);
            service.Save(category);
            // Presumably rejected because both types already share 'global' in their
            // composition graph — TODO confirm against AddContentType's rules.
            var success = category.AddContentType(component);
            Assert.That(success, Is.False);
        }
        // Verifies that a composition (banner) can be added to and then removed from a
        // content type (homepage), and that its property types appear/disappear accordingly.
        [Test]
        public void Can_Remove_ContentType_Composition_From_ContentType()
        {
            //Test for U4-2234
            var cts = ServiceContext.ContentTypeService;
            //Arrange
            var component = CreateComponent();
            cts.Save(component);
            var banner = CreateBannerComponent(component);
            cts.Save(banner);
            var site = CreateSite();
            cts.Save(site);
            var homepage = CreateHomepage(site);
            cts.Save(homepage);
            //Add banner to homepage
            var added = homepage.AddContentType(banner);
            cts.Save(homepage);
            //Assert composition
            var bannerExists = homepage.ContentTypeCompositionExists(banner.Alias);
            var bannerPropertyExists = homepage.CompositionPropertyTypes.Any(x => x.Alias.Equals("bannerName"));
            Assert.That(added, Is.True);
            Assert.That(bannerExists, Is.True);
            Assert.That(bannerPropertyExists, Is.True);
            Assert.That(homepage.CompositionPropertyTypes.Count(), Is.EqualTo(6));
            //Remove banner from homepage
            var removed = homepage.RemoveContentType(banner.Alias);
            cts.Save(homepage);
            //Assert composition
            var bannerStillExists = homepage.ContentTypeCompositionExists(banner.Alias);
            var bannerPropertyStillExists = homepage.CompositionPropertyTypes.Any(x => x.Alias.Equals("bannerName"));
            Assert.That(removed, Is.True);
            Assert.That(bannerStillExists, Is.False);
            Assert.That(bannerPropertyStillExists, Is.False);
            // Property count drops from 6 back to 4 once the banner composition is gone.
            Assert.That(homepage.CompositionPropertyTypes.Count(), Is.EqualTo(4));
        }
        // Verifies DeepCloneWithResetIdentities: the clone keeps structure (composition,
        // parent, level, property count) but gets fresh identities (id, key, path, sort order,
        // property type / group ids).
        [Test]
        public void Can_Copy_ContentType_By_Performing_Clone()
        {
            // Arrange
            var service = ServiceContext.ContentTypeService;
            var metaContentType = MockedContentTypes.CreateMetaContentType();
            service.Save(metaContentType);
            var simpleContentType = MockedContentTypes.CreateSimpleContentType("category", "Category", metaContentType);
            service.Save(simpleContentType);
            var categoryId = simpleContentType.Id;
            // Act
            var sut = simpleContentType.DeepCloneWithResetIdentities("newcategory");
            service.Save(sut);
            // Assert
            Assert.That(sut.HasIdentity, Is.True);
            var contentType = service.GetContentType(sut.Id);
            var category = service.GetContentType(categoryId);
            // Structure preserved...
            Assert.That(contentType.CompositionAliases().Any(x => x.Equals("meta")), Is.True);
            Assert.AreEqual(contentType.ParentId, category.ParentId);
            Assert.AreEqual(contentType.Level, category.Level);
            Assert.AreEqual(contentType.PropertyTypes.Count(), category.PropertyTypes.Count());
            // ...but every identity is new.
            Assert.AreNotEqual(contentType.Id, category.Id);
            Assert.AreNotEqual(contentType.Key, category.Key);
            Assert.AreNotEqual(contentType.Path, category.Path);
            Assert.AreNotEqual(contentType.SortOrder, category.SortOrder);
            Assert.AreNotEqual(contentType.PropertyTypes.First(x => x.Alias.Equals("title")).Id, category.PropertyTypes.First(x => x.Alias.Equals("title")).Id);
            Assert.AreNotEqual(contentType.PropertyGroups.First(x => x.Name.Equals("Content")).Id, category.PropertyGroups.First(x => x.Name.Equals("Content")).Id);
        }
        // Verifies that a deep clone can be re-parented: swap the composition from parent1 to
        // parent2, save, and confirm the clone's path/parent reflect parent2 while identities
        // differ from the original.
        [Test]
        public void Can_Copy_ContentType_To_New_Parent_By_Performing_Clone()
        {
            // Arrange
            var service = ServiceContext.ContentTypeService;
            var parentContentType1 = MockedContentTypes.CreateSimpleContentType("parent1", "Parent1");
            service.Save(parentContentType1);
            var parentContentType2 = MockedContentTypes.CreateSimpleContentType("parent2", "Parent2", null, true);
            service.Save(parentContentType2);
            var simpleContentType = MockedContentTypes.CreateSimpleContentType("category", "Category", parentContentType1, true);
            service.Save(simpleContentType);
            // Act: clone, then swap composition/parent from parent1 to parent2 before saving.
            var clone = simpleContentType.DeepCloneWithResetIdentities("newcategory");
            clone.RemoveContentType("parent1");
            clone.AddContentType(parentContentType2);
            clone.ParentId = parentContentType2.Id;
            service.Save(clone);
            // Assert
            Assert.That(clone.HasIdentity, Is.True);
            var clonedContentType = service.GetContentType(clone.Id);
            var originalContentType = service.GetContentType(simpleContentType.Id);
            Assert.That(clonedContentType.CompositionAliases().Any(x => x.Equals("parent2")), Is.True);
            Assert.That(clonedContentType.CompositionAliases().Any(x => x.Equals("parent1")), Is.False);
            Assert.AreEqual(clonedContentType.Path, "-1," + parentContentType2.Id + "," + clonedContentType.Id);
            Assert.AreEqual(clonedContentType.PropertyTypes.Count(), originalContentType.PropertyTypes.Count());
            Assert.AreNotEqual(clonedContentType.ParentId, originalContentType.ParentId);
            Assert.AreEqual(clonedContentType.ParentId, parentContentType2.Id);
            Assert.AreNotEqual(clonedContentType.Id, originalContentType.Id);
            Assert.AreNotEqual(clonedContentType.Key, originalContentType.Key);
            Assert.AreNotEqual(clonedContentType.Path, originalContentType.Path);
            Assert.AreNotEqual(clonedContentType.PropertyTypes.First(x => x.Alias.StartsWith("title")).Id, originalContentType.PropertyTypes.First(x => x.Alias.StartsWith("title")).Id);
            Assert.AreNotEqual(clonedContentType.PropertyGroups.First(x => x.Name.StartsWith("Content")).Id, originalContentType.PropertyGroups.First(x => x.Name.StartsWith("Content")).Id);
        }
        // Verifies ContentTypeService.Copy to root: the copy drops the inherited composition,
        // sits at level 1 under root, keeps its property structure with fresh identities.
        [Test]
        public void Can_Copy_ContentType_With_Service_To_Root()
        {
            // Arrange
            var service = ServiceContext.ContentTypeService;
            var metaContentType = MockedContentTypes.CreateMetaContentType();
            service.Save(metaContentType);
            var simpleContentType = MockedContentTypes.CreateSimpleContentType("category", "Category", metaContentType);
            service.Save(simpleContentType);
            var categoryId = simpleContentType.Id;
            // Act
            var clone = service.Copy(simpleContentType, "newcategory", "new category");
            // Assert
            Assert.That(clone.HasIdentity, Is.True);
            var cloned = service.GetContentType(clone.Id);
            var original = service.GetContentType(categoryId);
            Assert.That(cloned.CompositionAliases().Any(x => x.Equals("meta")), Is.False); //it's been copied to root
            Assert.AreEqual(cloned.ParentId, -1);
            Assert.AreEqual(cloned.Level, 1);
            Assert.AreEqual(cloned.PropertyTypes.Count(), original.PropertyTypes.Count());
            Assert.AreEqual(cloned.PropertyGroups.Count(), original.PropertyGroups.Count());
            // Each group keeps its property count, and every copied property type is persisted.
            for (int i = 0; i < cloned.PropertyGroups.Count; i++)
            {
                Assert.AreEqual(cloned.PropertyGroups[i].PropertyTypes.Count, original.PropertyGroups[i].PropertyTypes.Count);
                foreach (var propertyType in cloned.PropertyGroups[i].PropertyTypes)
                {
                    Assert.IsTrue(propertyType.HasIdentity);
                }
            }
            foreach (var propertyType in cloned.PropertyTypes)
            {
                Assert.IsTrue(propertyType.HasIdentity);
            }
            Assert.AreNotEqual(cloned.Id, original.Id);
            Assert.AreNotEqual(cloned.Key, original.Key);
            Assert.AreNotEqual(cloned.Path, original.Path);
            Assert.AreNotEqual(cloned.SortOrder, original.SortOrder);
            Assert.AreNotEqual(cloned.PropertyTypes.First(x => x.Alias.Equals("title")).Id, original.PropertyTypes.First(x => x.Alias.Equals("title")).Id);
            Assert.AreNotEqual(cloned.PropertyGroups.First(x => x.Name.Equals("Content")).Id, original.PropertyGroups.First(x => x.Name.Equals("Content")).Id);
        }
        // Verifies ContentTypeService.Copy into a new parent: the copy composes parent2
        // (not parent1), its path reflects the new parent, and identities are fresh.
        [Test]
        public void Can_Copy_ContentType_To_New_Parent_With_Service()
        {
            // Arrange
            var service = ServiceContext.ContentTypeService;
            var parentContentType1 = MockedContentTypes.CreateSimpleContentType("parent1", "Parent1");
            service.Save(parentContentType1);
            var parentContentType2 = MockedContentTypes.CreateSimpleContentType("parent2", "Parent2", null, true);
            service.Save(parentContentType2);
            var simpleContentType = MockedContentTypes.CreateSimpleContentType("category", "Category", parentContentType1, true);
            service.Save(simpleContentType);
            // Act
            var clone = service.Copy(simpleContentType, "newAlias", "new alias", parentContentType2);
            // Assert
            Assert.That(clone.HasIdentity, Is.True);
            var clonedContentType = service.GetContentType(clone.Id);
            var originalContentType = service.GetContentType(simpleContentType.Id);
            Assert.That(clonedContentType.CompositionAliases().Any(x => x.Equals("parent2")), Is.True);
            Assert.That(clonedContentType.CompositionAliases().Any(x => x.Equals("parent1")), Is.False);
            Assert.AreEqual(clonedContentType.Path, "-1," + parentContentType2.Id + "," + clonedContentType.Id);
            Assert.AreEqual(clonedContentType.PropertyTypes.Count(), originalContentType.PropertyTypes.Count());
            Assert.AreNotEqual(clonedContentType.ParentId, originalContentType.ParentId);
            Assert.AreEqual(clonedContentType.ParentId, parentContentType2.Id);
            Assert.AreNotEqual(clonedContentType.Id, originalContentType.Id);
            Assert.AreNotEqual(clonedContentType.Key, originalContentType.Key);
            Assert.AreNotEqual(clonedContentType.Path, originalContentType.Path);
            Assert.AreNotEqual(clonedContentType.PropertyTypes.First(x => x.Alias.StartsWith("title")).Id, originalContentType.PropertyTypes.First(x => x.Alias.StartsWith("title")).Id);
            Assert.AreNotEqual(clonedContentType.PropertyGroups.First(x => x.Name.StartsWith("Content")).Id, originalContentType.PropertyGroups.First(x => x.Name.StartsWith("Content")).Id);
        }
        // Verifies that saving a composition fails when it gains a property type whose alias
        // ('title') duplicates one already present in a type that references the composition,
        // and that previously saved types remain readable afterwards.
        [Test]
        public void Cannot_Add_Duplicate_PropertyType_Alias_To_Referenced_Composition()
        {
            //Related the second issue in screencast from this post http://issues.umbraco.org/issue/U4-5986
            // Arrange
            var service = ServiceContext.ContentTypeService;
            var parent = MockedContentTypes.CreateSimpleContentType();
            service.Save(parent);
            var child = MockedContentTypes.CreateSimpleContentType("simpleChildPage", "Simple Child Page", parent, true);
            service.Save(child);
            var composition = MockedContentTypes.CreateMetaContentType();
            service.Save(composition);
            //Adding Meta-composition to child doc type
            child.AddContentType(composition);
            service.Save(child);
            // Act
            var duplicatePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
            {
                Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
            };
            // Adding to the in-memory model succeeds; the conflict is only detected on Save.
            var added = composition.AddPropertyType(duplicatePropertyType, "Meta");
            // Assert
            Assert.That(added, Is.True);
            Assert.Throws<Exception>(() => service.Save(composition));
            Assert.DoesNotThrow(() => service.GetContentType("simpleChildPage"));
        }
        // Verifies duplicate-alias detection across a whole composition graph (basePage ->
        // contentPage -> advancedPage, with meta/seo compositions): saving any type that would
        // introduce a second 'title' alias into the graph throws, while reads keep working.
        [Test]
        public void Cannot_Add_Duplicate_PropertyType_Alias_In_Composition_Graph()
        {
            // Arrange
            var service = ServiceContext.ContentTypeService;
            var basePage = MockedContentTypes.CreateSimpleContentType("basePage", "Base Page", null, true);
            service.Save(basePage);
            var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", basePage);
            service.Save(contentPage);
            var advancedPage = MockedContentTypes.CreateSimpleContentType("advancedPage", "Advanced Page", contentPage, true);
            service.Save(advancedPage);
            var metaComposition = MockedContentTypes.CreateMetaContentType();
            service.Save(metaComposition);
            var seoComposition = MockedContentTypes.CreateSeoContentType();
            service.Save(seoComposition);
            var metaAdded = contentPage.AddContentType(metaComposition);
            service.Save(contentPage);
            var seoAdded = advancedPage.AddContentType(seoComposition);
            service.Save(advancedPage);
            // Act: try to add the same 'title' alias at several points in the graph.
            var duplicatePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
            {
                Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
            };
            var addedToBasePage = basePage.AddPropertyType(duplicatePropertyType, "Content");
            var addedToAdvancedPage = advancedPage.AddPropertyType(duplicatePropertyType, "Content");
            var addedToMeta = metaComposition.AddPropertyType(duplicatePropertyType, "Meta");
            var addedToSeo = seoComposition.AddPropertyType(duplicatePropertyType, "Seo");
            // Assert
            Assert.That(metaAdded, Is.True);
            Assert.That(seoAdded, Is.True);
            // Only advancedPage rejects the in-memory add (its composition already resolves
            // a 'title' alias); the others fail later, at Save time.
            Assert.That(addedToBasePage, Is.True);
            Assert.That(addedToAdvancedPage, Is.False);
            Assert.That(addedToMeta, Is.True);
            Assert.That(addedToSeo, Is.True);
            Assert.Throws<Exception>(() => service.Save(basePage));
            Assert.Throws<Exception>(() => service.Save(metaComposition));
            Assert.Throws<Exception>(() => service.Save(seoComposition));
            Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
            Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
            Assert.DoesNotThrow(() => service.GetContentType("meta"));
            Assert.DoesNotThrow(() => service.GetContentType("seo"));
        }
        // Verifies that adding a 'title' property at the hierarchy root fails to save when a
        // third-level descendant's composition (Content Meta) already provides 'title'.
        [Test]
        public void Cannot_Add_Duplicate_PropertyType_Alias_At_Root_Which_Conflicts_With_Third_Levels_Composition()
        {
            /*
             * BasePage, gets 'Title' added but should not be allowed
             * -- Content Page
             * ---- Advanced Page -> Content Meta
             * Content Meta :: Composition, has 'Title'
             *
             * Content Meta has 'Title' PropertyType
             * Adding 'Title' to BasePage should fail
             */
            // Arrange
            var service = ServiceContext.ContentTypeService;
            var basePage = MockedContentTypes.CreateBasicContentType();
            service.Save(basePage);
            var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage);
            service.Save(contentPage);
            var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage);
            service.Save(advancedPage);
            var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType();
            service.Save(contentMetaComposition);
            // Act: non-conflicting properties save fine at each level...
            var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
            {
                Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
            };
            var bodyTextAdded = basePage.AddPropertyType(bodyTextPropertyType, "Content");
            service.Save(basePage);
            var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
            {
                Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
            };
            var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content");
            service.Save(contentPage);
            var compositionAdded = advancedPage.AddContentType(contentMetaComposition);
            service.Save(advancedPage);
            //NOTE: It should not be possible to Save 'BasePage' with the Title PropertyType added
            var titlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
            {
                Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
            };
            // The in-memory add succeeds; the conflict is only detected at Save time.
            var titleAdded = basePage.AddPropertyType(titlePropertyType, "Content");
            // Assert
            Assert.That(bodyTextAdded, Is.True);
            Assert.That(authorAdded, Is.True);
            Assert.That(titleAdded, Is.True);
            Assert.That(compositionAdded, Is.True);
            Assert.Throws<Exception>(() => service.Save(basePage));
            Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
            Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
        }
/// <summary>
/// Renaming a property type alias on one composition (Meta) so that it collides with
/// an alias supplied by another composition (Seo) used in the same graph must make the
/// save of the renamed composition throw.
/// </summary>
[Test]
public void Cannot_Rename_PropertyType_Alias_On_Composition_Which_Would_Cause_Conflict_In_Other_Composition()
{
/*
* Meta renames alias to 'title'
* Seo has 'Title'
* BasePage
* -- ContentPage
* ---- AdvancedPage -> Seo
* ------ MoreAdvancedPage -> Meta
*/
// Arrange
var service = ServiceContext.ContentTypeService;
var basePage = MockedContentTypes.CreateBasicContentType();
service.Save(basePage);
var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage);
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage);
service.Save(advancedPage);
var moreAdvancedPage = MockedContentTypes.CreateBasicContentType("moreAdvancedPage", "More Advanced Page", advancedPage);
service.Save(moreAdvancedPage);
var seoComposition = MockedContentTypes.CreateSeoContentType();
service.Save(seoComposition);
var metaComposition = MockedContentTypes.CreateMetaContentType();
service.Save(metaComposition);
// Act
var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
{
Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var bodyTextAdded = basePage.AddPropertyType(bodyTextPropertyType, "Content");
service.Save(basePage);
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content");
service.Save(contentPage);
var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle")
{
Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var subtitleAdded = advancedPage.AddPropertyType(subtitlePropertyType, "Content");
service.Save(advancedPage);
var titlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
{
Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var titleAdded = seoComposition.AddPropertyType(titlePropertyType, "Content");
service.Save(seoComposition);
var seoCompositionAdded = advancedPage.AddContentType(seoComposition);
var metaCompositionAdded = moreAdvancedPage.AddContentType(metaComposition);
service.Save(advancedPage);
service.Save(moreAdvancedPage);
// Rename an existing Meta alias to 'title', which Seo already provides in the same graph.
var keywordsPropertyType = metaComposition.PropertyTypes.First(x => x.Alias.Equals("metakeywords"));
keywordsPropertyType.Alias = "title";
// Assert
Assert.That(bodyTextAdded, Is.True);
Assert.That(subtitleAdded, Is.True);
Assert.That(authorAdded, Is.True);
Assert.That(titleAdded, Is.True);
Assert.That(seoCompositionAdded, Is.True);
Assert.That(metaCompositionAdded, Is.True);
Assert.Throws<Exception>(() => service.Save(metaComposition));
// The graph must remain intact after the rejected save.
Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
Assert.DoesNotThrow(() => service.GetContentType("moreAdvancedPage"));
}
/// <summary>
/// A composition that is already saved and in use by other content types can still
/// have additional property types added to it afterwards.
/// </summary>
[Test]
public void Can_Add_Additional_Properties_On_Composition_Once_Composition_Has_Been_Saved()
{
/*
* Seo (Composition) has 'Title'; Meta (Composition)
* BasePage
* -- ContentPage
* ---- AdvancedPage -> Seo
* ------ MoreAdvancedPage -> Meta
* After everything is saved, an extra 'test' property is added to the
* already-used Seo composition, which should succeed.
*/
// Arrange
var service = ServiceContext.ContentTypeService;
var basePage = MockedContentTypes.CreateBasicContentType();
service.Save(basePage);
var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage);
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage);
service.Save(advancedPage);
var moreAdvancedPage = MockedContentTypes.CreateBasicContentType("moreAdvancedPage", "More Advanced Page", advancedPage);
service.Save(moreAdvancedPage);
var seoComposition = MockedContentTypes.CreateSeoContentType();
service.Save(seoComposition);
var metaComposition = MockedContentTypes.CreateMetaContentType();
service.Save(metaComposition);
// Act
var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
{
Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var bodyTextAdded = basePage.AddPropertyType(bodyTextPropertyType, "Content");
service.Save(basePage);
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content");
service.Save(contentPage);
var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle")
{
Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var subtitleAdded = advancedPage.AddPropertyType(subtitlePropertyType, "Content");
service.Save(advancedPage);
var titlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
{
Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var titleAdded = seoComposition.AddPropertyType(titlePropertyType, "Content");
service.Save(seoComposition);
var seoCompositionAdded = advancedPage.AddContentType(seoComposition);
var metaCompositionAdded = moreAdvancedPage.AddContentType(metaComposition);
service.Save(advancedPage);
service.Save(moreAdvancedPage);
// Assert
Assert.That(bodyTextAdded, Is.True);
Assert.That(subtitleAdded, Is.True);
Assert.That(authorAdded, Is.True);
Assert.That(titleAdded, Is.True);
Assert.That(seoCompositionAdded, Is.True);
Assert.That(metaCompositionAdded, Is.True);
// The composition is already in use; adding one more (non-conflicting) property must still work.
var testPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "test")
{
Name = "Test", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var testAdded = seoComposition.AddPropertyType(testPropertyType, "Content");
service.Save(seoComposition);
Assert.That(testAdded, Is.True);
Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
Assert.DoesNotThrow(() => service.GetContentType("moreAdvancedPage"));
}
/// <summary>
/// Renaming a child's property group to a name already used by the parent's group
/// must be rejected: adding the renamed group to the collection throws.
/// </summary>
[Test]
public void Cannot_Rename_PropertyGroup_On_Child_Avoiding_Conflict_With_Parent_PropertyGroup()
{
// Arrange
var service = ServiceContext.ContentTypeService;
var page = MockedContentTypes.CreateSimpleContentType("page", "Page", null, true, "Content");
service.Save(page);
var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", page, true, "Content_");
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateSimpleContentType("advancedPage", "Advanced Page", contentPage, true, "Details");
service.Save(advancedPage);
var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType();
service.Save(contentMetaComposition);
// Act
var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle")
{
Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var subtitleAdded = contentPage.AddPropertyType(subtitlePropertyType, "Content");
var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content");
service.Save(contentPage);
var compositionAdded = contentPage.AddContentType(contentMetaComposition);
service.Save(contentPage);
// Renaming the child's "Content_" group to "Content" collides with the parent's group
// and must throw on insertion into the PropertyGroups collection.
var propertyGroup = contentPage.PropertyGroups["Content_"];
Assert.Throws<Exception>(() => contentPage.PropertyGroups.Add(new PropertyGroup
{
Id = propertyGroup.Id,
Name = "Content",
SortOrder = 0
}));
// Assert
Assert.That(compositionAdded, Is.True);
Assert.That(subtitleAdded, Is.True);
Assert.That(authorAdded, Is.True);
Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
}
/// <summary>
/// Renaming a property type alias on a descendant so that it duplicates an alias
/// inherited from an ancestor must make the save of the descendant throw.
/// </summary>
[Test]
public void Cannot_Rename_PropertyType_Alias_Causing_Conflicts_With_Parents()
{
// Arrange
var service = ServiceContext.ContentTypeService;
var basePage = MockedContentTypes.CreateBasicContentType();
service.Save(basePage);
var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage);
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage);
service.Save(advancedPage);
// Act
var titlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
{
Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var titleAdded = basePage.AddPropertyType(titlePropertyType, "Content");
var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
{
Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var bodyTextAdded = contentPage.AddPropertyType(bodyTextPropertyType, "Content");
var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle")
{
Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var subtitleAdded = contentPage.AddPropertyType(subtitlePropertyType, "Content");
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var authorAdded = advancedPage.AddPropertyType(authorPropertyType, "Content");
service.Save(basePage);
service.Save(contentPage);
service.Save(advancedPage);
//Rename the PropertyType to something that already exists in the Composition - NOTE this should not be allowed and Saving should throw an exception
var authorPropertyTypeToRename = advancedPage.PropertyTypes.First(x => x.Alias.Equals("author"));
authorPropertyTypeToRename.Alias = "title";
// Assert
Assert.That(bodyTextAdded, Is.True);
Assert.That(authorAdded, Is.True);
Assert.That(titleAdded, Is.True);
Assert.That(subtitleAdded, Is.True);
Assert.Throws<Exception>(() => service.Save(advancedPage));
Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
}
/// <summary>
/// An alias that only exists on a composition OUTSIDE the content type's own
/// composition graph does not conflict: adding and saving must succeed.
/// </summary>
[Test]
public void Can_Add_PropertyType_Alias_Which_Exists_In_Composition_Outside_Graph()
{
/*
* Meta (Composition)
* Content Meta (Composition) has 'Title' -> Meta
* BasePage
* -- ContentPage gets 'Title' added -> Meta
* ---- Advanced Page
*/
// Arrange
var service = ServiceContext.ContentTypeService;
var basePage = MockedContentTypes.CreateSimpleContentType("basePage", "Base Page", null, true);
service.Save(basePage);
var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", basePage, true);
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateSimpleContentType("advancedPage", "Advanced Page", contentPage, true);
service.Save(advancedPage);
var metaComposition = MockedContentTypes.CreateMetaContentType();
service.Save(metaComposition);
var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType();
service.Save(contentMetaComposition);
var metaAdded = contentPage.AddContentType(metaComposition);
service.Save(contentPage);
var metaAddedToComposition = contentMetaComposition.AddContentType(metaComposition);
service.Save(contentMetaComposition);
// Act
var propertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
{
Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
// 'title' also exists on contentMetaComposition, but that composition is not part of
// contentPage's graph, so the add must be allowed.
var addedToContentPage = contentPage.AddPropertyType(propertyType, "Content");
// Assert
Assert.That(metaAdded, Is.True);
Assert.That(metaAddedToComposition, Is.True);
Assert.That(addedToContentPage, Is.True);
Assert.DoesNotThrow(() => service.Save(contentPage));
}
/// <summary>
/// Renaming a parent's property group must propagate so the old name no longer
/// appears in the child's composition groups, while property type counts stay intact.
/// </summary>
[Test]
public void Can_Rename_PropertyGroup_With_Inherited_PropertyGroups()
{
//Related the first issue in screencast from this post http://issues.umbraco.org/issue/U4-5986
// Arrange
var service = ServiceContext.ContentTypeService;
var page = MockedContentTypes.CreateSimpleContentType("page", "Page", null, false, "Content_");
service.Save(page);
var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", page, true);
service.Save(contentPage);
var composition = MockedContentTypes.CreateMetaContentType();
composition.AddPropertyGroup("Content");
service.Save(composition);
//Adding Meta-composition to child doc type
contentPage.AddContentType(composition);
service.Save(contentPage);
// Act
var propertyTypeOne = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "testTextbox")
{
Name = "Test Textbox", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var firstOneAdded = contentPage.AddPropertyType(propertyTypeOne, "Content_");
var propertyTypeTwo = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "anotherTextbox")
{
Name = "Another Test Textbox", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var secondOneAdded = contentPage.AddPropertyType(propertyTypeTwo, "Content");
service.Save(contentPage);
Assert.That(page.PropertyGroups.Contains("Content_"), Is.True);
// Rename the inherited "Content_" group on the parent by re-adding it with the same Id.
var propertyGroup = page.PropertyGroups["Content_"];
page.PropertyGroups.Add(new PropertyGroup{ Id = propertyGroup.Id, Name = "ContentTab", SortOrder = 0});
service.Save(page);
// Assert
Assert.That(firstOneAdded, Is.True);
Assert.That(secondOneAdded, Is.True);
var contentType = service.GetContentType("contentPage");
Assert.That(contentType, Is.Not.Null);
// The old group name must no longer be visible anywhere in the child's composition.
var compositionPropertyGroups = contentType.CompositionPropertyGroups;
Assert.That(compositionPropertyGroups.Count(x => x.Name.Equals("Content_")), Is.EqualTo(0));
var propertyTypeCount = contentType.PropertyTypes.Count();
var compPropertyTypeCount = contentType.CompositionPropertyTypes.Count();
Assert.That(propertyTypeCount, Is.EqualTo(5));
Assert.That(compPropertyTypeCount, Is.EqualTo(10));
}
/// <summary>
/// Renaming a root content type's group to a name used by groups elsewhere in the
/// hierarchy/composition must not produce duplicate groups on descendants.
/// </summary>
[Test]
public void Can_Rename_PropertyGroup_On_Parent_Without_Causing_Duplicate_PropertyGroups()
{
// Arrange
var service = ServiceContext.ContentTypeService;
var page = MockedContentTypes.CreateSimpleContentType("page", "Page", null, true, "Content_");
service.Save(page);
var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", page, true, "Contentx");
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateSimpleContentType("advancedPage", "Advanced Page", contentPage, true, "Contenty");
service.Save(advancedPage);
var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType();
service.Save(contentMetaComposition);
var compositionAdded = contentPage.AddContentType(contentMetaComposition);
service.Save(contentPage);
// Act
var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
{
Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle")
{
Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var bodyTextAdded = contentPage.AddPropertyType(bodyTextPropertyType, "Content_");//Will be added to the parent tab
var subtitleAdded = contentPage.AddPropertyType(subtitlePropertyType, "Content");//Will be added to the "Content Meta" composition
service.Save(contentPage);
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var descriptionPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "description")
{
Name = "Description", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var keywordsPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "keywords")
{
Name = "Keywords", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var authorAdded = advancedPage.AddPropertyType(authorPropertyType, "Content_");//Will be added to an ancestor tab
var descriptionAdded = advancedPage.AddPropertyType(descriptionPropertyType, "Contentx");//Will be added to a parent tab
var keywordsAdded = advancedPage.AddPropertyType(keywordsPropertyType, "Content");//Will be added to the "Content Meta" composition
service.Save(advancedPage);
//Change the name of the tab on the "root" content type 'page'.
var propertyGroup = page.PropertyGroups["Content_"];
page.PropertyGroups.Add(new PropertyGroup { Id = propertyGroup.Id, Name = "Content", SortOrder = 0 });
service.Save(page);
// Assert
Assert.That(compositionAdded, Is.True);
Assert.That(bodyTextAdded, Is.True);
Assert.That(subtitleAdded, Is.True);
Assert.That(authorAdded, Is.True);
Assert.That(descriptionAdded, Is.True);
Assert.That(keywordsAdded, Is.True);
Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
// The old name must be gone and the renamed groups must not have merged into duplicates.
var advancedPageReloaded = service.GetContentType("advancedPage");
var contentUnderscoreTabExists = advancedPageReloaded.CompositionPropertyGroups.Any(x => x.Name.Equals("Content_"));
Assert.That(contentUnderscoreTabExists, Is.False);
var numberOfContentTabs = advancedPageReloaded.CompositionPropertyGroups.Count(x => x.Name.Equals("Content"));
Assert.That(numberOfContentTabs, Is.EqualTo(4));
}
/// <summary>
/// Variant of the duplicate-group rename test: the child already has a "Content"
/// group of its own when the parent's "Content_" group is renamed to "Content".
/// </summary>
[Test]
public void Can_Rename_PropertyGroup_On_Parent_Without_Causing_Duplicate_PropertyGroups_v2()
{
// Arrange
var service = ServiceContext.ContentTypeService;
var page = MockedContentTypes.CreateSimpleContentType("page", "Page", null, true, "Content_");
service.Save(page);
var contentPage = MockedContentTypes.CreateSimpleContentType("contentPage", "Content Page", page, true, "Content");
service.Save(contentPage);
var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType();
service.Save(contentMetaComposition);
// Act
var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
{
Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var subtitlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "subtitle")
{
Name = "Subtitle", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var bodyTextAdded = page.AddPropertyType(bodyTextPropertyType, "Content_");
var subtitleAdded = contentPage.AddPropertyType(subtitlePropertyType, "Content");
var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content_");
service.Save(page);
service.Save(contentPage);
var compositionAdded = contentPage.AddContentType(contentMetaComposition);
service.Save(contentPage);
//Change the name of the tab on the "root" content type 'page'.
var propertyGroup = page.PropertyGroups["Content_"];
page.PropertyGroups.Add(new PropertyGroup { Id = propertyGroup.Id, Name = "Content", SortOrder = 0 });
service.Save(page);
// Assert
Assert.That(compositionAdded, Is.True);
Assert.That(bodyTextAdded, Is.True);
Assert.That(subtitleAdded, Is.True);
Assert.That(authorAdded, Is.True);
Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
}
/// <summary>
/// Removing a group from the parent must not leave descendants with duplicate or
/// orphaned groups: the child's own "Content" group survives with no parent link.
/// </summary>
[Test]
public void Can_Remove_PropertyGroup_On_Parent_Without_Causing_Duplicate_PropertyGroups()
{
// Arrange
var service = ServiceContext.ContentTypeService;
var basePage = MockedContentTypes.CreateBasicContentType();
service.Save(basePage);
var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage);
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage);
service.Save(advancedPage);
var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType();
service.Save(contentMetaComposition);
// Act
var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
{
Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var bodyTextAdded = basePage.AddPropertyType(bodyTextPropertyType, "Content");
service.Save(basePage);
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content");
service.Save(contentPage);
var compositionAdded = contentPage.AddContentType(contentMetaComposition);
service.Save(contentPage);
// Remove the group from the root of the hierarchy.
basePage.RemovePropertyGroup("Content");
service.Save(basePage);
// Assert
Assert.That(bodyTextAdded, Is.True);
Assert.That(authorAdded, Is.True);
Assert.That(compositionAdded, Is.True);
Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
// The child's own "Content" group remains, no longer linked to a parent group.
var contentType = service.GetContentType("contentPage");
var propertyGroup = contentType.PropertyGroups["Content"];
Assert.That(propertyGroup.ParentId.HasValue, Is.False);
}
/// <summary>
/// Removing a property group must not delete the property types it contained:
/// the total property type count is unchanged after the group is removed.
/// </summary>
[Test]
public void Can_Remove_PropertyGroup_Without_Removing_Property_Types()
{
var service = ServiceContext.ContentTypeService;
var basePage = (IContentType)MockedContentTypes.CreateBasicContentType();
basePage.AddPropertyGroup("Content");
basePage.AddPropertyGroup("Meta");
service.Save(basePage);
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author",
Description = "",
Mandatory = false,
SortOrder = 1,
DataTypeDefinitionId = -88
};
var authorAdded = basePage.AddPropertyType(authorPropertyType, "Content");
var titlePropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "title")
{
Name = "Title",
Description = "",
Mandatory = false,
SortOrder = 1,
DataTypeDefinitionId = -88
};
// BUGFIX: previously authorPropertyType was passed here by mistake, so
// titlePropertyType was never attached and the "Meta" group stayed empty.
var titleAdded = basePage.AddPropertyType(titlePropertyType, "Meta");
service.Save(basePage);
// Guard: both property types must actually have been added before we measure.
Assert.That(authorAdded, Is.True);
Assert.That(titleAdded, Is.True);
basePage = service.GetContentType(basePage.Id);
var totalPt = basePage.PropertyTypes.Count();
basePage.RemovePropertyGroup("Content");
service.Save(basePage);
basePage = service.GetContentType(basePage.Id);
// The group is gone but every property type must still be present.
Assert.AreEqual(totalPt, basePage.PropertyTypes.Count());
}
/// <summary>
/// Parent, child, and a composition may all declare a group named "Content";
/// new property types added to the child's "Content" tab must land in the child's
/// own group, not an inherited or composed one.
/// </summary>
[Test]
public void Can_Add_PropertyGroup_With_Same_Name_On_Parent_and_Child()
{
/*
* BasePage
* - Content Page
* -- Advanced Page
* Content Meta :: Composition
*/
// Arrange
var service = ServiceContext.ContentTypeService;
var basePage = MockedContentTypes.CreateBasicContentType();
service.Save(basePage);
var contentPage = MockedContentTypes.CreateBasicContentType("contentPage", "Content Page", basePage);
service.Save(contentPage);
var advancedPage = MockedContentTypes.CreateBasicContentType("advancedPage", "Advanced Page", contentPage);
service.Save(advancedPage);
var contentMetaComposition = MockedContentTypes.CreateContentMetaContentType();
service.Save(contentMetaComposition);
// Act
var authorPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "author")
{
Name = "Author", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var authorAdded = contentPage.AddPropertyType(authorPropertyType, "Content");
service.Save(contentPage);
var bodyTextPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext, "bodyText")
{
Name = "Body Text", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
};
var bodyTextAdded = basePage.AddPropertyType(bodyTextPropertyType, "Content");
service.Save(basePage);
var compositionAdded = contentPage.AddContentType(contentMetaComposition);
service.Save(contentPage);
// Assert
Assert.That(bodyTextAdded, Is.True);
Assert.That(authorAdded, Is.True);
Assert.That(compositionAdded, Is.True);
Assert.DoesNotThrow(() => service.GetContentType("contentPage"));
Assert.DoesNotThrow(() => service.GetContentType("advancedPage"));
// Parent, child and composition each contribute a "Content" group (3 in total);
// the child's own group has no parent link.
var contentType = service.GetContentType("contentPage");
var propertyGroup = contentType.PropertyGroups["Content"];
Assert.That(propertyGroup.ParentId.HasValue, Is.False);
var numberOfContentTabs = contentType.CompositionPropertyGroups.Count(x => x.Name.Equals("Content"));
Assert.That(numberOfContentTabs, Is.EqualTo(3));
//Ensure that adding a new PropertyType to the "Content"-tab also adds it to the right group
var descriptionPropertyType = new PropertyType(Constants.PropertyEditors.TextboxAlias, DataTypeDatabaseType.Ntext)
{
Alias = "description", Name = "Description", Description = "", Mandatory = false, SortOrder = 1,DataTypeDefinitionId = -88
};
var descriptionAdded = contentType.AddPropertyType(descriptionPropertyType, "Content");
service.Save(contentType);
Assert.That(descriptionAdded, Is.True);
// After reload, the new property type must live in the child's own "Content" group.
var contentPageReloaded = service.GetContentType("contentPage");
var propertyGroupReloaded = contentPageReloaded.PropertyGroups["Content"];
var hasDescriptionPropertyType = propertyGroupReloaded.PropertyTypes.Contains("description");
Assert.That(hasDescriptionPropertyType, Is.True);
Assert.That(propertyGroupReloaded.ParentId.HasValue, Is.False);
var descriptionPropertyTypeReloaded = propertyGroupReloaded.PropertyTypes["description"];
Assert.That(descriptionPropertyTypeReloaded.PropertyGroupId.IsValueCreated, Is.False);
}
/// <summary>
/// Builds the root "component" content type with a single "Component" tab
/// holding the "componentGroup" property type.
/// </summary>
private ContentType CreateComponent()
{
    var componentType = new ContentType(-1)
    {
        Alias = "component",
        Name = "Component",
        Description = "ContentType used for Component grouping",
        Icon = ".sprTreeDoc3",
        Thumbnail = "doc.png",
        SortOrder = 1,
        CreatorId = 0,
        Trashed = false
    };

    // Single property type placed on the "Component" tab.
    var groupProperty = new PropertyType("test", DataTypeDatabaseType.Ntext, "componentGroup")
    {
        Name = "Component Group",
        Description = "",
        Mandatory = false,
        SortOrder = 1,
        DataTypeDefinitionId = -88
    };
    var properties = new PropertyTypeCollection();
    properties.Add(groupProperty);
    componentType.PropertyGroups.Add(new PropertyGroup(properties) { Name = "Component", SortOrder = 1 });

    return componentType;
}
/// <summary>
/// Builds the "banner" content type as a child of <paramref name="parent"/> and
/// adds a "bannerName" property to the inherited "Component" tab.
/// </summary>
private ContentType CreateBannerComponent(ContentType parent)
{
    const string contentTypeAlias = "banner";

    var bannerType = new ContentType(parent, contentTypeAlias)
    {
        Alias = contentTypeAlias,
        Name = "Banner Component",
        Description = "ContentType used for Banner Component",
        Icon = ".sprTreeDoc3",
        Thumbnail = "doc.png",
        SortOrder = 1,
        CreatorId = 0,
        Trashed = false
    };

    var bannerNameProperty = new PropertyType("test", DataTypeDatabaseType.Ntext, "bannerName")
    {
        Name = "Banner Name",
        Description = "",
        Mandatory = false,
        SortOrder = 2,
        DataTypeDefinitionId = -88
    };
    bannerType.AddPropertyType(bannerNameProperty, "Component");

    return bannerType;
}
/// <summary>
/// Builds the root "site" content type with a "Site Settings" tab holding the
/// "hostname" property type.
/// </summary>
private ContentType CreateSite()
{
    var siteType = new ContentType(-1)
    {
        Alias = "site",
        Name = "Site",
        Description = "ContentType used for Site inheritence",
        Icon = ".sprTreeDoc3",
        Thumbnail = "doc.png",
        SortOrder = 2,
        CreatorId = 0,
        Trashed = false
    };

    var hostnameProperty = new PropertyType("test", DataTypeDatabaseType.Ntext, "hostname")
    {
        Name = "Hostname",
        Description = "",
        Mandatory = false,
        SortOrder = 1,
        DataTypeDefinitionId = -88
    };
    var properties = new PropertyTypeCollection();
    properties.Add(hostnameProperty);
    siteType.PropertyGroups.Add(new PropertyGroup(properties) { Name = "Site Settings", SortOrder = 1 });

    return siteType;
}
/// <summary>
/// Builds the "homepage" content type as a child of <paramref name="parent"/> with a
/// "Content" tab holding title, bodyText and author property types.
/// </summary>
private ContentType CreateHomepage(ContentType parent)
{
    const string contentTypeAlias = "homepage";

    var homepageType = new ContentType(parent, contentTypeAlias)
    {
        Alias = contentTypeAlias,
        Name = "Homepage",
        Description = "ContentType used for the Homepage",
        Icon = ".sprTreeDoc3",
        Thumbnail = "doc.png",
        SortOrder = 1,
        CreatorId = 0,
        Trashed = false
    };

    var titleProperty = new PropertyType("test", DataTypeDatabaseType.Ntext, "title")
    {
        Name = "Title", Description = "", Mandatory = false, SortOrder = 1, DataTypeDefinitionId = -88
    };
    // NB: bodyText deliberately uses data type definition -87, unlike its siblings.
    var bodyTextProperty = new PropertyType("test", DataTypeDatabaseType.Ntext, "bodyText")
    {
        Name = "Body Text", Description = "", Mandatory = false, SortOrder = 2, DataTypeDefinitionId = -87
    };
    var authorProperty = new PropertyType("test", DataTypeDatabaseType.Ntext, "author")
    {
        Name = "Author", Description = "Name of the author", Mandatory = false, SortOrder = 3, DataTypeDefinitionId = -88
    };

    var properties = new PropertyTypeCollection();
    properties.Add(titleProperty);
    properties.Add(bodyTextProperty);
    properties.Add(authorProperty);
    homepageType.PropertyGroups.Add(new PropertyGroup(properties) { Name = "Content", SortOrder = 1 });

    return homepageType;
}
/// <summary>
/// Creates and saves a master content type plus a 10-deep chain of children
/// ("childType0".."childType9"), each parented to the previous one.
/// Returns the full chain, master first.
/// </summary>
private IContentType[] CreateContentTypeHierarchy()
{
    // Root of the chain, saved with a fixed key so tests can reference it.
    var masterContentType = MockedContentTypes.CreateSimpleContentType("masterContentType", "MasterContentType");
    masterContentType.Key = new Guid("C00CA18E-5A9D-483B-A371-EECE0D89B4AE");
    ServiceContext.ContentTypeService.Save(masterContentType);

    var hierarchy = new List<IContentType> { masterContentType };
    for (var level = 0; level < 10; level++)
    {
        // Each new child is parented to the most recently created type.
        var child = MockedContentTypes.CreateSimpleContentType("childType" + level, "ChildType" + level,
            hierarchy.Last(), true);
        hierarchy.Add(child);
    }
    return hierarchy.ToArray();
}
}
}
| |
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
namespace DalSic
{
/// <summary>
/// Strongly-typed collection for the RisPoblacionVulnerable class.
/// </summary>
[Serializable]
public partial class RisPoblacionVulnerableCollection : ActiveList<RisPoblacionVulnerable, RisPoblacionVulnerableCollection>
{
    public RisPoblacionVulnerableCollection() {}

    /// <summary>
    /// Filters an existing collection based on the set criteria. This is an in-memory filter.
    /// Thanks to developingchris for this!
    /// </summary>
    /// <returns>RisPoblacionVulnerableCollection</returns>
    public RisPoblacionVulnerableCollection Filter()
    {
        // Iterate backwards so removals do not disturb the remaining indexes.
        for (int i = this.Count - 1; i > -1; i--)
        {
            RisPoblacionVulnerable o = this[i];
            foreach (SubSonic.Where w in this.wheres)
            {
                bool remove = false;
                System.Reflection.PropertyInfo pi = o.GetType().GetProperty(w.ColumnName);
                // Guard against criteria naming a column with no matching property;
                // the original code dereferenced pi unconditionally and threw a
                // NullReferenceException for unknown column names.
                if (pi != null && pi.CanRead)
                {
                    object val = pi.GetValue(o, null);
                    switch (w.Comparison)
                    {
                        case SubSonic.Comparison.Equals:
                            // object.Equals is null-safe; val.Equals(...) threw when
                            // the property value was null.
                            if (!object.Equals(val, w.ParameterValue))
                            {
                                remove = true;
                            }
                            break;
                    }
                }
                if (remove)
                {
                    this.Remove(o);
                    break; // the item is gone; no need to evaluate further criteria
                }
            }
        }
        return this;
    }
}
/// <summary>
/// This is an ActiveRecord class which wraps the RIS_PoblacionVulnerable table.
/// </summary>
[Serializable]
public partial class RisPoblacionVulnerable : ActiveRecord<RisPoblacionVulnerable>, IActiveRecord
{
// NOTE: SubSonic-generated ActiveRecord wrapper. Hand-written extensions belong in a
// separate partial-class file so regeneration does not overwrite them.
#region .ctors and Default Settings
// Creates a new, unsaved record with generated defaults applied.
public RisPoblacionVulnerable()
{
SetSQLProps();
InitSetDefaults();
MarkNew();
}
private void InitSetDefaults() { SetDefaults(); }
// Creates a new record, optionally forcing database-side default values.
public RisPoblacionVulnerable(bool useDatabaseDefaults)
{
SetSQLProps();
if(useDatabaseDefaults)
ForceDefaults();
MarkNew();
}
// Loads an existing record by its primary key value.
public RisPoblacionVulnerable(object keyID)
{
SetSQLProps();
InitSetDefaults();
LoadByKey(keyID);
}
// Loads an existing record by an arbitrary column/value pair.
public RisPoblacionVulnerable(string columnName, object columnValue)
{
SetSQLProps();
InitSetDefaults();
LoadByParam(columnName,columnValue);
}
protected static void SetSQLProps() { GetTableSchema(); }
#endregion
#region Schema and Query Accessor
public static Query CreateQuery() { return new Query(Schema); }
// Lazily initializes the table schema on first access.
public static TableSchema.Table Schema
{
get
{
if (BaseSchema == null)
SetSQLProps();
return BaseSchema;
}
}
// Builds the RIS_PoblacionVulnerable schema once and registers it with the
// "sicProvider" data provider so later queries can resolve it.
private static void GetTableSchema()
{
if(!IsSchemaInitialized)
{
//Schema declaration
TableSchema.Table schema = new TableSchema.Table("RIS_PoblacionVulnerable", TableType.Table, DataService.GetInstance("sicProvider"));
schema.Columns = new TableSchema.TableColumnCollection();
schema.SchemaName = @"dbo";
//columns
// idPoblacionVulnerable: int identity, primary key.
TableSchema.TableColumn colvarIdPoblacionVulnerable = new TableSchema.TableColumn(schema);
colvarIdPoblacionVulnerable.ColumnName = "idPoblacionVulnerable";
colvarIdPoblacionVulnerable.DataType = DbType.Int32;
colvarIdPoblacionVulnerable.MaxLength = 0;
colvarIdPoblacionVulnerable.AutoIncrement = true;
colvarIdPoblacionVulnerable.IsNullable = false;
colvarIdPoblacionVulnerable.IsPrimaryKey = true;
colvarIdPoblacionVulnerable.IsForeignKey = false;
colvarIdPoblacionVulnerable.IsReadOnly = false;
colvarIdPoblacionVulnerable.DefaultSetting = @"";
colvarIdPoblacionVulnerable.ForeignKeyTableName = "";
schema.Columns.Add(colvarIdPoblacionVulnerable);
// descripcion: non-null varchar(100).
TableSchema.TableColumn colvarDescripcion = new TableSchema.TableColumn(schema);
colvarDescripcion.ColumnName = "descripcion";
colvarDescripcion.DataType = DbType.AnsiString;
colvarDescripcion.MaxLength = 100;
colvarDescripcion.AutoIncrement = false;
colvarDescripcion.IsNullable = false;
colvarDescripcion.IsPrimaryKey = false;
colvarDescripcion.IsForeignKey = false;
colvarDescripcion.IsReadOnly = false;
colvarDescripcion.DefaultSetting = @"";
colvarDescripcion.ForeignKeyTableName = "";
schema.Columns.Add(colvarDescripcion);
BaseSchema = schema;
//add this schema to the provider
//so we can query it later
DataService.Providers["sicProvider"].AddSchema("RIS_PoblacionVulnerable",schema);
}
}
#endregion
#region Props
// Primary key (identity column).
[XmlAttribute("IdPoblacionVulnerable")]
[Bindable(true)]
public int IdPoblacionVulnerable
{
get { return GetColumnValue<int>(Columns.IdPoblacionVulnerable); }
set { SetColumnValue(Columns.IdPoblacionVulnerable, value); }
}
[XmlAttribute("Descripcion")]
[Bindable(true)]
public string Descripcion
{
get { return GetColumnValue<string>(Columns.Descripcion); }
set { SetColumnValue(Columns.Descripcion, value); }
}
#endregion
//no foreign key tables defined (0)
//no ManyToMany tables defined (0)
#region ObjectDataSource support
/// <summary>
/// Inserts a record, can be used with the Object Data Source.
/// Saves under the current HTTP user's name when in a web context,
/// otherwise under the current thread principal's name.
/// </summary>
public static void Insert(string varDescripcion)
{
RisPoblacionVulnerable item = new RisPoblacionVulnerable();
item.Descripcion = varDescripcion;
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
/// <summary>
/// Updates a record, can be used with the Object Data Source.
/// </summary>
public static void Update(int varIdPoblacionVulnerable,string varDescripcion)
{
RisPoblacionVulnerable item = new RisPoblacionVulnerable();
item.IdPoblacionVulnerable = varIdPoblacionVulnerable;
item.Descripcion = varDescripcion;
// Marks the record as pre-existing, presumably so Save issues an UPDATE
// rather than an INSERT — confirm against ActiveRecord.Save.
item.IsNew = false;
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
#endregion
#region Typed Columns
// Index 0: idPoblacionVulnerable (must match the order columns were added in GetTableSchema).
public static TableSchema.TableColumn IdPoblacionVulnerableColumn
{
get { return Schema.Columns[0]; }
}
// Index 1: descripcion.
public static TableSchema.TableColumn DescripcionColumn
{
get { return Schema.Columns[1]; }
}
#endregion
#region Columns Struct
// Raw database column names; generated as mutable static fields — treat as read-only.
public struct Columns
{
public static string IdPoblacionVulnerable = @"idPoblacionVulnerable";
public static string Descripcion = @"descripcion";
}
#endregion
#region Update PK Collections
#endregion
#region Deep Save
#endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.NetworkInformation;
using System.Net.Sockets;
using System.Text;
using System.Threading.Tasks;
using System.Xml;
namespace Orleans.Runtime.Configuration
{
/// <summary>
/// Data object holding Silo configuration parameters.
/// </summary>
[Serializable]
public class ClusterConfiguration
{
/// <summary>
/// The global configuration parameters that apply uniformly to all silos.
/// </summary>
public GlobalConfiguration Globals { get; private set; }
/// <summary>
/// The default configuration parameters that apply to each and every silo.
/// These can be over-written on a per silo basis.
/// </summary>
public NodeConfiguration Defaults { get; private set; }
/// <summary>
/// The configuration file.
/// </summary>
public string SourceFile { get; private set; }
// Backing field for PrimaryNode; also assigned directly by LoadFromXml.
private IPEndPoint primaryNode;
/// <summary>
/// The Primary Node IP and port (in dev setting).
/// Setting it also flags the matching node override as primary (see SetPrimaryNode).
/// </summary>
public IPEndPoint PrimaryNode { get { return primaryNode; } set { SetPrimaryNode(value); } }
/// <summary>
/// Per silo configuration parameters overrides.
/// </summary>
public IDictionary<string, NodeConfiguration> Overrides { get; private set; }
// Raw XML text of each <Override> section, keyed by its Node attribute;
// re-parsed into NodeConfigurations by CalculateOverrides.
private Dictionary<string, string> overrideXml;
// Config-change callbacks keyed by configuration path (e.g. "Globals/Liveness").
private readonly Dictionary<string, List<Action>> listeners = new Dictionary<string, List<Action>>();
internal bool IsRunningAsUnitTest { get; set; }
/// <summary>
/// ClusterConfiguration constructor; initializes empty defaults.
/// </summary>
public ClusterConfiguration()
{
Init();
}
/// <summary>
/// ClusterConfiguration constructor; loads configuration XML from the given reader.
/// </summary>
public ClusterConfiguration(TextReader input)
{
Load(input);
}
// Resets all configuration state to fresh, empty defaults.
private void Init()
{
Globals = new GlobalConfiguration();
Defaults = new NodeConfiguration();
Overrides = new Dictionary<string, NodeConfiguration>();
overrideXml = new Dictionary<string, string>();
SourceFile = "";
IsRunningAsUnitTest = false;
}
/// <summary>
/// Loads configuration from a given input text reader.
/// </summary>
/// <param name="input">The TextReader to use.</param>
// Resets state and parses the full configuration document from the reader.
public void Load(TextReader input)
{
Init();
LoadFromXml(ParseXml(input));
}
// Dispatches each top-level configuration element to the matching loader.
// Override sections are kept as raw XML strings and only materialized into
// NodeConfigurations by CalculateOverrides at the end.
internal void LoadFromXml(XmlElement root)
{
foreach (XmlNode c in root.ChildNodes)
{
var child = c as XmlElement;
if (child == null) continue; // Skip comment lines
switch (child.LocalName)
{
case "Globals":
Globals.Load(child);
// set subnets so this is independent of order
Defaults.Subnet = Globals.Subnet;
foreach (var o in Overrides.Values)
{
o.Subnet = Globals.Subnet;
}
// The first seed node doubles as the primary node (dev setting).
if (Globals.SeedNodes.Count > 0)
{
primaryNode = Globals.SeedNodes[0];
}
break;
case "Defaults":
Defaults.Load(child);
Defaults.Subnet = Globals.Subnet;
break;
case "Override":
// Store raw XML keyed by node name; parsed later in CalculateOverrides.
overrideXml[child.GetAttribute("Node")] = WriteXml(child);
break;
}
}
CalculateOverrides();
}
/// <summary>
/// Serializes a single XML element (and its subtree) back to a string.
/// </summary>
private static string WriteXml(XmlElement element)
{
    var buffer = new StringBuilder();
    // Disposing the writer flushes any buffered output into the StringBuilder.
    using (var writer = XmlWriter.Create(buffer))
    {
        element.WriteTo(writer);
    }
    return buffer.ToString();
}
// Resolves unspecified liveness/reminder provider types from the configured
// system store, then materializes the stored per-node Override XML into
// NodeConfiguration objects. Branch order defines store precedence
// (SQL, then Azure, then ZooKeeper, then in-grain fallback).
private void CalculateOverrides()
{
if (Globals.LivenessEnabled &&
Globals.LivenessType == GlobalConfiguration.LivenessProviderType.NotSpecified)
{
if (Globals.UseSqlSystemStore)
{
Globals.LivenessType = GlobalConfiguration.LivenessProviderType.SqlServer;
}
else if (Globals.UseAzureSystemStore)
{
Globals.LivenessType = GlobalConfiguration.LivenessProviderType.AzureTable;
}
else if (Globals.UseZooKeeperSystemStore)
{
Globals.LivenessType = GlobalConfiguration.LivenessProviderType.ZooKeeper;
}
else
{
Globals.LivenessType = GlobalConfiguration.LivenessProviderType.MembershipTableGrain;
}
}
// Mock reminder table wins over any other reminder setting.
if (Globals.UseMockReminderTable)
{
Globals.SetReminderServiceType(GlobalConfiguration.ReminderServiceProviderType.MockTable);
}
else if (Globals.ReminderServiceType == GlobalConfiguration.ReminderServiceProviderType.NotSpecified)
{
if (Globals.UseSqlSystemStore)
{
Globals.SetReminderServiceType(GlobalConfiguration.ReminderServiceProviderType.SqlServer);
}
else if (Globals.UseAzureSystemStore)
{
Globals.SetReminderServiceType(GlobalConfiguration.ReminderServiceProviderType.AzureTable);
}
else if (Globals.UseZooKeeperSystemStore)
{
// Note: no ZooKeeper-backed reminder service; reminders are disabled here.
Globals.SetReminderServiceType(GlobalConfiguration.ReminderServiceProviderType.Disabled);
}
else
{
Globals.SetReminderServiceType(GlobalConfiguration.ReminderServiceProviderType.ReminderTableGrain);
}
}
// Re-parse each stored <Override> section on top of a copy of Defaults.
foreach (var p in overrideXml)
{
var n = new NodeConfiguration(Defaults);
n.Load(ParseXml(new StringReader(p.Value)));
InitNodeSettingsFromGlobals(n);
Overrides[n.SiloName] = n;
}
}
// Marks a node as primary and/or seed when its endpoint matches the
// configured primary node or appears in the global seed-node list.
private void InitNodeSettingsFromGlobals(NodeConfiguration n)
{
if (n.Endpoint.Equals(this.PrimaryNode)) n.IsPrimaryNode = true;
if (Globals.SeedNodes.Contains(n.Endpoint)) n.IsSeedNode = true;
}
/// <summary>Loads the configuration from a file and records its path in <see cref="SourceFile"/>.</summary>
/// <param name="fileName">The file path.</param>
public void LoadFromFile(string fileName)
{
    TextReader reader = File.OpenText(fileName);
    try
    {
        Load(reader);
        // Remember where the configuration came from, for diagnostics.
        SourceFile = fileName;
    }
    finally
    {
        reader.Dispose();
    }
}
/// <summary>
/// Obtains the configuration for a given silo.
/// </summary>
/// <param name="siloName">Silo name.</param>
/// <param name="siloNode">NodeConfiguration associated with the specified silo.</param>
/// <returns>true if node was found</returns>
/// <summary>
/// Obtains the configuration for a given silo.
/// </summary>
/// <param name="siloName">Silo name.</param>
/// <param name="siloNode">NodeConfiguration associated with the specified silo.</param>
/// <returns>true if node was found</returns>
public bool TryGetNodeConfigurationForSilo(string siloName, out NodeConfiguration siloNode)
{
    bool found = Overrides.TryGetValue(siloName, out siloNode);
    return found;
}
/// <summary>
/// Creates a configuration node for a given silo, derived from the defaults,
/// and registers it in <see cref="Overrides"/>.
/// </summary>
/// <param name="siloName">Silo name.</param>
/// <returns>NodeConfiguration associated with the specified silo.</returns>
public NodeConfiguration CreateNodeConfigurationForSilo(string siloName)
{
    var node = new NodeConfiguration(Defaults);
    node.SiloName = siloName;
    InitNodeSettingsFromGlobals(node);
    Overrides[siloName] = node;
    return node;
}
/// <summary>
/// Creates a node config for the specified silo if one does not exist. Returns existing node if one already exists.
/// </summary>
/// <param name="siloName">Silo name.</param>
/// <returns>NodeConfiguration associated with the specified silo.</returns>
public NodeConfiguration GetOrCreateNodeConfigurationForSilo(string siloName)
{
    NodeConfiguration existing;
    if (TryGetNodeConfigurationForSilo(siloName, out existing))
    {
        return existing;
    }
    return CreateNodeConfigurationForSilo(siloName);
}
/// <summary>
/// Records the primary-node endpoint and flags every already-registered node
/// override whose endpoint matches it as the primary node.
/// </summary>
private void SetPrimaryNode(IPEndPoint primary)
{
    primaryNode = primary;
    foreach (NodeConfiguration candidate in Overrides.Values)
    {
        if (!candidate.Endpoint.Equals(primary))
        {
            continue;
        }
        candidate.IsPrimaryNode = true;
    }
}
/// <summary>
/// Loads the configuration from the standard paths.
/// </summary>
/// <returns></returns>
public void StandardLoad()
{
    // FindConfigFile throws FileNotFoundException when no config file is found.
    LoadFromFile(ConfigUtilities.FindConfigFile(true));
}
/// <summary>
/// Subset of XML configuration file that is updatable at runtime.
/// Attribute values of "?" are placeholders: only the elements and attribute
/// names listed here are accepted by <see cref="Update"/> (see CheckSubtree).
/// </summary>
private static readonly XmlElement updatableXml = ParseXml(new StringReader(@"
<OrleansConfiguration>
<Globals>
<Messaging ResponseTimeout=""?""/>
<Caching CacheSize=""?""/>
<Liveness ProbeTimeout=""?"" TableRefreshTimeout=""?"" NumMissedProbesLimit=""?""/>
</Globals>
<Defaults>
<LoadShedding Enabled=""?"" LoadLimit=""?""/>
<Tracing DefaultTraceLevel=""?"" PropagateActivityId=""?"">
<TraceLevelOverride LogPrefix=""?"" TraceLevel=""?""/>
</Tracing>
</Defaults>
</OrleansConfiguration>"));
/// <summary>
/// Updates existing configuration.
/// </summary>
/// <param name="input">The input string in XML format to use to update the existing configuration.</param>
/// <returns></returns>
/// <exception cref="ArgumentException">The input touches elements or attributes outside the updatable subset.</exception>
public void Update(string input)
{
var xml = ParseXml(new StringReader(input));
var disallowed = new List<string>();
// Reject any element/attribute not present in the updatableXml template.
CheckSubtree(updatableXml, xml, "", disallowed);
if (disallowed.Count > 0)
throw new ArgumentException("Cannot update configuration with" + disallowed.ToStrings());
var dict = ToChildDictionary(xml);
XmlElement globals;
if (dict.TryGetValue("Globals", out globals))
{
Globals.Load(globals);
// Notify listeners of the section and of each updated subsection.
ConfigChanged("Globals");
foreach (var key in ToChildDictionary(globals).Keys)
{
ConfigChanged("Globals/" + key);
}
}
XmlElement defaults;
if (dict.TryGetValue("Defaults", out defaults))
{
Defaults.Load(defaults);
// Re-derive per-node overrides since they are built on top of Defaults.
CalculateOverrides();
ConfigChanged("Defaults");
foreach (var key in ToChildDictionary(defaults).Keys)
{
ConfigChanged("Defaults/" + key);
}
}
}
/// <summary>
/// Recursively verifies that <paramref name="test"/> only contains elements and
/// attributes present in the <paramref name="allowed"/> template, appending the
/// path of every violation to <paramref name="disallowed"/>.
/// </summary>
private static void CheckSubtree(XmlElement allowed, XmlElement test, string prefix, List<string> disallowed)
{
    prefix = prefix + "/" + test.LocalName;
    if (allowed.LocalName != test.LocalName)
    {
        // The element name itself is not permitted; no point checking its contents.
        disallowed.Add(prefix);
        return;
    }
    foreach (var attributeName in AttributeNames(test))
    {
        if (!allowed.HasAttribute(attributeName))
        {
            disallowed.Add(prefix + "/@" + attributeName);
        }
    }
    var permittedChildren = ToChildDictionary(allowed);
    foreach (var node in test.ChildNodes)
    {
        var candidate = node as XmlElement;
        if (candidate == null)
        {
            continue;
        }
        XmlElement template;
        if (permittedChildren.TryGetValue(candidate.LocalName, out template))
        {
            CheckSubtree(template, candidate, prefix, disallowed);
        }
        else
        {
            disallowed.Add(prefix + "/" + candidate.LocalName);
        }
    }
}
/// <summary>
/// Maps each child element of <paramref name="xml"/> by its local name.
/// Non-element nodes are skipped; duplicate names keep the last element seen.
/// </summary>
private static Dictionary<string, XmlElement> ToChildDictionary(XmlElement xml)
{
    var children = new Dictionary<string, XmlElement>();
    foreach (var element in xml.ChildNodes.OfType<XmlElement>())
    {
        children[element.LocalName] = element;
    }
    return children;
}
/// <summary>
/// Yields the local name of every attribute on <paramref name="element"/>, in document order.
/// </summary>
private static IEnumerable<string> AttributeNames(XmlElement element)
{
    foreach (var attribute in element.Attributes.OfType<XmlAttribute>())
    {
        yield return attribute.LocalName;
    }
}
/// <summary>
/// Registers a callback to run whenever the configuration section at
/// <paramref name="path"/> changes; optionally invokes it immediately.
/// </summary>
internal void OnConfigChange(string path, Action action, bool invokeNow = true)
{
    List<Action> registered;
    if (!listeners.TryGetValue(path, out registered))
    {
        registered = new List<Action>();
        listeners.Add(path, registered);
    }
    registered.Add(action);
    if (invokeNow)
    {
        action();
    }
}
/// <summary>
/// Invokes every callback registered for the given configuration path, if any.
/// </summary>
internal void ConfigChanged(string path)
{
    List<Action> registered;
    if (!listeners.TryGetValue(path, out registered)) return;
    foreach (var callback in registered)
    {
        callback();
    }
}
/// <summary>
/// Prints the current config for a given silo.
/// </summary>
/// <param name="siloName">The name of the silo to print its configuration.</param>
/// <returns>A multi-line human-readable dump of global and per-silo settings.</returns>
public string ToString(string siloName)
{
var sb = new StringBuilder();
sb.Append("Config File Name: ").AppendLine(string.IsNullOrEmpty(SourceFile) ? "" : Path.GetFullPath(SourceFile));
sb.Append("Host: ").AppendLine(Dns.GetHostName());
sb.Append("Start time: ").AppendLine(LogFormatter.PrintDate(DateTime.UtcNow));
sb.Append("Primary node: ").AppendLine(PrimaryNode == null ? "null" : PrimaryNode.ToString());
sb.AppendLine("Platform version info:").Append(ConfigUtilities.RuntimeVersionInfo());
sb.AppendLine("Global configuration:").Append(Globals.ToString());
// The per-silo section is only emitted when an override exists for this silo.
NodeConfiguration nc;
if (TryGetNodeConfigurationForSilo(siloName, out nc))
{
sb.AppendLine("Silo configuration:").Append(nc);
}
sb.AppendLine();
return sb.ToString();
}
/// <summary>
/// Resolves a host name or textual IP address to a single address of the
/// requested family, optionally constrained to a subnet prefix.
/// </summary>
/// <param name="addrOrHost">Host name or IP literal; "loopback"/"localhost"/"127.0.0.1" map to loopback, "0.0.0.0" to Any, empty selects the local machine.</param>
/// <param name="subnet">Optional subnet prefix bytes a candidate's address must start with; null means unconstrained.</param>
/// <param name="family">Required address family (IPv4 or IPv6).</param>
/// <returns>The deterministically chosen candidate address (see PickIPAddress).</returns>
/// <exception cref="ArgumentException">No resolved address satisfied the family/subnet constraints.</exception>
internal static async Task<IPAddress> ResolveIPAddress(string addrOrHost, byte[] subnet, AddressFamily family)
{
    var loopback = (family == AddressFamily.InterNetwork) ? IPAddress.Loopback : IPAddress.IPv6Loopback;
    if (addrOrHost.Equals("loopback", StringComparison.OrdinalIgnoreCase) ||
        addrOrHost.Equals("localhost", StringComparison.OrdinalIgnoreCase) ||
        addrOrHost.Equals("127.0.0.1", StringComparison.OrdinalIgnoreCase))
    {
        return loopback;
    }
    else if (addrOrHost == "0.0.0.0")
    {
        return IPAddress.Any;
    }
    else
    {
        // If the address is an empty string, default to the local machine, but not the loopback address.
        if (String.IsNullOrEmpty(addrOrHost))
        {
            addrOrHost = Dns.GetHostName();
            // If for some reason we get "localhost" back. This seems to have happened to somebody.
            if (addrOrHost.Equals("localhost", StringComparison.OrdinalIgnoreCase))
                return loopback;
        }
        var candidates = new List<IPAddress>();
        IPAddress[] nodeIps = await Dns.GetHostAddressesAsync(addrOrHost);
        foreach (var nodeIp in nodeIps)
        {
            if (nodeIp.AddressFamily != family || nodeIp.Equals(loopback)) continue;
            // If the subnet does not match - we can't resolve this address.
            // If subnet is not specified - pick smallest address deterministically.
            if (subnet == null)
            {
                candidates.Add(nodeIp);
            }
            else
            {
                // Fix: fetch the address bytes once. The original lambda called
                // GetAddressBytes() for every subnet byte, allocating a fresh
                // array each time.
                byte[] ipBytes = nodeIp.GetAddressBytes();
                if (subnet.Select((b, i) => ipBytes[i] == b).All(x => x))
                {
                    candidates.Add(nodeIp);
                }
            }
        }
        if (candidates.Count > 0)
        {
            return PickIPAddress(candidates);
        }
        var subnetStr = Utils.EnumerableToString(subnet, null, ".", false);
        throw new ArgumentException("Hostname '" + addrOrHost + "' with subnet " + subnetStr + " and family " + family + " is not a valid IP address or DNS name");
    }
}
/// <summary>
/// Deterministically selects one address from the candidates: the smallest
/// under the ordering defined by CompareIPAddresses. Returns null for an empty list.
/// </summary>
private static IPAddress PickIPAddress(IReadOnlyList<IPAddress> candidates)
{
    IPAddress best = null;
    foreach (IPAddress candidate in candidates)
    {
        // Keep the candidate if it sorts strictly before the best seen so far.
        if (best == null || CompareIPAddresses(candidate, best))
        {
            best = candidate;
        }
    }
    return best;
}
/// <summary>
/// Defines a repeatable total order over IP addresses: shorter byte length first,
/// then lexicographic comparison from the most significant octet.
/// Returns true when <paramref name="lhs"/> sorts strictly before <paramref name="rhs"/>.
/// </summary>
private static bool CompareIPAddresses(IPAddress lhs, IPAddress rhs)
{
    byte[] left = lhs.GetAddressBytes();
    byte[] right = rhs.GetAddressBytes();
    // An IPv4 address (4 bytes) always sorts before an IPv6 address (16 bytes).
    if (left.Length != right.Length)
    {
        return left.Length < right.Length;
    }
    // Compare starting from the most significant octet, e.g. 10.68.20.21 < 10.98.05.04.
    for (int octet = 0; octet < left.Length; octet++)
    {
        if (left[octet] != right[octet])
        {
            return left[octet] < right[octet];
        }
    }
    // Equal addresses are not strictly less.
    return false;
}
/// <summary>
/// Gets the address of the local server.
/// If there are multiple addresses in the correct family, the smallest one is
/// chosen deterministically (see PickIPAddress).
/// </summary>
/// <param name="family">Required address family; defaults to IPv4.</param>
/// <param name="interfaceName">Optional network-interface name prefix to restrict the search to.</param>
/// <returns>The server's address in the requested family.</returns>
internal static IPAddress GetLocalIPAddress(AddressFamily family = AddressFamily.InterNetwork, string interfaceName = null)
{
var loopback = (family == AddressFamily.InterNetwork) ? IPAddress.Loopback : IPAddress.IPv6Loopback;
// get list of all network interfaces
NetworkInterface[] netInterfaces = NetworkInterface.GetAllNetworkInterfaces();
var candidates = new List<IPAddress>();
// loop through interfaces
for (int i=0; i < netInterfaces.Length; i++)
{
NetworkInterface netInterface = netInterfaces[i];
if (netInterface.OperationalStatus != OperationalStatus.Up)
{
// Skip network interfaces that are not operational
continue;
}
// When an interface name is given, only interfaces whose name starts with it qualify.
if (!string.IsNullOrWhiteSpace(interfaceName) &&
!netInterface.Name.StartsWith(interfaceName, StringComparison.Ordinal)) continue;
bool isLoopbackInterface = (netInterface.NetworkInterfaceType == NetworkInterfaceType.Loopback);
// get list of all unicast IPs from current interface
UnicastIPAddressInformationCollection ipAddresses = netInterface.GetIPProperties().UnicastAddresses;
// loop through IP address collection
foreach (UnicastIPAddressInformation ip in ipAddresses)
{
if (ip.Address.AddressFamily == family) // Picking the first address of the requested family for now. Will need to revisit later
{
//don't pick loopback address, unless we were asked for a loopback interface
if(!(isLoopbackInterface && ip.Address.Equals(loopback)))
{
candidates.Add(ip.Address); // collect all candidates.
}
}
}
}
if (candidates.Count > 0) return PickIPAddress(candidates);
throw new OrleansException("Failed to get a local IP address.");
}
/// <summary>
/// Parses the XML text from <paramref name="input"/> and returns its root element.
/// </summary>
private static XmlElement ParseXml(TextReader input)
{
    var doc = new XmlDocument();
    // Fix: the XmlReader was created but never disposed, leaking its resources;
    // a using block guarantees disposal once the document is loaded.
    using (var xmlReader = XmlReader.Create(input))
    {
        doc.Load(xmlReader);
    }
    return doc.DocumentElement;
}
/// <summary>
/// Returns a prepopulated ClusterConfiguration object for a primary local silo (for testing)
/// </summary>
/// <param name="siloPort">TCP port for silo to silo communication</param>
/// <param name="gatewayPort">Client gateway TCP port</param>
/// <returns>ClusterConfiguration object that can be passed to Silo or SiloHost classes for initialization</returns>
public static ClusterConfiguration LocalhostPrimarySilo(int siloPort = 22222, int gatewayPort = 40000)
{
var config = new ClusterConfiguration();
var siloAddress = new IPEndPoint(IPAddress.Loopback, siloPort);
// In-grain membership/reminders: suitable for single-silo development only.
config.Globals.LivenessType = GlobalConfiguration.LivenessProviderType.MembershipTableGrain;
config.Globals.SeedNodes.Add(siloAddress);
config.Globals.ReminderServiceType = GlobalConfiguration.ReminderServiceProviderType.ReminderTableGrain;
config.Defaults.HostNameOrIPAddress = "localhost";
config.Defaults.Port = siloPort;
config.Defaults.ProxyGatewayEndpoint = new IPEndPoint(IPAddress.Loopback, gatewayPort);
// The single local silo is also the primary node.
config.PrimaryNode = siloAddress;
return config;
}
}
}
| |
using System;
using System.Diagnostics.CodeAnalysis;
using System.Threading.Tasks;
#if NET_45
using System.Collections.Generic;
#endif
namespace Octokit
{
/// <summary>
/// A client for GitHub's Repositories API.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/">Repositories API documentation</a> for more details.
/// </remarks>
public interface IRepositoriesClient
{
/// <summary>
/// Client for managing pull requests.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/pulls/">Pull Requests API documentation</a> for more details
/// </remarks>
IPullRequestsClient PullRequest { get; }
/// <summary>
/// Client for managing branches in a repository.
/// </summary>
/// <remarks>
/// See the <a href="https://developer.github.com/v3/repos/branches/">Branches API documentation</a> for more details
/// </remarks>
[SuppressMessage("Microsoft.Naming", "CA1721:PropertyNamesShouldNotMatchGetMethods")]
IRepositoryBranchesClient Branch { get; }
/// <summary>
/// Client for managing commit comments in a repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/comments/">Repository Comments API documentation</a> for more information.
/// </remarks>
IRepositoryCommentsClient Comment { get; }
/// <summary>
/// Client for managing deploy keys in a repository.
/// </summary>
/// <remarks>
/// See the <a href="https://developer.github.com/v3/repos/keys/">Repository Deploy Keys API documentation</a> for more information.
/// </remarks>
IRepositoryDeployKeysClient DeployKeys { get; }
/// <summary>
/// Client for managing the contents of a repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/contents/">Repository Contents API documentation</a> for more information.
/// </remarks>
IRepositoryContentsClient Content { get; }
/// <summary>
/// Creates a new repository for the current user.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#create">API documentation</a> for more information.
/// </remarks>
/// <param name="newRepository">A <see cref="NewRepository"/> instance describing the new repository to create</param>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>A <see cref="Repository"/> instance for the created repository.</returns>
Task<Repository> Create(NewRepository newRepository);
/// <summary>
/// Creates a new repository in the specified organization.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#create">API documentation</a> for more information.
/// </remarks>
/// <param name="organizationLogin">Login of the organization in which to create the repository</param>
/// <param name="newRepository">A <see cref="NewRepository"/> instance describing the new repository to create</param>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>A <see cref="Repository"/> instance for the created repository</returns>
Task<Repository> Create(string organizationLogin, NewRepository newRepository);
/// <summary>
/// Deletes the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#delete-a-repository">API documentation</a> for more information.
/// Deleting a repository requires admin access. If OAuth is used, the `delete_repo` scope is required.
/// </remarks>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
Task Delete(string owner, string name);
/// <summary>
/// Deletes the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#delete-a-repository">API documentation</a> for more information.
/// Deleting a repository requires admin access. If OAuth is used, the `delete_repo` scope is required.
/// </remarks>
/// <param name="repositoryId">The Id of the repository</param>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
Task Delete(long repositoryId);
/// <summary>
/// Gets the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#get">API documentation</a> for more information.
/// </remarks>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>A <see cref="Repository"/></returns>
[SuppressMessage("Microsoft.Naming", "CA1716:IdentifiersShouldNotMatchKeywords", MessageId = "Get")]
Task<Repository> Get(string owner, string name);
/// <summary>
/// Gets the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#get">API documentation</a> for more information.
/// </remarks>
/// <param name="repositoryId">The Id of the repository</param>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>A <see cref="Repository"/></returns>
[SuppressMessage("Microsoft.Naming", "CA1716:IdentifiersShouldNotMatchKeywords", MessageId = "Get")]
Task<Repository> Get(long repositoryId);
/// <summary>
/// Gets all public repositories.
/// </summary>
/// <remarks>
/// See the <a href="https://developer.github.com/v3/repos/#list-all-public-repositories">API documentation</a> for more information.
/// The default page size on GitHub.com is 30.
/// </remarks>
/// <exception cref="AuthorizationException">Thrown if the client is not authenticated.</exception>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>A <see cref="IReadOnlyPagedCollection{Repository}"/> of <see cref="Repository"/>.</returns>
[SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate",
Justification = "Makes a network request")]
Task<IReadOnlyList<Repository>> GetAllPublic();
/// <summary>
/// Gets all public repositories since the integer Id of the last Repository that you've seen.
/// </summary>
/// <remarks>
/// See the <a href="https://developer.github.com/v3/repos/#list-all-public-repositories">API documentation</a> for more information.
/// The default page size on GitHub.com is 30.
/// </remarks>
/// <param name="request">Search parameters of the last repository seen</param>
/// <exception cref="AuthorizationException">Thrown if the client is not authenticated.</exception>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>A <see cref="IReadOnlyPagedCollection{Repository}"/> of <see cref="Repository"/>.</returns>
Task<IReadOnlyList<Repository>> GetAllPublic(PublicRepositoryRequest request);
/// <summary>
/// Gets all repositories owned by the current user.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-your-repositories">API documentation</a> for more information.
/// The default page size on GitHub.com is 30.
/// </remarks>
/// <exception cref="AuthorizationException">Thrown if the client is not authenticated.</exception>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>A <see cref="IReadOnlyPagedCollection{Repository}"/> of <see cref="Repository"/>.</returns>
[SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate",
Justification = "Makes a network request")]
Task<IReadOnlyList<Repository>> GetAllForCurrent();
/// <summary>
/// Gets all repositories owned by the current user.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-your-repositories">API documentation</a> for more information.
/// </remarks>
/// <param name="options">Options for changing the API response</param>
/// <exception cref="AuthorizationException">Thrown if the client is not authenticated.</exception>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>A <see cref="IReadOnlyPagedCollection{Repository}"/> of <see cref="Repository"/>.</returns>
Task<IReadOnlyList<Repository>> GetAllForCurrent(ApiOptions options);
/// <summary>
/// Gets all repositories owned by the current user.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-your-repositories">API documentation</a> for more information.
/// The default page size on GitHub.com is 30.
/// </remarks>
/// <param name="request">Search parameters to filter results on</param>
/// <exception cref="AuthorizationException">Thrown if the client is not authenticated.</exception>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>A <see cref="IReadOnlyList{Repository}"/> of <see cref="Repository"/>.</returns>
Task<IReadOnlyList<Repository>> GetAllForCurrent(RepositoryRequest request);
/// <summary>
/// Gets all repositories owned by the current user.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-your-repositories">API documentation</a> for more information.
/// </remarks>
/// <param name="request">Search parameters to filter results on</param>
/// <param name="options">Options for changing the API response</param>
/// <exception cref="AuthorizationException">Thrown if the client is not authenticated.</exception>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>A <see cref="IReadOnlyList{Repository}"/> of <see cref="Repository"/>.</returns>
Task<IReadOnlyList<Repository>> GetAllForCurrent(RepositoryRequest request, ApiOptions options);
/// <summary>
/// Gets all repositories owned by the specified user.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-user-repositories">API documentation</a> for more information.
/// The default page size on GitHub.com is 30.
/// </remarks>
/// <param name="login">The account name to search for</param>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>A <see cref="IReadOnlyList{Repository}"/> of <see cref="Repository"/>.</returns>
Task<IReadOnlyList<Repository>> GetAllForUser(string login);
/// <summary>
/// Gets all repositories owned by the specified user.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-user-repositories">API documentation</a> for more information.
/// </remarks>
/// <param name="login">The account name to search for</param>
/// <param name="options">Options for changing the API response</param>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>A <see cref="IReadOnlyList{Repository}"/> of <see cref="Repository"/>.</returns>
Task<IReadOnlyList<Repository>> GetAllForUser(string login, ApiOptions options);
/// <summary>
/// Gets all repositories owned by the specified organization.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-organization-repositories">API documentation</a> for more information.
/// The default page size on GitHub.com is 30.
/// </remarks>
/// <param name="organization">The organization name to search for</param>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>A <see cref="IReadOnlyList{Repository}"/> of <see cref="Repository"/>.</returns>
Task<IReadOnlyList<Repository>> GetAllForOrg(string organization);
/// <summary>
/// Gets all repositories owned by the specified organization.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-organization-repositories">API documentation</a> for more information.
/// </remarks>
/// <param name="organization">The organization name to search for</param>
/// <param name="options">Options for changing the API response</param>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>A <see cref="IReadOnlyList{Repository}"/> of <see cref="Repository"/>.</returns>
Task<IReadOnlyList<Repository>> GetAllForOrg(string organization, ApiOptions options);
/// <summary>
/// A client for GitHub's Commit Status API.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/statuses/">Commit Status API documentation</a> for more
/// details. Also check out the <a href="https://github.com/blog/1227-commit-status-api">blog post</a>
/// that announced this feature.
/// </remarks>
ICommitStatusClient Status { get; }
/// <summary>
/// A client for GitHub's Repository Hooks API.
/// </summary>
/// <remarks>See <a href="http://developer.github.com/v3/repos/hooks/">Hooks API documentation</a> for more information.</remarks>
IRepositoryHooksClient Hooks { get; }
/// <summary>
/// A client for GitHub's Repository Forks API.
/// </summary>
/// <remarks>See <a href="http://developer.github.com/v3/repos/forks/">Forks API documentation</a> for more information.</remarks>
IRepositoryForksClient Forks { get; }
/// <summary>
/// A client for GitHub's Repository Collaborators API.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/collaborators/">Collaborators API documentation</a> for more details
/// </remarks>
IRepoCollaboratorsClient Collaborator { get; }
/// <summary>
/// Client for GitHub's Repository Deployments API
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/deployments/">Deployments API documentation</a> for more details
/// </remarks>
IDeploymentsClient Deployment { get; }
/// <summary>
/// Client for GitHub's Repository Statistics API
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/statistics/">Statistics API documentation</a> for more details
/// </remarks>
IStatisticsClient Statistics { get; }
/// <summary>
/// Client for GitHub's Repository Commits API
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/commits/">Commits API documentation</a> for more details
/// </remarks>
IRepositoryCommitsClient Commit { get; }
/// <summary>
/// Access GitHub's Releases API.
/// </summary>
/// <remarks>
/// See the <a href="https://developer.github.com/v3/repos/releases/">Releases API documentation</a> for more information.
/// </remarks>
IReleasesClient Release { get; }
/// <summary>
/// Client for GitHub's Repository Merging API
/// </summary>
/// <remarks>
/// See the <a href="https://developer.github.com/v3/repos/merging/">Merging API documentation</a> for more details
/// </remarks>
IMergingClient Merging { get; }
/// <summary>
/// Gets all the branches for the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-branches">API documentation</a> for more details
/// </remarks>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>All <see cref="T:Octokit.Branch"/>es of the repository.</returns>
[Obsolete("Please use RepositoriesClient.Branch.GetAll() instead. This method will be removed in a future version")]
Task<IReadOnlyList<Branch>> GetAllBranches(string owner, string name);
/// <summary>
/// Gets all the branches for the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-branches">API documentation</a> for more details
/// </remarks>
/// <param name="repositoryId">The Id of the repository</param>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>All <see cref="T:Octokit.Branch"/>es of the repository.</returns>
[Obsolete("Please use RepositoriesClient.Branch.GetAll() instead. This method will be removed in a future version")]
Task<IReadOnlyList<Branch>> GetAllBranches(long repositoryId);
/// <summary>
/// Gets all the branches for the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-branches">API documentation</a> for more details
/// </remarks>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <param name="options">Options for changing the API response</param>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>All <see cref="T:Octokit.Branch"/>es of the repository.</returns>
[Obsolete("Please use RepositoriesClient.Branch.GetAll() instead. This method will be removed in a future version")]
Task<IReadOnlyList<Branch>> GetAllBranches(string owner, string name, ApiOptions options);
/// <summary>
/// Gets all the branches for the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-branches">API documentation</a> for more details
/// </remarks>
/// <param name="repositoryId">The Id of the repository</param>
/// <param name="options">Options for changing the API response</param>
/// <exception cref="ApiException">Thrown when a general API error occurs.</exception>
/// <returns>All <see cref="T:Octokit.Branch"/>es of the repository.</returns>
[Obsolete("Please use RepositoriesClient.Branch.GetAll() instead. This method will be removed in a future version")]
Task<IReadOnlyList<Branch>> GetAllBranches(long repositoryId, ApiOptions options);
/// <summary>
/// Gets all contributors for the specified repository. Does not include anonymous contributors.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-contributors">API documentation</a> for more details
/// </remarks>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <returns>All contributors of the repository.</returns>
Task<IReadOnlyList<RepositoryContributor>> GetAllContributors(string owner, string name);
/// <summary>
/// Gets all contributors for the specified repository. Does not include anonymous contributors.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-contributors">API documentation</a> for more details
/// </remarks>
/// <param name="repositoryId">The Id of the repository</param>
/// <returns>All contributors of the repository.</returns>
Task<IReadOnlyList<RepositoryContributor>> GetAllContributors(long repositoryId);
/// <summary>
/// Gets all contributors for the specified repository. Does not include anonymous contributors.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-contributors">API documentation</a> for more details
/// </remarks>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <param name="options">Options for changing the API response</param>
/// <returns>All contributors of the repository.</returns>
Task<IReadOnlyList<RepositoryContributor>> GetAllContributors(string owner, string name, ApiOptions options);
/// <summary>
/// Gets all contributors for the specified repository. Does not include anonymous contributors.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-contributors">API documentation</a> for more details
/// </remarks>
/// <param name="repositoryId">The Id of the repository</param>
/// <param name="options">Options for changing the API response</param>
/// <returns>All contributors of the repository.</returns>
Task<IReadOnlyList<RepositoryContributor>> GetAllContributors(long repositoryId, ApiOptions options);
/// <summary>
/// Gets all contributors for the specified repository. With the option to include anonymous contributors.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-contributors">API documentation</a> for more details
/// </remarks>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <param name="includeAnonymous">True if anonymous contributors should be included in result; otherwise false</param>
/// <returns>All contributors of the repository.</returns>
Task<IReadOnlyList<RepositoryContributor>> GetAllContributors(string owner, string name, bool includeAnonymous);
/// <summary>
/// Gets all contributors for the specified repository. With the option to include anonymous contributors.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-contributors">API documentation</a> for more details
/// </remarks>
/// <param name="repositoryId">The Id of the repository</param>
/// <param name="includeAnonymous">True if anonymous contributors should be included in result; otherwise false</param>
/// <returns>All contributors of the repository.</returns>
Task<IReadOnlyList<RepositoryContributor>> GetAllContributors(long repositoryId, bool includeAnonymous);
/// <summary>
/// Gets all contributors for the specified repository. With the option to include anonymous contributors.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-contributors">API documentation</a> for more details
/// </remarks>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <param name="includeAnonymous">True if anonymous contributors should be included in result; otherwise false</param>
/// <param name="options">Options for changing the API response</param>
/// <returns>All contributors of the repository.</returns>
Task<IReadOnlyList<RepositoryContributor>> GetAllContributors(string owner, string name, bool includeAnonymous, ApiOptions options);
/// <summary>
/// Gets all contributors for the specified repository. With the option to include anonymous contributors.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-contributors">API documentation</a> for more details
/// </remarks>
/// <param name="repositoryId">The Id of the repository</param>
/// <param name="includeAnonymous">True if anonymous contributors should be included in result; otherwise false</param>
/// <param name="options">Options for changing the API response</param>
/// <returns>All contributors of the repository.</returns>
Task<IReadOnlyList<RepositoryContributor>> GetAllContributors(long repositoryId, bool includeAnonymous, ApiOptions options);
/// <summary>
/// Gets all languages for the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-languages">API documentation</a> for more details
/// </remarks>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <returns>All languages used in the repository and the number of bytes of each language.</returns>
Task<IReadOnlyList<RepositoryLanguage>> GetAllLanguages(string owner, string name);
/// <summary>
/// Gets all languages for the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-languages">API documentation</a> for more details
/// </remarks>
/// <param name="repositoryId">The Id of the repository</param>
/// <returns>All languages used in the repository and the number of bytes of each language.</returns>
Task<IReadOnlyList<RepositoryLanguage>> GetAllLanguages(long repositoryId);
/// <summary>
/// Gets all teams for the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-teams">API documentation</a> for more details
/// </remarks>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <returns>All <see cref="T:Octokit.Team"/>s associated with the repository.</returns>
Task<IReadOnlyList<Team>> GetAllTeams(string owner, string name);
/// <summary>
/// Gets all teams for the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-teams">API documentation</a> for more details
/// </remarks>
/// <param name="repositoryId">The Id of the repository</param>
/// <returns>All <see cref="T:Octokit.Team"/>s associated with the repository.</returns>
Task<IReadOnlyList<Team>> GetAllTeams(long repositoryId);
/// <summary>
/// Gets all teams for the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-teams">API documentation</a> for more details
/// </remarks>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <param name="options">Options for changing the API response</param>
/// <returns>All <see cref="T:Octokit.Team"/>s associated with the repository.</returns>
Task<IReadOnlyList<Team>> GetAllTeams(string owner, string name, ApiOptions options);
/// <summary>
/// Gets all teams for the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-teams">API documentation</a> for more details
/// </remarks>
/// <param name="repositoryId">The Id of the repository</param>
/// <param name="options">Options for changing the API response</param>
/// <returns>All <see cref="T:Octokit.Team"/>s associated with the repository.</returns>
Task<IReadOnlyList<Team>> GetAllTeams(long repositoryId, ApiOptions options);
/// <summary>
/// Gets all tags for the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-tags">API documentation</a> for more details
/// </remarks>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <returns>All of the repository's tags.</returns>
Task<IReadOnlyList<RepositoryTag>> GetAllTags(string owner, string name);
/// <summary>
/// Gets all tags for the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-tags">API documentation</a> for more details
/// </remarks>
/// <param name="repositoryId">The Id of the repository</param>
/// <returns>All of the repository's tags.</returns>
Task<IReadOnlyList<RepositoryTag>> GetAllTags(long repositoryId);
/// <summary>
/// Gets all tags for the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-tags">API documentation</a> for more details
/// </remarks>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <param name="options">Options for changing the API response</param>
/// <returns>All of the repository's tags.</returns>
Task<IReadOnlyList<RepositoryTag>> GetAllTags(string owner, string name, ApiOptions options);
/// <summary>
/// Gets all tags for the specified repository.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#list-tags">API documentation</a> for more details
/// </remarks>
/// <param name="repositoryId">The Id of the repository</param>
/// <param name="options">Options for changing the API response</param>
/// <returns>All of the repository's tags.</returns>
Task<IReadOnlyList<RepositoryTag>> GetAllTags(long repositoryId, ApiOptions options);
/// <summary>
/// Gets the specified branch.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#get-branch">API documentation</a> for more details
/// </remarks>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <param name="branchName">The name of the branch</param>
/// <returns>The specified <see cref="T:Octokit.Branch"/>.</returns>
[Obsolete("Please use RepositoriesClient.Branch.Get() instead. This method will be removed in a future version")]
Task<Branch> GetBranch(string owner, string name, string branchName);
/// <summary>
/// Gets the specified branch.
/// </summary>
/// <remarks>
/// See the <a href="http://developer.github.com/v3/repos/#get-branch">API documentation</a> for more details
/// </remarks>
/// <param name="repositoryId">The Id of the repository</param>
/// <param name="branchName">The name of the branch</param>
/// <returns>The specified <see cref="T:Octokit.Branch"/>.</returns>
[Obsolete("Please use RepositoriesClient.Branch.Get() instead. This method will be removed in a future version")]
Task<Branch> GetBranch(long repositoryId, string branchName);
/// <summary>
/// Updates the specified repository with the values given in <paramref name="update"/>
/// </summary>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <param name="update">New values to update the repository with</param>
/// <returns>The updated <see cref="T:Octokit.Repository"/>.</returns>
Task<Repository> Edit(string owner, string name, RepositoryUpdate update);
/// <summary>
/// Updates the specified repository with the values given in <paramref name="update"/>
/// </summary>
/// <param name="repositoryId">The Id of the repository</param>
/// <param name="update">New values to update the repository with</param>
/// <returns>The updated <see cref="T:Octokit.Repository"/>.</returns>
Task<Repository> Edit(long repositoryId, RepositoryUpdate update);
/// <summary>
/// Edit the specified branch with the values given in <paramref name="update"/>
/// </summary>
/// <param name="owner">The owner of the repository</param>
/// <param name="name">The name of the repository</param>
/// <param name="branch">The name of the branch</param>
/// <param name="update">New values to update the branch with</param>
/// <returns>The updated <see cref="T:Octokit.Branch"/>.</returns>
[Obsolete("This existing implementation will cease to work when the Branch Protection API preview period ends. Please use the RepositoryBranchesClient methods instead.")]
Task<Branch> EditBranch(string owner, string name, string branch, BranchUpdate update);
/// <summary>
/// Edit the specified branch with the values given in <paramref name="update"/>
/// </summary>
/// <param name="repositoryId">The Id of the repository</param>
/// <param name="branch">The name of the branch</param>
/// <param name="update">New values to update the branch with</param>
/// <returns>The updated <see cref="T:Octokit.Branch"/>.</returns>
[Obsolete("This existing implementation will cease to work when the Branch Protection API preview period ends. Please use the RepositoryBranchesClient methods instead.")]
Task<Branch> EditBranch(long repositoryId, string branch, BranchUpdate update);
/// <summary>
/// A client for GitHub's Repository Pages API.
/// </summary>
/// <remarks>
/// See the <a href="https://developer.github.com/v3/repos/pages/">Repository Pages API documentation</a> for more information.
/// </remarks>
IRepositoryPagesClient Page { get; }
/// <summary>
/// A client for GitHub's Repository Invitations API.
/// </summary>
/// <remarks>
/// See the <a href="https://developer.github.com/v3/repos/invitations/">Repository Invitations API documentation</a> for more information.
/// </remarks>
IRepositoryInvitationsClient Invitation { get; }
/// <summary>
/// Access GitHub's Repository Traffic API
/// </summary>
/// <remarks>
/// See the <a href="https://developer.github.com/v3/repos/traffic/">Repository Traffic API documentation</a> for more information.
/// </remarks>
IRepositoryTrafficClient Traffic { get; }
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace BookReviews.WebAPI.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
/// <summary>
/// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class
/// with empty sample registries and the default sample object factory.
/// </summary>
public HelpPageSampleGenerator()
{
    ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
    ActionSamples = new Dictionary<HelpPageSampleKey, object>();
    SampleObjects = new Dictionary<Type, object>();

    // Seed with the default factory; callers may Insert(0, ...) to override it
    // or Add(...) to register fallbacks (see SampleObjectFactories remarks).
    SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>();
    SampleObjectFactories.Add(DefaultSampleObjectFactory);
}
/// <summary>
/// Gets the CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
public IDictionary<Type, object> SampleObjects { get; internal set; }
/// <summary>
/// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
/// stopping when a factory successfully returns a non-<see langword="null"/> object.
/// </summary>
/// <remarks>
/// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
/// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
/// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
Justification = "This is an appropriate nesting of generic types")]
public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
{
    // Thin convenience wrapper over GetSample for the request direction.
    return GetSample(api, sampleDirection: SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
{
    // Thin convenience wrapper over GetSample for the response direction.
    return GetSample(api, sampleDirection: SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
    if (api == null)
    {
        throw new ArgumentNullException("api");
    }

    string controller = api.ActionDescriptor.ControllerDescriptor.ControllerName;
    string action = api.ActionDescriptor.ActionName;
    IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
    Collection<MediaTypeFormatter> formatters;
    Type type = ResolveType(api, controller, action, parameterNames, sampleDirection, out formatters);

    var result = new Dictionary<MediaTypeHeaderValue, object>();

    // Samples registered directly for this action take precedence over anything generated.
    foreach (var registered in GetAllActionSamples(controller, action, parameterNames, sampleDirection))
    {
        result.Add(registered.Key.MediaType, WrapSampleIfString(registered.Value));
    }

    // Formatter-based generation only applies when the action does not deal in a raw
    // HttpResponseMessage: its content is opaque and might not go through formatters at all.
    if (type == null || typeof(HttpResponseMessage).IsAssignableFrom(type))
    {
        return result;
    }

    object sampleObject = GetSampleObject(type);
    foreach (var formatter in formatters)
    {
        foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
        {
            if (result.ContainsKey(mediaType))
            {
                continue; // a directly-registered sample already covers this media type
            }

            object sample = GetActionSample(controller, action, parameterNames, type, formatter, mediaType, sampleDirection);
            // No registered sample for this formatter/media type: serialize the sample object.
            if (sample == null && sampleObject != null)
            {
                sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
            }
            result.Add(mediaType, WrapSampleIfString(sample));
        }
    }

    return result;
}
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The best-matching registered sample, or <see langword="null"/> when none is registered.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
{
    object sample;

    // Probe from the most specific registration to the least specific; first hit wins.
    // 1) Media type + direction + controller + action + exact parameter names.
    if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample))
    {
        return sample;
    }
    // 2) Same action, registered with the "*" wildcard instead of parameter names.
    if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample))
    {
        return sample;
    }
    // 3) Media type + CLR type, regardless of action.
    if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample))
    {
        return sample;
    }
    // 4) Media type alone.
    if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
    {
        return sample;
    }

    return null;
}
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
/// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
/// factories in <see cref="SampleObjectFactories"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object, or <see langword="null"/> when none could be produced.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
public virtual object GetSampleObject(Type type)
{
    object sampleObject;

    // An explicitly registered object always wins.
    if (SampleObjects.TryGetValue(type, out sampleObject))
    {
        return sampleObject;
    }

    // Otherwise ask each factory in order; the first non-null result is used.
    foreach (var factory in SampleObjectFactories)
    {
        if (factory == null)
        {
            continue;
        }
        try
        {
            sampleObject = factory(this, type);
            if (sampleObject != null)
            {
                return sampleObject;
            }
        }
        catch
        {
            // Deliberately best-effort: a throwing factory is skipped and the next
            // one (if any) is tried; the only symptom is a missing sample.
        }
    }

    // No registered object and no factory produced one.
    return null;
}
/// <summary>
/// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The type.</returns>
public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
{
    // The formatter list computed by ResolveType is not needed here.
    Collection<MediaTypeFormatter> unusedFormatters;
    return ResolveType(
        api,
        api.ActionDescriptor.ControllerDescriptor.ControllerName,
        api.ActionDescriptor.ActionName,
        api.ParameterDescriptions.Select(p => p.Name),
        SampleDirection.Request,
        out unusedFormatters);
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters applicable to the resolved type.</param>
/// <returns>The resolved type, or <see langword="null"/> when the request has no body parameter.</returns>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
    if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
    {
        throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
    }
    if (api == null)
    {
        throw new ArgumentNullException("api");
    }

    // An explicitly registered message type (exact parameter names, then the "*" wildcard)
    // overrides whatever the ApiDescription reports.
    Type type;
    bool hasOverride =
        ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
        ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type);

    if (hasOverride)
    {
        // The override changes the effective type, so recompute which of the
        // configured formatters actually support it.
        var supported = new Collection<MediaTypeFormatter>();
        foreach (MediaTypeFormatter formatter in api.ActionDescriptor.Configuration.Formatters)
        {
            if (IsFormatSupported(sampleDirection, formatter, type))
            {
                supported.Add(formatter);
            }
        }
        formatters = supported;
        return type;
    }

    if (sampleDirection == SampleDirection.Request)
    {
        ApiParameterDescription bodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
        formatters = api.SupportedRequestBodyFormatters;
        // null when the action takes no body parameter (e.g. GET).
        return bodyParameter == null ? null : bodyParameter.ParameterDescriptor.ParameterType;
    }

    // SampleDirection.Response (the only other defined value after the Enum.IsDefined check).
    formatters = api.SupportedResponseFormatters;
    return api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns>A TextSample holding the serialized value, or an InvalidSample describing why serialization failed.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
    if (formatter == null)
    {
        throw new ArgumentNullException("formatter");
    }
    if (mediaType == null)
    {
        throw new ArgumentNullException("mediaType");
    }
    object sample = String.Empty;
    MemoryStream ms = null;
    HttpContent content = null;
    try
    {
        if (formatter.CanWriteType(type))
        {
            ms = new MemoryStream();
            content = new ObjectContent(type, value, formatter, mediaType);
            // Blocking on the task is acceptable here: sample generation is an offline help-page operation.
            formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
            ms.Position = 0;
            StreamReader reader = new StreamReader(ms);
            string serializedSampleString = reader.ReadToEnd();
            // Pretty-print known text formats. Ordinal case-insensitive IndexOf avoids the
            // intermediate string allocated by ToUpperInvariant().Contains(...) and is the
            // recommended way to do a case-insensitive substring test (CA1862).
            if (mediaType.MediaType.IndexOf("xml", StringComparison.OrdinalIgnoreCase) >= 0)
            {
                serializedSampleString = TryFormatXml(serializedSampleString);
            }
            else if (mediaType.MediaType.IndexOf("json", StringComparison.OrdinalIgnoreCase) >= 0)
            {
                serializedSampleString = TryFormatJson(serializedSampleString);
            }
            sample = new TextSample(serializedSampleString);
        }
        else
        {
            sample = new InvalidSample(String.Format(
                CultureInfo.CurrentCulture,
                "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                mediaType,
                formatter.GetType().Name,
                type.Name));
        }
    }
    catch (Exception e)
    {
        // Any serialization failure is surfaced as an InvalidSample rather than crashing the help page.
        sample = new InvalidSample(String.Format(
            CultureInfo.CurrentCulture,
            "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
            formatter.GetType().Name,
            mediaType.MediaType,
            UnwrapException(e).Message));
    }
    finally
    {
        if (ms != null)
        {
            ms.Dispose();
        }
        if (content != null)
        {
            content.Dispose();
        }
    }
    return sample;
}
// Returns the single flattened inner exception of an AggregateException,
// or the exception itself for any other exception type.
internal static Exception UnwrapException(Exception exception)
{
    var aggregate = exception as AggregateException;
    return aggregate == null ? exception : aggregate.Flatten().InnerException;
}
// Default factory for sample objects
private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
{
    // Delegate sample creation to the generic object generator.
    return new ObjectGenerator().GenerateObject(type);
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
    try
    {
        // Round-trip through Json.NET to get indented output.
        return JsonConvert.SerializeObject(JsonConvert.DeserializeObject(str), Formatting.Indented);
    }
    catch
    {
        // Not parseable as JSON; hand back the input unchanged.
        return str;
    }
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
    try
    {
        // Round-trip through LINQ-to-XML to get indented output.
        return XDocument.Parse(str).ToString();
    }
    catch
    {
        // Not parseable as XML; hand back the input unchanged.
        return str;
    }
}
// A formatter supports a sample only if it can deserialize (request) or serialize (response) the type.
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
    if (sampleDirection == SampleDirection.Request)
    {
        return formatter.CanReadType(type);
    }
    if (sampleDirection == SampleDirection.Response)
    {
        return formatter.CanWriteType(type);
    }
    return false;
}
// Enumerates every registered ActionSamples entry that applies to the given
// controller/action/parameters/direction combination (lazily, via yield).
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
    HashSet<string> requestedParameters = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
    foreach (var sample in ActionSamples)
    {
        HelpPageSampleKey key = sample.Key;
        if (key.SampleDirection != sampleDirection)
        {
            continue;
        }
        if (!String.Equals(controllerName, key.ControllerName, StringComparison.OrdinalIgnoreCase) ||
            !String.Equals(actionName, key.ActionName, StringComparison.OrdinalIgnoreCase))
        {
            continue;
        }
        // A key registered with the single wildcard "*" matches any parameter list.
        if (key.ParameterNames.SetEquals(new[] { "*" }) || requestedParameters.SetEquals(key.ParameterNames))
        {
            yield return sample;
        }
    }
}
// Strings are wrapped in a TextSample so the help page renders them as literal text;
// any other sample object passes through untouched.
private static object WrapSampleIfString(object sample)
{
    string text = sample as string;
    return text == null ? sample : new TextSample(text);
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace JeffBot2LAPI.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
    // Number of elements generated for arrays, collections, dictionaries and queryables.
    internal const int DefaultCollectionSize = 2;
    // Shared generator for primitive/simple types; keeps a running index so repeated
    // values ("sample string 1", "sample string 2", ...) are distinguishable.
    private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();

    /// <summary>
    /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
    /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
    /// Complex types: POCO types.
    /// Nullables: <see cref="Nullable{T}"/>.
    /// Arrays: arrays of simple types or complex types.
    /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
    /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
    /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
    /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
    /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
    /// </summary>
    /// <param name="type">The type.</param>
    /// <returns>An object of the given type.</returns>
    public object GenerateObject(Type type)
    {
        // The dictionary tracks already-created instances so circular references terminate.
        return GenerateObject(type, new Dictionary<Type, object>());
    }

    // Core dispatch: routes the requested type to the appropriate specialized generator.
    // Returns null when the type cannot be handled or instantiation fails for any reason.
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
    private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
    {
        try
        {
            if (SimpleTypeObjectGenerator.CanGenerateObject(type))
            {
                return SimpleObjectGenerator.GenerateObject(type);
            }
            if (type.IsArray)
            {
                return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
            }
            if (type.IsGenericType)
            {
                // Nullable<T>, KeyValuePair<,>, Tuple<...>, generic collections, etc.
                return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
            }
            if (type == typeof(IDictionary))
            {
                // Non-generic IDictionary gets a concrete Hashtable.
                return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
            }
            if (typeof(IDictionary).IsAssignableFrom(type))
            {
                return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
            }
            if (type == typeof(IList) ||
                type == typeof(IEnumerable) ||
                type == typeof(ICollection))
            {
                // Non-generic collection interfaces get a concrete ArrayList.
                return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
            }
            if (typeof(IList).IsAssignableFrom(type))
            {
                return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
            }
            if (type == typeof(IQueryable))
            {
                return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
            }
            if (type.IsEnum)
            {
                return GenerateEnum(type);
            }
            if (type.IsPublic || type.IsNestedPublic)
            {
                // Anything else that is publicly constructible is treated as a POCO.
                return GenerateComplexObject(type, createdObjectReferences);
            }
        }
        catch
        {
            // Returns null if anything fails
            return null;
        }
        // Non-public or otherwise unsupported type.
        return null;
    }

    // Handles all closed generic types: Nullable, KeyValuePair, Tuple, generic
    // collection/dictionary interfaces and anything implementing them, falling back to POCO.
    private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
    {
        Type genericTypeDefinition = type.GetGenericTypeDefinition();
        if (genericTypeDefinition == typeof(Nullable<>))
        {
            return GenerateNullable(type, createdObjectReferences);
        }
        if (genericTypeDefinition == typeof(KeyValuePair<,>))
        {
            return GenerateKeyValuePair(type, createdObjectReferences);
        }
        if (IsTuple(genericTypeDefinition))
        {
            return GenerateTuple(type, createdObjectReferences);
        }
        Type[] genericArguments = type.GetGenericArguments();
        if (genericArguments.Length == 1)
        {
            if (genericTypeDefinition == typeof(IList<>) ||
                genericTypeDefinition == typeof(IEnumerable<>) ||
                genericTypeDefinition == typeof(ICollection<>))
            {
                // Generic collection interfaces get a concrete List<T>.
                Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
            }
            if (genericTypeDefinition == typeof(IQueryable<>))
            {
                return GenerateQueryable(type, collectionSize, createdObjectReferences);
            }
            // Any concrete type implementing ICollection<T> is filled via its Add method.
            Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
            if (closedCollectionType.IsAssignableFrom(type))
            {
                return GenerateCollection(type, collectionSize, createdObjectReferences);
            }
        }
        if (genericArguments.Length == 2)
        {
            if (genericTypeDefinition == typeof(IDictionary<,>))
            {
                // IDictionary<K,V> gets a concrete Dictionary<K,V>.
                Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
            }
            Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
            if (closedDictionaryType.IsAssignableFrom(type))
            {
                return GenerateDictionary(type, collectionSize, createdObjectReferences);
            }
        }
        if (type.IsPublic || type.IsNestedPublic)
        {
            // Generic POCO (e.g. MyWrapper<T>).
            return GenerateComplexObject(type, createdObjectReferences);
        }
        return null;
    }

    // Builds a Tuple<...> by generating each generic argument; returns null only if
    // every component failed to generate.
    private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
    {
        Type[] genericArgs = type.GetGenericArguments();
        object[] parameterValues = new object[genericArgs.Length];
        bool failedToCreateTuple = true;
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < genericArgs.Length; i++)
        {
            parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
            failedToCreateTuple &= parameterValues[i] == null;
        }
        if (failedToCreateTuple)
        {
            return null;
        }
        object result = Activator.CreateInstance(type, parameterValues);
        return result;
    }

    // True for the eight Tuple<...> generic definitions (arity 1 through 8).
    private static bool IsTuple(Type genericTypeDefinition)
    {
        return genericTypeDefinition == typeof(Tuple<>) ||
            genericTypeDefinition == typeof(Tuple<,>) ||
            genericTypeDefinition == typeof(Tuple<,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,,,,>);
    }

    // Builds a KeyValuePair<K,V>; returns null only when both key and value failed to generate.
    private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
    {
        Type[] genericArgs = keyValuePairType.GetGenericArguments();
        Type typeK = genericArgs[0];
        Type typeV = genericArgs[1];
        ObjectGenerator objectGenerator = new ObjectGenerator();
        object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
        object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
        if (keyObject == null && valueObject == null)
        {
            // Failed to create key and values
            return null;
        }
        object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
        return result;
    }

    // Builds an array of 'size' generated elements; returns null when no element could be generated.
    private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        Type type = arrayType.GetElementType();
        Array result = Array.CreateInstance(type, size);
        bool areAllElementsNull = true;
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < size; i++)
        {
            object element = objectGenerator.GenerateObject(type, createdObjectReferences);
            result.SetValue(element, i);
            areAllElementsNull &= element == null;
        }
        if (areAllElementsNull)
        {
            return null;
        }
        return result;
    }

    // Populates a dictionary instance via reflection: uses Add/TryAdd and Contains/ContainsKey
    // so both generic and non-generic dictionary types are covered.
    private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        Type typeK = typeof(object);
        Type typeV = typeof(object);
        if (dictionaryType.IsGenericType)
        {
            Type[] genericArgs = dictionaryType.GetGenericArguments();
            typeK = genericArgs[0];
            typeV = genericArgs[1];
        }
        object result = Activator.CreateInstance(dictionaryType);
        MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
        MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < size; i++)
        {
            object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            if (newKey == null)
            {
                // Cannot generate a valid key
                return null;
            }
            // Skip duplicate keys instead of letting Add throw.
            bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
            if (!containsKey)
            {
                object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                addMethod.Invoke(result, new object[] { newKey, newValue });
            }
        }
        return result;
    }

    // Picks the first declared value of the enum, or null for an empty enum.
    private static object GenerateEnum(Type enumType)
    {
        Array possibleValues = Enum.GetValues(enumType);
        if (possibleValues.Length > 0)
        {
            return possibleValues.GetValue(0);
        }
        return null;
    }

    // Builds an IQueryable/IQueryable<T> by generating a backing list/array and wrapping
    // it with Queryable.AsQueryable (the generic overload is located via reflection).
    private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        bool isGeneric = queryableType.IsGenericType;
        object list;
        if (isGeneric)
        {
            Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
            list = GenerateCollection(listType, size, createdObjectReferences);
        }
        else
        {
            list = GenerateArray(typeof(object[]), size, createdObjectReferences);
        }
        if (list == null)
        {
            return null;
        }
        if (isGeneric)
        {
            Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
            MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
            return asQueryableMethod.Invoke(null, new[] { list });
        }
        return Queryable.AsQueryable((IEnumerable)list);
    }

    // Populates a collection instance via its Add method; element type is the single
    // generic argument, or object for non-generic collections.
    private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        Type type = collectionType.IsGenericType ?
            collectionType.GetGenericArguments()[0] :
            typeof(object);
        object result = Activator.CreateInstance(collectionType);
        MethodInfo addMethod = collectionType.GetMethod("Add");
        bool areAllElementsNull = true;
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < size; i++)
        {
            object element = objectGenerator.GenerateObject(type, createdObjectReferences);
            addMethod.Invoke(result, new object[] { element });
            areAllElementsNull &= element == null;
        }
        if (areAllElementsNull)
        {
            return null;
        }
        return result;
    }

    // Nullable<T> is generated as its underlying T (boxing makes the wrapper implicit).
    private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
    {
        Type type = nullableType.GetGenericArguments()[0];
        ObjectGenerator objectGenerator = new ObjectGenerator();
        return objectGenerator.GenerateObject(type, createdObjectReferences);
    }

    // Instantiates a POCO (default constructor required for reference types) and fills
    // its public settable properties and fields. The createdObjectReferences cache is
    // consulted first so circular object graphs terminate.
    private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
    {
        object result = null;
        if (createdObjectReferences.TryGetValue(type, out result))
        {
            // The object has been created already, just return it. This will handle the circular reference case.
            return result;
        }
        if (type.IsValueType)
        {
            result = Activator.CreateInstance(type);
        }
        else
        {
            ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
            if (defaultCtor == null)
            {
                // Cannot instantiate the type because it doesn't have a default constructor
                return null;
            }
            result = defaultCtor.Invoke(new object[0]);
        }
        // Register before populating members so self-referencing members reuse this instance.
        createdObjectReferences.Add(type, result);
        SetPublicProperties(type, result, createdObjectReferences);
        SetPublicFields(type, result, createdObjectReferences);
        return result;
    }

    // Assigns a generated value to every writable public instance property.
    private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
    {
        PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
        ObjectGenerator objectGenerator = new ObjectGenerator();
        foreach (PropertyInfo property in properties)
        {
            if (property.CanWrite)
            {
                object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                property.SetValue(obj, propertyValue, null);
            }
        }
    }

    // Assigns a generated value to every public instance field.
    private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
    {
        FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
        ObjectGenerator objectGenerator = new ObjectGenerator();
        foreach (FieldInfo field in fields)
        {
            object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
            field.SetValue(obj, fieldValue);
        }
    }

    // Table-driven generator for primitive and well-known framework types.
    // The running _index makes successive samples of the same type distinct.
    private class SimpleTypeObjectGenerator
    {
        private long _index = 0;
        private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

        [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
        private static Dictionary<Type, Func<long, object>> InitializeGenerators()
        {
            return new Dictionary<Type, Func<long, object>>
            {
                { typeof(Boolean), index => true },
                { typeof(Byte), index => (Byte)64 },
                { typeof(Char), index => (Char)65 },
                { typeof(DateTime), index => DateTime.Now },
                { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                { typeof(DBNull), index => DBNull.Value },
                { typeof(Decimal), index => (Decimal)index },
                { typeof(Double), index => (Double)(index + 0.1) },
                { typeof(Guid), index => Guid.NewGuid() },
                { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                { typeof(Int64), index => (Int64)index },
                { typeof(Object), index => new object() },
                { typeof(SByte), index => (SByte)64 },
                { typeof(Single), index => (Single)(index + 0.1) },
                {
                    typeof(String), index =>
                    {
                        return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                    }
                },
                {
                    typeof(TimeSpan), index =>
                    {
                        return TimeSpan.FromTicks(1234567);
                    }
                },
                { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                { typeof(UInt64), index => (UInt64)index },
                {
                    typeof(Uri), index =>
                    {
                        return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                    }
                },
            };
        }

        // True when the type has a registered simple-type factory.
        public static bool CanGenerateObject(Type type)
        {
            return DefaultGenerators.ContainsKey(type);
        }

        // Invokes the factory with the next index value.
        public object GenerateObject(Type type)
        {
            return DefaultGenerators[type](++_index);
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using Memoria;
using Memoria.Data;
using UnityEngine;
using Object = System.Object;
public class BattleSPSSystem : MonoBehaviour
{
// Exposes the pooled status-effect SPS slots created by Init.
public List<BattleSPS> GetSPSList()
{
    return _spsList;
}
// Resets all state and pre-allocates a fixed pool of 96 SPS slots
// (GetObjSpsIndex addresses them as 12 status slots per battler line).
public void Init()
{
    this.rot = new Vector3(0f, 0f, 0f);
    this._isReady = false;
    this._spsList = new List<BattleSPS>();
    this._specialSpsList = new List<BattleSPS>();
    this._specialSpsFadingList = new List<float>();
    this._specialSpsRemovingList = new List<bool>();
    this._spsBinDict = new Dictionary<Int32, KeyValuePair<Int32, Byte[]>>();
    for (Int32 i = 0; i < 96; i++)
    {
        // Each slot is a child GameObject carrying its own renderer, mesh filter and BattleSPS component.
        GameObject gameObject = new GameObject("SPS_" + i.ToString("D4"));
        gameObject.transform.parent = base.transform;
        gameObject.transform.localScale = Vector3.one;
        gameObject.transform.localPosition = Vector3.zero;
        MeshRenderer meshRenderer = gameObject.AddComponent<MeshRenderer>();
        MeshFilter meshFilter = gameObject.AddComponent<MeshFilter>();
        BattleSPS battleSPS = gameObject.AddComponent<BattleSPS>();
        battleSPS.Init();
        battleSPS.spsIndex = i;
        battleSPS.spsTransform = gameObject.transform;
        battleSPS.meshRenderer = meshRenderer;
        battleSPS.meshFilter = meshFilter;
        this._spsList.Add(battleSPS);
    }
    this.MapName = FF9StateSystem.Field.SceneName;
    // The system only starts servicing slots once the status textures have loaded.
    this._isReady = this._loadSPSTexture();
}
// Advances the animation frame of every active SPS slot once per tick.
// Frame indices are stored in 16ths of a frame (see the << 4 / >> 4 arithmetic).
public void Service()
{
    if (!this._isReady)
    {
        return;
    }
    for (Int32 i = 0; i < this._spsList.Count; i++)
    {
        BattleSPS battleSPS = this._spsList[i];
        // Only animate slots with bound data (SHP type, or a loaded SPS bin) and the active bit (attr & 1) set.
        if ((battleSPS.type != 0 || battleSPS.spsBin != null) && (battleSPS.attr & 1) != 0)
        {
            if (battleSPS.lastFrame != -1)
            {
                battleSPS.lastFrame = battleSPS.curFrame;
                battleSPS.curFrame += battleSPS.frameRate;
                if (battleSPS.curFrame >= battleSPS.frameCount)
                {
                    // Forward playback wraps to the first frame.
                    battleSPS.curFrame = 0;
                }
                else if (battleSPS.curFrame < 0)
                {
                    // Backward playback wraps to the last whole frame.
                    battleSPS.curFrame = (battleSPS.frameCount >> 4) - 1 << 4;
                }
            }
        }
    }
    // Same frame-advance logic for dynamically added special effects,
    // which additionally require the isUpdate flag.
    for (Int32 i = 0; i < this._specialSpsList.Count; i++)
    {
        BattleSPS special_sps = this._specialSpsList[i];
        if ((special_sps.type != 0 || special_sps.spsBin != null) && (special_sps.attr & 1) != 0 && special_sps.lastFrame != -1 && special_sps.isUpdate)
        {
            special_sps.lastFrame = special_sps.curFrame;
            special_sps.curFrame += special_sps.frameRate;
            if (special_sps.curFrame >= special_sps.frameCount)
            {
                special_sps.curFrame = 0;
            }
            else if (special_sps.curFrame < 0)
            {
                special_sps.curFrame = (special_sps.frameCount >> 4) - 1 << 4;
            }
        }
    }
}
// Rebuilds/updates the visuals of every active SPS slot and special effect for this render tick.
public void GenerateSPS()
{
    if (!this._isReady)
    {
        return;
    }
    // Regular (per-battler status) slots.
    for (Int32 i = 0; i < this._spsList.Count; i++)
    {
        BattleSPS battleSPS = this._spsList[i];
        if ((battleSPS.type != 0 || battleSPS.spsBin != null) && (battleSPS.attr & 1) != 0)
        {
            if (battleSPS.type == 0)
            {
                // SPS sprite: follow the attached character bone (if any), then rebuild the mesh when dirty.
                if (battleSPS.charTran != (UnityEngine.Object)null && battleSPS.boneTran != (UnityEngine.Object)null)
                {
                    battleSPS.pos = battleSPS.boneTran.position + battleSPS.posOffset;
                }
                if (battleSPS.isUpdate)
                {
                    battleSPS.meshRenderer.enabled = true;
                    battleSPS.GenerateSPS();
                    battleSPS.isUpdate = false;
                }
                else
                {
                    battleSPS.meshRenderer.enabled = false;
                }
            }
            else
            {
                // SHP-based effect animates its own objects instead of the mesh renderer.
                battleSPS.AnimateSHP();
            }
            battleSPS.lastFrame = battleSPS.curFrame;
        }
    }
    // Special effects stay visible only while some non-player battler has not disappeared.
    bool show_special = false;
    for (BTL_DATA next = FF9StateSystem.Battle.FF9Battle.btl_list.next; next != null; next = next.next)
        if (next.bi.player == 0 && next.bi.disappear == 0)
            show_special = true;
    for (Int32 i = 0; i < this._specialSpsList.Count; i++)
    {
        BattleSPS special_sps = this._specialSpsList[i];
        if ((special_sps.type != 0 || special_sps.spsBin != null) && (special_sps.attr & 1) != 0 && special_sps.isUpdate)
        {
            double rotation_cos = Math.Cos(2 * Math.PI * special_sps.curFrame / 10000) * this._specialSpsFadingList[i]; // 1 turn every 10 seconds
            double rotation_sin = Math.Sin(2 * Math.PI * special_sps.curFrame / 10000) * this._specialSpsFadingList[i];
            // Rotate the effect's configured offset around the vertical axis, scaled by the current fade factor.
            Vector3 rotated_pos = BattleSPSSystem.statusTextures[special_sps.refNo].extraPos;
            float tmp = rotated_pos.x;
            rotated_pos.x = (float)(rotation_cos * tmp - rotation_sin * rotated_pos.z);
            rotated_pos.z = (float)(rotation_sin * tmp + rotation_cos * rotated_pos.z);
            for (int j = 0; j < special_sps.shpGo.Length; j++)
                special_sps.shpGo[j].transform.localPosition = rotated_pos;
            special_sps.isUpdate = show_special;
            special_sps.AnimateSHP();
            special_sps.lastFrame = special_sps.curFrame;
            // While flagged for removal the effect fades out; once fully faded its objects are deactivated.
            if (this._specialSpsRemovingList[i])
                this._specialSpsFadingList[i] -= 0.05f;
            if (this._specialSpsFadingList[i] > 0.0f)
                special_sps.isUpdate = true;
            else
                for (int j = 0; j < special_sps.shpGo.Length; j++)
                    special_sps.shpGo[j].SetActive(false);
        }
    }
}
// Loads every frame texture declared in statusTextures.
// Always returns true; missing assets are replaced by empty placeholder textures.
private Boolean _loadSPSTexture()
{
    for (Int32 texIndex = 0; texIndex < BattleSPSSystem.statusTextures.Length; texIndex++)
    {
        BattleSPSSystem.SPSTexture entry = BattleSPSSystem.statusTextures[texIndex];
        for (Int32 frame = 0; frame < entry.textures.Length; frame++)
        {
            // SHP effects keep one texture per frame in a sub-folder ("name/name_1", ...);
            // plain SPS effects use a single texture named after the effect.
            String assetName;
            if (entry.type == "shp")
            {
                assetName = entry.name + "/" + entry.name + "_" + (frame + 1);
            }
            else
            {
                assetName = entry.name;
            }
            String[] pngInfo;
            Texture2D loaded = AssetManager.Load<Texture2D>("EmbeddedAsset/BattleMap/Status/" + assetName, out pngInfo, false);
            if (loaded == (UnityEngine.Object)null)
            {
                loaded = new Texture2D(0, 0);
            }
            entry.textures[frame] = loaded;
        }
    }
    return true;
}
// The low 15 bits of the first UInt16 of an .sps file hold the frame count;
// the result is shifted left by 4 because frame indices are stored in 16ths.
private Int32 _GetSpsFrameCount(Byte[] spsBin)
{
    UInt16 header = BitConverter.ToUInt16(spsBin, 0);
    return (header & 32767) << 4;
}
// Loads the raw .sps data for the given status effect number and caches it,
// together with its frame count, in _spsBinDict. Returns false when the asset is missing.
private Boolean _loadSPSBin(Int32 spsNo)
{
    if (this._spsBinDict.ContainsKey(spsNo))
    {
        return true;
    }
    // Status effect .sps file names, indexed by status SPS number.
    String[] spsFileNames = new String[]
    {
        "st_doku", "st_mdoku", "st_slow", "st_heis", "st_nemu", "st_heat",
        "st_friz", "st_rif", "st_moku", "st_moum", "st_meiwa", "st_basak"
    };
    String[] spsInfo;
    Byte[] spsBin = AssetManager.LoadBytes("BattleMap/BattleSPS/" + spsFileNames[spsNo] + ".sps", out spsInfo, true);
    if (spsBin == null)
    {
        return false;
    }
    Int32 frameCount = this._GetSpsFrameCount(spsBin);
    this._spsBinDict.Add(spsNo, new KeyValuePair<Int32, Byte[]>(frameCount, spsBin));
    return true;
}
// Applies a single field-script parameter to the SPS slot ObjNo.
// Each ParmType opcode updates one aspect of the effect; unknown opcodes are ignored.
public void FF9FieldSPSSetObjParm(Int32 ObjNo, Int32 ParmType, Int32 Arg0, Int32 Arg1, Int32 Arg2)
{
    BattleSPS sps = this._spsList[ObjNo];
    switch (ParmType)
    {
        case 130: // bind (Arg0 = sps number) or unbind (Arg0 == -1) an SPS resource
            if (Arg0 == -1)
            {
                sps.spsBin = null;
                sps.meshRenderer.enabled = false;
            }
            else
            {
                if (this._loadSPSBin(Arg0))
                {
                    sps.spsBin = this._spsBinDict[Arg0].Value;
                    sps.curFrame = 0;
                    sps.frameCount = this._spsBinDict[Arg0].Key;
                }
                sps.refNo = Arg0;
            }
            break;
        case 131: // set (Arg1 != 0) or clear attribute bits; bit 0 drives renderer visibility
            if (Arg1 != 0)
            {
                sps.attr = (Byte)(sps.attr | (Byte)Arg0);
            }
            else
            {
                sps.attr = (Byte)(sps.attr & (Byte)(~(Byte)Arg0));
            }
            sps.meshRenderer.enabled = (sps.attr & 1) != 0;
            break;
        case 135: // absolute position (Y axis inverted)
            sps.pos = new Vector3((Single)Arg0, (Single)(Arg1 * -1), (Single)Arg2);
            break;
        case 140: // rotation given in 1/4096 of a turn per axis
            sps.rot = new Vector3((Single)Arg0 / 4096f * 360f, (Single)Arg1 / 4096f * 360f, (Single)Arg2 / 4096f * 360f);
            break;
        case 145: // fixed-point scale
            sps.scale = Arg0;
            break;
        case 150: // attach to a character bone (Arg0 = object uid, Arg1 = bone number)
        {
            Obj objUID = PersistenSingleton<EventEngine>.Instance.GetObjUID(Arg0);
            sps.charNo = Arg0;
            sps.boneNo = Arg1;
            sps.charTran = objUID.go.transform;
            sps.boneTran = objUID.go.transform.GetChildByName("bone" + sps.boneNo.ToString("D3"));
            break;
        }
        case 155: // fade value
            sps.fade = (Byte)Arg0;
            break;
        case 156: // alpha rate
            sps.arate = (Byte)Arg0;
            break;
        case 160: // animation speed (in 16ths of a frame per tick)
            sps.frameRate = Arg0;
            break;
        case 161: // current frame (stored in 16ths)
            sps.curFrame = Arg0 << 4;
            break;
        case 165: // position offset relative to the attached bone (Y axis inverted)
            sps.posOffset = new Vector3((Single)Arg0, (Single)(-(Single)Arg1), (Single)Arg2);
            break;
        case 170: // depth (render order) offset
            sps.depthOffset = Arg0;
            break;
    }
}
// Binds (StatusNo >= 0) or clears (StatusNo == -1) a status effect in slot ObjNo.
// type == 0 selects the classic SPS sprite path, type == 1 the SHP object path.
public void SetBtlStatus(Int32 ObjNo, Int32 StatusNo, Byte abr = 0, Int32 type = 0)
{
    BattleSPS battleSPS = this._spsList[ObjNo];
    if (StatusNo != -1)
    {
        battleSPS.type = type;
        if (type == 0)
        {
            // SPS-based effect: bind the raw .sps data and reset animation state.
            if (this._loadSPSBin(StatusNo))
            {
                battleSPS.spsBin = this._spsBinDict[StatusNo].Value;
                battleSPS.curFrame = 0;
                battleSPS.frameCount = this._spsBinDict[StatusNo].Key;
                battleSPS.arate = abr;
                BattleSPS battleSPS2 = battleSPS;
                // 254 == ~1: clear the active bit; the effect is activated later (see UpdateBtlStatus).
                battleSPS2.attr = (Byte)(battleSPS2.attr & 254);
                if ((battleSPS.attr & 1) == 0)
                {
                    battleSPS.meshRenderer.enabled = false;
                }
                battleSPS.refNo = StatusNo;
                battleSPS.spsScale = BattleSPSSystem.statusTextures[battleSPS.refNo].spsScale;
                battleSPS.spsDistance = BattleSPSSystem.statusTextures[battleSPS.refNo].spsDistance;
            }
        }
        else
        {
            // SHP-based effect: create its GameObjects on first use.
            battleSPS.refNo = StatusNo;
            if (battleSPS.shpGo == null)
            {
                battleSPS.GenerateSHP();
            }
        }
    }
    else
    {
        // Removal: unbind the data, hide the renderer and deactivate any SHP objects.
        battleSPS.spsBin = null;
        battleSPS.meshRenderer.enabled = false;
        if (battleSPS.type == 1)
        {
            battleSPS.type = 0;
            for (Int32 i = 0; i < (Int32)battleSPS.shpGo.Length; i++)
            {
                battleSPS.shpGo[i].SetActive(false);
            }
        }
    }
}
// Converts a pure rotation matrix into a quaternion: component magnitudes come from
// the matrix diagonal, component signs are recovered from the off-diagonal terms.
public static Quaternion QuaternionFromMatrix(Matrix4x4 m)
{
    Quaternion q = default(Quaternion);
    q.w = Mathf.Sqrt(Mathf.Max(0f, 1f + m[0, 0] + m[1, 1] + m[2, 2])) / 2f;
    q.x = Mathf.Sqrt(Mathf.Max(0f, 1f + m[0, 0] - m[1, 1] - m[2, 2])) / 2f;
    q.y = Mathf.Sqrt(Mathf.Max(0f, 1f - m[0, 0] + m[1, 1] - m[2, 2])) / 2f;
    q.z = Mathf.Sqrt(Mathf.Max(0f, 1f - m[0, 0] - m[1, 1] + m[2, 2])) / 2f;
    // Flip each axis component so its sign matches the corresponding off-diagonal difference.
    q.x *= Mathf.Sign(q.x * (m[2, 1] - m[1, 2]));
    q.y *= Mathf.Sign(q.y * (m[0, 2] - m[2, 0]));
    q.z *= Mathf.Sign(q.z * (m[1, 0] - m[0, 1]));
    return q;
}
// Moves the status effect for (btl, status) to the given position, rewinds it to
// the given frame, forces the active bit on and flags it for a visual rebuild.
public void UpdateBtlStatus(BTL_DATA btl, BattleStatus status, Vector3 pos, Vector3 rot, Int32 frame)
{
    BattleSPS sps = this._spsList[this.GetObjSpsIndex(btl, status)];
    sps.pos = new Vector3(pos.x, pos.y, pos.z);
    // Frame indices are stored in 16ths of a frame.
    sps.curFrame = frame << 4;
    if ((sps.attr & 1) == 0)
    {
        sps.attr = (Byte)(sps.attr | 1);
    }
    sps.isUpdate = true;
}
// Finds the status icon table entry whose mask matches the given status.
// Falls back to index 0 when no entry matches (same as the original behavior).
public Int32 GetStatusSPSIndex(BattleStatus status)
{
    for (Int32 i = 0; i < btl2d.wStatIconTbl.Length; i++)
    {
        if (btl2d.wStatIconTbl[i].Mask == status)
        {
            return i;
        }
    }
    return 0;
}
// 12 status SPS slots are reserved per battler line; the slot index is
// line_no * 12 plus the status icon index.
public Int32 GetObjSpsIndex(BTL_DATA btl, BattleStatus status)
{
    return btl.bi.line_no * 12 + this.GetStatusSPSIndex(status);
}
// Binds the status effect for (btl, status) into the battler's reserved slot,
// using the icon table entry's alpha rate and effect type.
public void AddBtlSPSObj(BattleUnit btl, BattleStatus status)
{
    Int32 iconIndex = this.GetStatusSPSIndex(status);
    btl2d.STAT_ICON_TBL iconEntry = btl2d.wStatIconTbl[iconIndex];
    this.SetBtlStatus(this.GetObjSpsIndex(btl.Data, status), iconIndex, iconEntry.Abr, (Int32)iconEntry.Type);
}
// Clears the status effect for (btl, status) from the battler's reserved slot
// (StatusNo == -1 tells SetBtlStatus to unbind).
public void RemoveBtlSPSObj(BTL_DATA btl, BattleStatus status)
{
    Int32 iconIndex = this.GetStatusSPSIndex(status);
    btl2d.STAT_ICON_TBL iconEntry = btl2d.wStatIconTbl[iconIndex];
    this.SetBtlStatus(this.GetObjSpsIndex(btl, status), -1, iconEntry.Abr, (Int32)iconEntry.Type);
}
// Activates or deactivates every SHP GameObject in the pooled slots.
// NOTE: mirrors the original behavior exactly — the scan STOPS at the first slot
// without SHP objects (return, not continue).
public void SetActiveSHP(Boolean active)
{
    foreach (BattleSPS sps in this._spsList)
    {
        if (sps.shpGo == null)
        {
            return;
        }
        foreach (GameObject go in sps.shpGo)
        {
            go.SetActive(active);
        }
    }
}
public void AddSpecialSPSObj(int specialid, uint spstype, Vector3 pos, float scale)
{
BattleSPS special_sps;
if (specialid < 0 || specialid > _specialSpsList.Count)
specialid = _specialSpsList.Count;
if (specialid == _specialSpsList.Count)
{
GameObject gameObject = new GameObject("SpecialSPS_" + specialid.ToString("D4"));
gameObject.transform.parent = base.transform;
gameObject.transform.localScale = Vector3.one;
gameObject.transform.localPosition = Vector3.zero;
MeshRenderer meshRenderer = gameObject.AddComponent<MeshRenderer>();
MeshFilter meshFilter = gameObject.AddComponent<MeshFilter>();
special_sps = gameObject.AddComponent<BattleSPS>();
special_sps.Init();
special_sps.spsIndex = specialid;
special_sps.spsTransform = gameObject.transform;
special_sps.meshRenderer = meshRenderer;
special_sps.meshFilter = meshFilter;
this._specialSpsList.Add(special_sps);
this._specialSpsFadingList.Add(1.0f);
this._specialSpsRemovingList.Add(false);
}
else
{
special_sps = this._specialSpsList[specialid];
this._specialSpsFadingList[specialid] = 1.0f;
this._specialSpsRemovingList[specialid] = false;
}
special_sps.pos = pos;
special_sps.curFrame = 0;
special_sps.lastFrame = 0;
special_sps.frameCount = 10000;
special_sps.attr |= 1;
special_sps.isUpdate = true;
special_sps.refNo = (int)spstype;
special_sps.type = (BattleSPSSystem.statusTextures[(int)spstype].type.Equals("shp") ? 1 : 0);
special_sps.scale = (int)(scale * 4096);
if (special_sps.shpGo == null)
special_sps.GenerateSHP();
}
public void RemoveSpecialSPSObj(int specialid)
{
if (specialid < 0 || specialid >= _specialSpsList.Count)
return;
this._specialSpsRemovingList[specialid] = true;
}
	// Name of the battle map this SPS set belongs to.
	public String MapName;
	// NOTE(review): presumably set once map resources are loaded — confirm against callers.
	private Boolean _isReady;
	// Per-unit status-effect entries; 12 consecutive slots per battle line (see GetObjSpsIndex).
	private List<BattleSPS> _spsList;
	// Custom fields: special SPS effects
	private List<BattleSPS> _specialSpsList;
	private List<float> _specialSpsFadingList; // per-slot fade factor; reset to 1.0 on (re)activation
	private List<bool> _specialSpsRemovingList; // slots flagged for removal by RemoveSpecialSPSObj
	// Raw SPS binary data keyed by id (value: frame count + bytes) — TODO confirm value semantics.
	private Dictionary<Int32, KeyValuePair<Int32, Byte[]>> _spsBinDict;
	public Vector3 rot;
	// Visual descriptor per status effect / custom orb, indexed by the 'spstype'
	// argument of AddSpecialSPSObj.
	public static BattleSPSSystem.SPSTexture[] statusTextures = new BattleSPSSystem.SPSTexture[]
	{
		new BattleSPSSystem.SPSTexture("poison", "sps", 1, Vector3.zero, 6f, 4f),
		new BattleSPSSystem.SPSTexture("venom", "sps", 1, Vector3.zero, 2f, 1.5f),
		new BattleSPSSystem.SPSTexture("slow", "shp", 6, new Vector3(212f, 0f, 0f), 4f, 5f),
		new BattleSPSSystem.SPSTexture("haste", "shp", 6, new Vector3(-148f, 0f, 0f), 4f, 5f),
		new BattleSPSSystem.SPSTexture("sleep", "sps", 1, Vector3.zero, 2.5f, 4.5f),
		new BattleSPSSystem.SPSTexture("heat", "sps", 1, Vector3.zero, 4f, 5f),
		new BattleSPSSystem.SPSTexture("freeze", "sps", 1, Vector3.zero, 4f, 5f),
		new BattleSPSSystem.SPSTexture("reflect", "sps", 1, Vector3.zero, 3f, 3f),
		new BattleSPSSystem.SPSTexture("silence", "shp", 3, new Vector3(-92f, 0f, 0f), 4f, 5f),
		new BattleSPSSystem.SPSTexture("blind", "sps", 1, Vector3.zero, 5f, 5.5f),
		new BattleSPSSystem.SPSTexture("trouble", "shp", 4, new Vector3(92f, 0f, 0f), 4f, 5f),
		new BattleSPSSystem.SPSTexture("berserk", "sps", 1, Vector3.zero, 3f, 2f),
		new BattleSPSSystem.SPSTexture("customfireorb", "shp", 3, new Vector3(400f, 0f, 0f), 5f, 5f),
		new BattleSPSSystem.SPSTexture("customthunderorb", "shp", 4, new Vector3(-200f, 0f, -346.41f), 5f, 5f),
		new BattleSPSSystem.SPSTexture("customiceorb", "shp", 4, new Vector3(-200f, 0f, 346.41f), 5f, 5f)
	};
public class SPSTexture
{
public SPSTexture(String name, String type, Int32 textureNum, Vector3 extraPos, Single scale = 4f, Single distance = 5f)
{
this.name = name;
this.type = type;
this.textures = new Texture2D[textureNum];
this.extraPos = extraPos;
this.spsScale = scale;
this.spsDistance = distance;
}
public String name;
public String type;
public Texture2D[] textures;
public Vector3 extraPos;
public Single spsScale;
public Single spsDistance;
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Composition;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.CodeFixes;
using Microsoft.CodeAnalysis.Editor.Host;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.CodeAnalysis.Internal.Log;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Editor.Implementation.Suggestions
{
    /// <summary>
    /// Host-layer service that computes a "fix all occurrences" code action behind a
    /// cancellable wait dialog, optionally confirms it via the preview-changes
    /// dialog, and returns the resulting solution or code-action operations.
    /// </summary>
    [ExportWorkspaceServiceFactory(typeof(IFixAllGetFixesService), ServiceLayer.Host), Shared]
    internal class FixAllGetFixesService : IFixAllGetFixesService, IWorkspaceServiceFactory
    {
        private readonly IWaitIndicator _waitIndicator;

        [ImportingConstructor]
        public FixAllGetFixesService(IWaitIndicator waitIndicator)
        {
            _waitIndicator = waitIndicator;
        }

        // Stateless apart from the injected wait indicator, so the factory hands
        // out the same instance for every workspace.
        public IWorkspaceService CreateService(HostWorkspaceServices workspaceServices)
        {
            return this;
        }

        /// <summary>
        /// Computes the fix-all code action and returns the changed solution.
        /// Returns null when the user cancelled; returns the original solution when
        /// no fix was produced without user cancellation.
        /// </summary>
        public async Task<Solution> GetFixAllChangedSolutionAsync(FixAllProvider fixAllProvider, FixAllContext fixAllContext, string fixAllTitle, string waitDialogMessage)
        {
            // Compute fix all occurrences code fix for the given fix all context.
            // Bring up a cancellable wait dialog.
            bool userCancelled;
            var codeAction = GetFixAllCodeAction(fixAllProvider, fixAllContext, fixAllTitle, waitDialogMessage, out userCancelled);
            if (codeAction == null)
            {
                return userCancelled ? null : fixAllContext.Solution;
            }
            fixAllContext.CancellationToken.ThrowIfCancellationRequested();
            return await codeAction.GetChangedSolutionInternalAsync(cancellationToken: fixAllContext.CancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// Computes the fix-all code action and returns its operations, optionally
        /// after user confirmation through the preview-changes dialog.  Returns null
        /// when cancelled or when no fix was produced.
        /// </summary>
        public async Task<IEnumerable<CodeActionOperation>> GetFixAllOperationsAsync(FixAllProvider fixAllProvider, FixAllContext fixAllContext, string fixAllTitle, string waitDialogMessage, bool showPreviewChangesDialog)
        {
            // Compute fix all occurrences code fix for the given fix all context.
            // Bring up a cancellable wait dialog.
            bool userCancelled;
            var codeAction = GetFixAllCodeAction(fixAllProvider, fixAllContext, fixAllTitle, waitDialogMessage, out userCancelled);
            if (codeAction == null)
            {
                return null;
            }
            return await GetFixAllOperationsAsync(codeAction, fixAllContext, fixAllTitle, showPreviewChangesDialog).ConfigureAwait(false);
        }

        // Runs the fix-all provider under the wait dialog, linking the dialog's
        // cancellation token with the context's own.  Returns null (and logs the
        // outcome) when the user cancelled or when no action was produced.
        private CodeAction GetFixAllCodeAction(FixAllProvider fixAllProvider, FixAllContext fixAllContext, string fixAllTitle, string waitDialogMessage, out bool userCancelled)
        {
            userCancelled = false;
            // Compute fix all occurrences code fix for the given fix all context.
            // Bring up a cancellable wait dialog.
            CodeAction codeAction = null;
            using (Logger.LogBlock(FunctionId.CodeFixes_FixAllOccurrencesComputation, fixAllContext.CancellationToken))
            {
                var result = _waitIndicator.Wait(
                    fixAllTitle,
                    waitDialogMessage,
                    allowCancel: true,
                    action: waitContext =>
                    {
                        fixAllContext.CancellationToken.ThrowIfCancellationRequested();
                        // Cancel the computation when either the dialog or the outer
                        // context is cancelled.
                        using (var linkedCts =
                            CancellationTokenSource.CreateLinkedTokenSource(waitContext.CancellationToken, fixAllContext.CancellationToken))
                        {
                            try
                            {
                                var fixAllContextWithCancellation = fixAllContext.WithCancellationToken(linkedCts.Token);
                                var fixTask = fixAllProvider.GetFixAsync(fixAllContextWithCancellation);
                                if (fixTask != null)
                                {
                                    // Block the wait-dialog worker until the provider finishes.
                                    codeAction = fixTask.WaitAndGetResult(linkedCts.Token);
                                }
                            }
                            catch (OperationCanceledException)
                            {
                                // Only propagate if the outer context itself was cancelled;
                                // a dialog-only cancellation is reported via 'result'.
                                fixAllContext.CancellationToken.ThrowIfCancellationRequested();
                            }
                        }
                    });
                userCancelled = result == WaitIndicatorResult.Canceled;
                var cancelled = userCancelled || codeAction == null;
                if (cancelled)
                {
                    // 'timedOut' covers the "provider produced nothing without user cancel" case.
                    FixAllLogger.LogComputationResult(completed: false, timedOut: result != WaitIndicatorResult.Canceled);
                    return null;
                }
            }
            FixAllLogger.LogComputationResult(completed: true);
            return codeAction;
        }

        // Fetches the action's operations and changed solution, optionally filtered
        // through the preview-changes dialog, then rewrites the apply-changes
        // operation to target the (possibly user-trimmed) solution.
        private async Task<IEnumerable<CodeActionOperation>> GetFixAllOperationsAsync(CodeAction codeAction, FixAllContext fixAllContext, string fixAllPreviewChangesTitle, bool showPreviewChangesDialog)
        {
            // We have computed the fix all occurrences code fix.
            // Now fetch the new solution with applied fix and bring up the Preview changes dialog.
            var cancellationToken = fixAllContext.CancellationToken;
            var workspace = fixAllContext.Project.Solution.Workspace;
            cancellationToken.ThrowIfCancellationRequested();
            var operations = await codeAction.GetOperationsAsync(cancellationToken).ConfigureAwait(false);
            if (operations == null)
            {
                return null;
            }
            cancellationToken.ThrowIfCancellationRequested();
            var newSolution = await codeAction.GetChangedSolutionInternalAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
            if (showPreviewChangesDialog)
            {
                newSolution = PreviewChanges(
                    fixAllContext.Project.Solution,
                    newSolution,
                    fixAllPreviewChangesTitle,
                    codeAction.Title,
                    fixAllContext.Project.Language,
                    workspace,
                    cancellationToken);
                if (newSolution == null)
                {
                    // User cancelled the preview dialog.
                    return null;
                }
            }
            // Get a code action, with apply changes operation replaced with the newSolution.
            return GetNewFixAllOperations(operations, newSolution, cancellationToken);
        }

        /// <summary>
        /// Shows the preview-changes dialog for the fix-all result and returns the
        /// (possibly partially applied) solution the user accepted, or null when the
        /// user cancelled.  The outcome is logged either way.
        /// </summary>
        internal static Solution PreviewChanges(
            Solution currentSolution,
            Solution newSolution,
            string fixAllPreviewChangesTitle,
            string fixAllTopLevelHeader,
            string languageOpt,
            Workspace workspace,
            CancellationToken cancellationToken = default(CancellationToken))
        {
            cancellationToken.ThrowIfCancellationRequested();
            using (Logger.LogBlock(FunctionId.CodeFixes_FixAllOccurrencesPreviewChanges, cancellationToken))
            {
                var previewService = workspace.Services.GetService<IPreviewDialogService>();
                // Pick the dialog glyph from the project language (assembly glyph when unknown).
                var glyph = languageOpt == null ?
                    Glyph.Assembly :
                    languageOpt == LanguageNames.CSharp ? Glyph.CSharpProject : Glyph.BasicProject;
                var changedSolution = previewService.PreviewChanges(
                    string.Format(EditorFeaturesResources.PreviewChangesOf, fixAllPreviewChangesTitle),
                    "vs.codefix.fixall",
                    fixAllTopLevelHeader,
                    fixAllPreviewChangesTitle,
                    glyph,
                    newSolution,
                    currentSolution);
                if (changedSolution == null)
                {
                    // User clicked cancel.
                    FixAllLogger.LogPreviewChangesResult(applied: false);
                    return null;
                }
                FixAllLogger.LogPreviewChangesResult(applied: true, allChangesApplied: changedSolution == newSolution);
                return changedSolution;
            }
        }

        // Lazily replaces the first ApplyChangesOperation with one targeting
        // 'newSolution'; all other operations pass through unchanged.
        private IEnumerable<CodeActionOperation> GetNewFixAllOperations(IEnumerable<CodeActionOperation> operations, Solution newSolution, CancellationToken cancellationToken)
        {
            bool foundApplyChanges = false;
            foreach (var operation in operations)
            {
                cancellationToken.ThrowIfCancellationRequested();
                if (!foundApplyChanges)
                {
                    var applyChangesOperation = operation as ApplyChangesOperation;
                    if (applyChangesOperation != null)
                    {
                        foundApplyChanges = true;
                        yield return new ApplyChangesOperation(newSolution);
                        continue;
                    }
                }
                yield return operation;
            }
        }
    }
}
| |
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. See License.txt in the project root for license information.
namespace System.Data.Entity.Core.Mapping
{
using System.Data.Entity.Core.Metadata.Edm;
using System.Data.Entity.Resources;
using System.Linq;
using Xunit;
public class AssociationSetMappingTests
{
[Fact]
public void Can_add_get_remove_column_conditions()
{
var entitySet1 = new EntitySet();
var associationSet = new AssociationSet("AS", new AssociationType("A", XmlConstants.ModelNamespace_3, false, DataSpace.CSpace));
var associationSetMapping
= new AssociationSetMapping(associationSet, entitySet1);
Assert.Empty(associationSetMapping.Conditions);
var conditionPropertyMapping
= new ValueConditionMapping(new EdmProperty("C", TypeUsage.Create(new PrimitiveType { DataSpace = DataSpace.SSpace })), 42);
associationSetMapping.AddCondition(conditionPropertyMapping);
Assert.Same(conditionPropertyMapping, associationSetMapping.Conditions.Single());
associationSetMapping.RemoveCondition(conditionPropertyMapping);
Assert.Empty(associationSetMapping.Conditions);
var entitySet = new EntitySet();
var associationSet = new AssociationSet("AS", new AssociationType("A", XmlConstants.ModelNamespace_3, false, DataSpace.CSpace));
var associationSetMapping
= new AssociationSetMapping(associationSet, entitySet);
var conditionPropertyMapping
= new ValueConditionMapping(new EdmProperty("C", TypeUsage.Create(new PrimitiveType { DataSpace = DataSpace.SSpace })), 42);
associationSetMapping.SetReadOnly();
}
[Fact]
public void Can_initialize_with_entity_set()
{
var entitySet = new EntitySet();
var associationSet = new AssociationSet("AS", new AssociationType("A", XmlConstants.ModelNamespace_3, false, DataSpace.CSpace));
var associationSetMapping
= new AssociationSetMapping(associationSet, entitySet);
var associationTypeMapping = associationSetMapping.TypeMappings.Single();
Assert.NotNull(associationTypeMapping);
Assert.Same(associationSet.ElementType, associationTypeMapping.Types.Single());
Assert.Same(associationSetMapping, associationTypeMapping.SetMapping);
var mappingFragment = associationTypeMapping.MappingFragments.Single();
Assert.Same(entitySet, mappingFragment.TableSet);
}
[Fact]
public void Can_get_association_set()
{
var entitySet = new EntitySet();
var associationSet = new AssociationSet("AS", new AssociationType("A", XmlConstants.ModelNamespace_3, false, DataSpace.CSpace));
var associationSetMapping
= new AssociationSetMapping(associationSet, entitySet);
Assert.Same(associationSet, associationSetMapping.AssociationSet);
}
[Fact]
public void Can_get_and_set_store_entity_set()
{
var entitySet1 = new EntitySet();
var associationSet = new AssociationSet("AS", new AssociationType("A", XmlConstants.ModelNamespace_3, false, DataSpace.CSpace));
var associationSetMapping
= new AssociationSetMapping(associationSet, entitySet1);
Assert.Same(entitySet1, associationSetMapping.StoreEntitySet);
var entitySet2 = new EntitySet();
associationSetMapping.StoreEntitySet = entitySet2;
Assert.Same(entitySet2, associationSetMapping.StoreEntitySet);
}
[Fact]
public void Can_get_table()
{
var entityType = new EntityType("E", "N", DataSpace.CSpace);
var entitySet = new EntitySet("ES", null, null, null, entityType);
var associationSet = new AssociationSet("AS", new AssociationType("A", XmlConstants.ModelNamespace_3, false, DataSpace.CSpace));
var associationSetMapping
= new AssociationSetMapping(associationSet, entitySet);
Assert.Same(entityType, associationSetMapping.Table);
}
[Fact]
public void Can_get_and_set_source_and_target_end_mappings()
{
var entitySet1 = new EntitySet();
var associationSet = new AssociationSet("AS", new AssociationType("A", XmlConstants.ModelNamespace_3, false, DataSpace.CSpace));
var associationSetMapping
= new AssociationSetMapping(associationSet, entitySet1);
Assert.Null(associationSetMapping.SourceEndMapping);
Assert.Null(associationSetMapping.TargetEndMapping);
var sourceEndMapping = new EndPropertyMapping();
associationSetMapping.SourceEndMapping = sourceEndMapping;
Assert.Same(sourceEndMapping, associationSetMapping.SourceEndMapping);
var targetEndMapping = new EndPropertyMapping();
associationSetMapping.TargetEndMapping = targetEndMapping;
Assert.Same(targetEndMapping, associationSetMapping.TargetEndMapping);
}
[Fact]
public void Cannot_set_source_end_mapping_when_read_only()
{
var entitySet = new EntitySet();
var associationSet = new AssociationSet("AS", new AssociationType("A", XmlConstants.ModelNamespace_3, false, DataSpace.CSpace));
var associationSetMapping
= new AssociationSetMapping(associationSet, entitySet);
var sourceEndMapping = new EndPropertyMapping();
associationSetMapping.SetReadOnly();
Assert.Equal(
Strings.OperationOnReadOnlyItem,
Assert.Throws<InvalidOperationException>(
() => (associationSetMapping.SourceEndMapping = sourceEndMapping)).Message);
}
[Fact]
public void Cannot_set_target_end_mapping_when_read_only()
{
var entitySet = new EntitySet();
var associationSet = new AssociationSet("AS", new AssociationType("A", XmlConstants.ModelNamespace_3, false, DataSpace.CSpace));
var associationSetMapping
= new AssociationSetMapping(associationSet, entitySet);
var targetEndMapping = new EndPropertyMapping();
associationSetMapping.SetReadOnly();
Assert.Equal(
Strings.OperationOnReadOnlyItem,
Assert.Throws<InvalidOperationException>(
() => (associationSetMapping.TargetEndMapping = targetEndMapping)).Message);
}
[Fact]
public void Cannot_set__modification_function_mapping_when_read_only()
{
var entitySet = new EntitySet();
var associationSet = new AssociationSet("AS", new AssociationType("A", XmlConstants.ModelNamespace_3, false, DataSpace.CSpace));
var associationSetMapping
= new AssociationSetMapping(associationSet, entitySet);
var modificationFunctionMapping = new AssociationSetModificationFunctionMapping(associationSet, null, null);
associationSetMapping.SetReadOnly();
Assert.Equal(
Strings.OperationOnReadOnlyItem,
Assert.Throws<InvalidOperationException>(
() => (associationSetMapping.ModificationFunctionMapping = modificationFunctionMapping)).Message);
}
[Fact]
public void Cannot_add_condition_when_read_only()
{
var entitySet = new EntitySet();
var associationSet = new AssociationSet("AS", new AssociationType("A", XmlConstants.ModelNamespace_3, false, DataSpace.CSpace));
var associationSetMapping
= new AssociationSetMapping(associationSet, entitySet);
var conditionPropertyMapping
= new ConditionPropertyMapping(null, new EdmProperty("C", TypeUsage.Create(new PrimitiveType() { DataSpace = DataSpace.SSpace })), 42, null);
associationSetMapping.SetReadOnly();
Assert.Equal(
Strings.OperationOnReadOnlyItem,
Assert.Throws<InvalidOperationException>(
() => associationSetMapping.AddCondition(conditionPropertyMapping)).Message);
}
[Fact]
public void Cannot_remove_condition_when_read_only()
{
var entitySet = new EntitySet();
var associationSet = new AssociationSet("AS", new AssociationType("A", XmlConstants.ModelNamespace_3, false, DataSpace.CSpace));
var associationSetMapping
= new AssociationSetMapping(associationSet, entitySet);
var conditionPropertyMapping
= new ConditionPropertyMapping(null, new EdmProperty("C", TypeUsage.Create(new PrimitiveType() { DataSpace = DataSpace.SSpace })), 42, null);
associationSetMapping.AddCondition(conditionPropertyMapping);
associationSetMapping.SetReadOnly();
Assert.Equal(
Strings.OperationOnReadOnlyItem,
Assert.Throws<InvalidOperationException>(
() => associationSetMapping.RemoveCondition(conditionPropertyMapping)).Message);
}
[Fact]
public void SetReadOnly_is_called_on_child_mapping_items()
{
var entityType = new EntityType("ET", "N", DataSpace.SSpace);
var entitySet = new EntitySet("ES", "S", "T", "Q", entityType);
var associationSet = new AssociationSet("AS", new AssociationType("A", XmlConstants.ModelNamespace_3, false, DataSpace.CSpace));
var associationSetMapping = new AssociationSetMapping(associationSet, entitySet, null);
var modificationFunctionMapping = new AssociationSetModificationFunctionMapping(associationSet, null, null);
associationSetMapping.ModificationFunctionMapping = modificationFunctionMapping;
Assert.False(associationSetMapping.AssociationTypeMapping.IsReadOnly);
Assert.False(modificationFunctionMapping.IsReadOnly);
associationSetMapping.SetReadOnly();
Assert.True(associationSetMapping.AssociationTypeMapping.IsReadOnly);
Assert.True(modificationFunctionMapping.IsReadOnly);
}
}
}
| |
/*
Copyright 2015-2018 Developer Express Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Threading;
using Google.Apis.Bigquery.v2;
using Google.Apis.Bigquery.v2.Data;
using Xunit;
namespace DevExpress.DataAccess.BigQuery.Tests {
    /// <summary>
    /// Helper that (re)creates the BigQuery dataset, tables and view the test suite
    /// runs against.  All creation entry points are marked Skip="Explicit" so they
    /// only execute when invoked deliberately.
    /// </summary>
    public class TestingInfrastructureHelper : IDisposable {
        public TestingInfrastructureHelper() {
            // One shared, opened connection for all table/dataset operations.
            connection = new BigQueryConnection(ConnectionStringHelper.JsonConnectionString);
            connection.Open();
        }

        public const string NatalityTableName = "natality";
        public const string Natality2TableName = "natality2";
        public const string NatalityViewName = "natalityview";
        public const string TimesTableName = "times";

        readonly BigQueryConnection connection;

        // Creates every table and view used by the tests.
        [Fact(Skip = "Explicit")]
        public void CreateDBTables() {
            CreateNatalityTable();
            CreateNatality2Table();
            CreateNatalityView();
            CreateTimesTable();
        }

        // Creates (replacing if present) the 'times' table and uploads its CSV data.
        [Fact(Skip = "Explicit")]
        public void CreateTimesTable() {
            CreateDatasetIfRequired();
            var table = new Table {
                Schema = CreateTimesTableSchema(),
                TableReference = new TableReference {
                    DatasetId = connection.DataSetId,
                    ProjectId = connection.ProjectId,
                    TableId = TimesTableName
                }
            };
            InsertTable(table);
            UploadData(table);
        }

        // Schema for the 'times' table: one nullable column per temporal type.
        TableSchema CreateTimesTableSchema() {
            var time = new TableFieldSchema {
                Name = "time",
                Type = "TIME",
                Mode = "NULLABLE"
            };
            var date = new TableFieldSchema {
                Name = "date",
                Type = "DATE",
                Mode = "NULLABLE"
            };
            var timestamp = new TableFieldSchema {
                Name = "timestamp",
                Type = "TIMESTAMP",
                Mode = "NULLABLE"
            };
            var datetime = new TableFieldSchema {
                Name = "datetime",
                Type = "DATETIME",
                Mode = "NULLABLE"
            };
            return new TableSchema { Fields = new List<TableFieldSchema> { time, date, timestamp, datetime } };
        }

        // Creates the configured dataset unless it already exists in the project.
        void CreateDatasetIfRequired() {
            var dataSetList = connection.Service.Datasets.List(connection.ProjectId).Execute();
            if (dataSetList.Datasets == null || dataSetList.Datasets.All(d => d.DatasetReference.DatasetId != connection.DataSetId)) {
                var dataSet = new Dataset {
                    DatasetReference = new DatasetReference { DatasetId = connection.DataSetId, ProjectId = connection.ProjectId }
                };
                connection.Service.Datasets.Insert(dataSet, connection.ProjectId).Execute();
            }
        }

        // Creates (replacing if present) the 'natality' table and uploads its CSV data.
        [Fact(Skip = "Explicit")]
        public void CreateNatalityTable() {
            CreateDatasetIfRequired();
            var table = new Table {
                Schema = CreateNatalityTableSchema(),
                TableReference = new TableReference {
                    DatasetId = connection.DataSetId,
                    ProjectId = connection.ProjectId,
                    TableId = NatalityTableName
                }
            };
            InsertTable(table);
            UploadData(table);
        }

        static TableSchema CreateNatalityTableSchema() {
            var weight_pounds = new TableFieldSchema {
                Name = "weight_pounds",
                Type = "FLOAT",
                Mode = "NULLABLE"
            };
            var is_male = new TableFieldSchema {
                Name = "is_male",
                Type = "BOOLEAN",
                Mode = "NULLABLE"
            };
            return new TableSchema { Fields = new List<TableFieldSchema> { weight_pounds, is_male } };
        }

        // Inserts the table into the dataset, dropping any existing table of the same id first.
        void InsertTable(Table table) {
            var tableList = connection.Service.Tables.List(connection.ProjectId, connection.DataSetId).Execute();
            if (tableList.Tables != null && tableList.Tables.Any(t => t.TableReference.TableId == table.TableReference.TableId))
                connection.Service.Tables.Delete(connection.ProjectId, connection.DataSetId, table.TableReference.TableId).Execute();
            connection.Service.Tables.Insert(table, connection.ProjectId, connection.DataSetId).Execute();
        }

        // Uploads the embedded "<TableId>.csv" resource into the table via a load
        // job, then polls until the job reports DONE.
        void UploadData(Table table) {
            Job job = new Job();
            var config = new JobConfiguration();
            var configLoad = new JobConfigurationLoad {
                Schema = table.Schema,
                DestinationTable = table.TableReference,
                Encoding = "ISO-8859-1",
                CreateDisposition = "CREATE_IF_NEEDED",
                WriteDisposition = "",
                FieldDelimiter = ",",
                AllowJaggedRows = true,
                SourceFormat = "CSV"
            };
            config.Load = configLoad;
            job.Configuration = config;
            // NOTE(review): TickCount-based ids can collide or be negative — confirm uniqueness is acceptable here.
            var jobId = "---" + Environment.TickCount;
            var jobRef = new JobReference {
                JobId = jobId,
                ProjectId = connection.ProjectId
            };
            job.JobReference = jobRef;
            using (
                Stream stream =
                    Assembly.GetExecutingAssembly()
                            .GetManifestResourceStream(
                                $"DevExpress.DataAccess.BigQuery.Tests.Tests.{table.TableReference.TableId}.csv")) {
                var insertMediaUpload = new JobsResource.InsertMediaUpload(connection.Service,
                    job, job.JobReference.ProjectId, stream, "application/octet-stream");
                insertMediaUpload.Upload();
            }
            // Poll every 5 seconds until BigQuery marks the load job DONE.
            // NOTE(review): a failed job also reaches DONE; errors are not surfaced here.
            while (true) {
                Job job1 = connection.Service.Jobs.Get(connection.ProjectId, jobId).Execute();
                if (job1.Status.State.Equals("DONE")) {
                    break;
                }
                Thread.Sleep(5000);
            }
        }

        // Creates (replacing if present) the 'natality2' table and uploads its CSV data.
        [Fact(Skip = "Explicit")]
        public void CreateNatality2Table() {
            CreateDatasetIfRequired();
            var schema = CreateNatality2TableSchema();
            var table = new Table {
                Schema = schema,
                TableReference = new TableReference {
                    DatasetId = connection.DataSetId,
                    ProjectId = connection.ProjectId,
                    TableId = Natality2TableName
                }
            };
            InsertTable(table);
            UploadData(table);
        }

        static TableSchema CreateNatality2TableSchema() {
            var state = new TableFieldSchema {
                Name = "state",
                Type = "STRING",
                Mode = "NULLABLE"
            };
            var source_year = new TableFieldSchema {
                Name = "source_year",
                Type = "INTEGER",
                Mode = "NULLABLE"
            };
            var year = new TableFieldSchema {
                Name = "year",
                Type = "INTEGER",
                Mode = "NULLABLE"
            };
            var weight_pounds = new TableFieldSchema {
                Name = "weight_pounds",
                Type = "FLOAT",
                Mode = "NULLABLE"
            };
            var mother_married = new TableFieldSchema {
                Name = "mother_married",
                Type = "BOOLEAN",
                Mode = "NULLABLE"
            };
            return new TableSchema {
                Fields = new List<TableFieldSchema> { state, source_year, year, weight_pounds, mother_married }
            };
        }

        // Creates the 'natalityview' view projecting year/weight/state from natality2.
        [Fact(Skip = "Explicit")]
        void CreateNatalityView() {
            CreateDatasetIfRequired();
            Table table = new Table {
                TableReference = new TableReference {
                    DatasetId = connection.DataSetId,
                    ProjectId = connection.ProjectId,
                    TableId = NatalityViewName
                },
                View = new ViewDefinition {
                    Query = string.Format(@"SELECT [{1}.year] [year], [{1}.weight_pounds] [weight], [{1}.state] [state]
FROM [{0}.{1}] [{1}]", this.connection.DataSetId, Natality2TableName)
                }
            };
            InsertTable(table);
        }

        // Closes the shared connection.
        public void Dispose() {
            connection.Close();
        }
    }
}
| |
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
namespace Microsoft.Zelig.Debugger.ArmProcessor
{
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Drawing;
using System.Drawing.Imaging;
using System.Threading;
using System.Windows.Forms;
using System.Runtime.CompilerServices;
using Microsoft.Zelig.TargetModel.ArmProcessor;
using Microsoft.Zelig.CodeGeneration.IR.Abstractions;
using EncDef = Microsoft.Zelig.TargetModel.ArmProcessor.EncodingDefinition_ARM;
using Cfg = Microsoft.Zelig.Configuration.Environment;
using IR = Microsoft.Zelig.CodeGeneration.IR;
public class ProcessorHost : Emulation.Hosting.AbstractHost
{
class MemoryDeltaHolder : IDisposable
{
//
// State
//
ProcessorHost m_host;
//
// Constructor Methods
//
internal MemoryDeltaHolder( ProcessorHost host )
{
m_host = host;
lock(m_host)
{
foreach(var memDelta in m_host.m_notifyOnEnteringExecuting.ToArray())
{
memDelta.EnteringExecuting();
}
}
}
//
// Helper Methods
//
public void Dispose()
{
lock(m_host)
{
foreach(var memDelta in m_host.m_notifyOnExitingRunning.ToArray())
{
memDelta.ExitingRunning();
}
}
}
}
class SimulatorControlImpl : Emulation.Hosting.SimulatorControl
{
//
// State
//
ProcessorHost m_host;
//
// Constructor Methods
//
internal SimulatorControlImpl( ProcessorHost host )
{
m_host = host;
m_host.RegisterService( typeof(Emulation.Hosting.SimulatorControl), this );
}
//
// Helper Methods
//
public override void Wait( TimeSpan tm )
{
double span = tm.TotalMilliseconds;
int val = (span > 0 && span < int.MaxValue) ? (int)span : int.MaxValue;
if(val > 100)
{
val = 100;
}
m_host.m_implHalEvents.m_systemEvent.WaitOne( val, false );
}
}
//--//
class HalButtonsImpl : Emulation.Hosting.HalButtons
{
struct ButtonRecord
{
//
// State
//
internal uint m_buttonsPressed;
internal uint m_buttonsReleased;
}
//
// State
//
ProcessorHost m_host;
Queue<ButtonRecord> m_buttonQueue = new Queue<ButtonRecord>();
//
// Constructor Methods
//
internal HalButtonsImpl( ProcessorHost host )
{
m_host = host;
m_host.RegisterService( typeof(Emulation.Hosting.HalButtons), this );
}
//
// Helper Methods
//
public override bool GetNextStateChange( out uint buttonsPressed ,
out uint buttonsReleased )
{
lock(this)
{
if(m_buttonQueue.Count > 0)
{
ButtonRecord br = m_buttonQueue.Dequeue();
buttonsPressed = br.m_buttonsPressed;
buttonsReleased = br.m_buttonsReleased;
return true;
}
buttonsPressed = 0;
buttonsReleased = 0;
return false;
}
}
public override void QueueNextStateChange( uint buttonsPressed ,
uint buttonsReleased )
{
lock(this)
{
ButtonRecord br;
br.m_buttonsPressed = buttonsPressed;
br.m_buttonsReleased = buttonsReleased;
m_buttonQueue.Enqueue( br );
m_host.m_implHalEvents.Set( Emulation.Hosting.HalEvents.SYSTEM_EVENT_FLAG_BUTTON );
}
}
}
//--//
class HalEventsImpl : Emulation.Hosting.HalEvents
{
//
// State
//
private ProcessorHost m_host;
internal AutoResetEvent m_systemEvent = new AutoResetEvent( false );
internal uint m_systemFlags = 0;
//
// Constructor Methods
//
internal HalEventsImpl( ProcessorHost host )
{
m_host = host;
m_host.RegisterService( typeof(Emulation.Hosting.HalEvents), this );
}
//
// Helper Methods
//
public override void Clear( uint mask )
{
lock(this)
{
m_systemFlags &= ~mask;
}
}
public override void Set( uint mask )
{
lock(this)
{
m_systemFlags |= mask;
m_systemEvent.Set();
}
}
public override uint Get( uint mask )
{
lock(this)
{
uint res = m_systemFlags & mask;
m_systemFlags &= ~mask;
return res;
}
}
public override uint MaskedRead( uint mask )
{
lock(this)
{
return m_systemFlags & mask;
}
}
}
//--//
        //
        // State
        //
        DebuggerMainForm m_owner; // owning debugger main form
        Emulation.Hosting.AbstractEngine m_activeEngine; // engine currently linked to this host
        Dictionary<Type, Emulation.Hosting.AbstractEngine> m_engines; // engine cache keyed by configuration category type
        List< Emulation.Hosting.Breakpoint > m_breakpoints; // breakpoints applied during Execute/ExecuteStep
        SimulatorControlImpl m_implSimulatorControl;
        HalButtonsImpl m_implHalButtons;
        HalEventsImpl m_implHalEvents;
        MemoryDelta m_memoryDelta;
        List< MemoryDelta > m_notifyOnEnteringExecuting; // deltas notified when execution starts (see MemoryDeltaHolder)
        List< MemoryDelta > m_notifyOnExitingRunning; // deltas notified when execution stops
        uint m_softBreakpointTableAddress; // reset on every engine selection
//
// Constructor Methods
//
public ProcessorHost( DebuggerMainForm owner )
{
this.RegisterService( typeof(Emulation.Hosting.AbstractHost), this );
this.RegisterService( typeof(ProcessorHost ), this );
//--//
m_owner = owner;
m_engines = new Dictionary< Type, Emulation.Hosting.AbstractEngine >();
m_breakpoints = new List< Emulation.Hosting.Breakpoint >();
m_implSimulatorControl = new SimulatorControlImpl( this );
m_implHalButtons = new HalButtonsImpl ( this );
m_implHalEvents = new HalEventsImpl ( this );
m_notifyOnEnteringExecuting = new List< MemoryDelta >();
m_notifyOnExitingRunning = new List< MemoryDelta >();
}
public void SelectEngine(Cfg.EngineCategory category, InstructionSet iset)
{
m_softBreakpointTableAddress = 0;
this.Unlink( m_activeEngine );
bool match = false;
if(m_engines.TryGetValue(category.GetType(), out m_activeEngine) == true)
{
if(m_activeEngine.InstructionSet == iset)
{
match = true;
}
else
{
m_engines.Remove(category.GetType());
}
}
if(!match)
{
m_activeEngine = category.Instantiate(iset) as Emulation.Hosting.AbstractEngine;
if(m_activeEngine == null)
{
throw TypeConsistencyErrorException.Create("Unrecognized engine: {0}", category);
}
m_engines[category.GetType()] = m_activeEngine;
}
this.Link( m_activeEngine );
//--//
m_breakpoints.Clear();
}
//
// Helper Methods
//
internal bool GetAbsoluteTime( out ulong clockTicks ,
out ulong nanoseconds )
{
Emulation.Hosting.DeviceClockTicksTracking svc; GetHostingService( out svc );
if(svc == null)
{
clockTicks = 0;
nanoseconds = 0;
return false;
}
return svc.GetAbsoluteTime( out clockTicks, out nanoseconds );
}
public void Execute( ImageInformation imageInformation ,
Cfg.ProductCategory product )
{
ExecuteInner( imageInformation, product, false );
}
public void ExecuteStep( ImageInformation imageInformation ,
Cfg.ProductCategory product )
{
ExecuteInner( imageInformation, product, true );
}
// Core run loop shared by Execute and ExecuteStep.
// Each iteration: decides soft vs. hardware placement for every active breakpoint,
// patches soft breakpoints into RAM (remembering original words in 'cleanup'),
// runs or single-steps the target, restores the patched memory, and dispatches
// Hit() callbacks for breakpoints whose address matches the new program counter.
// Exits on single-step, on a StopExecution response, or when neither a
// NextInstruction response nor a pending step-over keeps the loop going.
void ExecuteInner( ImageInformation imageInformation ,
Cfg.ProductCategory product ,
bool fSingleStep )
{
// Required hosting services; JTAG may legitimately be absent (checked below).
Emulation.Hosting.ProcessorStatus svcPS; this.GetHostingService( out svcPS );
Emulation.Hosting.ProcessorControl svcPC; this.GetHostingService( out svcPC );
Emulation.Hosting.MemoryProvider svcMP; this.GetHostingService( out svcMP );
Emulation.Hosting.JTagConnector svcJTAG; this.GetHostingService( out svcJTAG );
// Hardware breakpoints to arm this iteration.
var lst = new List< Emulation.Hosting.Breakpoint >();
object softBreakpointOpcode;
int maxHardBreakpoints;
svcPC.GetBreakpointCapabilities( out softBreakpointOpcode, out maxHardBreakpoints );
// Maps patched address -> original instruction word, so memory can be restored after the run.
var cleanup = HashTableFactory.New< uint, uint >();
while(true)
{
lst.Clear();
uint pc = svcPS.ProgramCounter;
bool fStep = fSingleStep;
// A breakpoint sitting on the current statement forces a single step so we can move past it.
foreach(var bp in m_breakpoints)
{
fStep |= bp.ShouldStopOverStatement( pc );
bp.SetAs = Emulation.Hosting.Breakpoint.Status.NotSet;
}
cleanup.Clear();
// Three placement passes: 0 = hardware-mandatory, 1 = software-preferred non-optional,
// 2 = optional software breakpoints. NOTE(review): optional breakpoints that also demand
// hardware (IsOptional && ShouldImplementInHardware) match no pass — confirm intended.
for(int pass = 0; pass < 3; pass++)
{
bool fForceHardware = (pass == 0);
bool fIncludeOptional = (pass == 2);
foreach(var bp in m_breakpoints)
{
if(bp.IsActive &&
bp.ShouldIgnoreOnce == false &&
bp.ShouldImplementInHardware == fForceHardware &&
bp.IsOptional == fIncludeOptional )
{
uint address = bp.Address;
if(bp.ShouldImplementInHardware == false)
{
// Soft breakpoint is only possible when the target word lives in RAM and the
// processor reports a dedicated soft-breakpoint opcode.
if(product != null && softBreakpointOpcode is uint)
{
var mem = product.FindMemory( address );
if(mem != null && mem.IsRAM)
{
if(cleanup.ContainsKey( address ) == false)
{
uint val;
if(svcMP.GetUInt32( address, out val ))
{
uint expectedVal;
// Only patch when live memory still matches the built image, i.e. the
// instruction has not been modified since download.
if(imageInformation.TryReadUInt32FromPhysicalImage( address, out expectedVal ) && expectedVal == val)
{
cleanup[address] = val;
svcMP.SetUInt32( address, (uint)softBreakpointOpcode );
bp.SetAs = Emulation.Hosting.Breakpoint.Status.SoftBreakpoint;
continue;
}
}
}
}
}
}
// Fallback: consume one of the limited hardware slots. Breakpoints beyond the
// limit stay NotSet for this run.
if(lst.Count < maxHardBreakpoints)
{
lst.Add( bp );
bp.SetAs = Emulation.Hosting.Breakpoint.Status.HardBreakpoint;
}
}
}
}
// Let the target know which words were patched and flush its instruction cache.
if(cleanup.Count > 0 && svcJTAG != null)
{
PublishSoftBreakpoints( imageInformation, product, svcMP, svcJTAG, cleanup );
FlushCache( imageInformation, product, svcMP, svcJTAG );
}
//--//
svcPC.StopExecution = false;
if(fStep)
{
svcPC.ExecuteStep( lst );
}
else
{
svcPC.Execute( lst );
}
// Undo all soft-breakpoint patches so memory reads see the real program again.
if(cleanup.Count > 0)
{
FlushSoftBreakpoints( svcMP );
foreach(uint address in cleanup.Keys)
{
svcMP.SetUInt32( address, cleanup[address] );
}
}
pc = svcPS.ProgramCounter;
Emulation.Hosting.Breakpoint.Response res = Emulation.Hosting.Breakpoint.Response.DoNothing;
// Snapshot (ToArray) because Hit() callbacks may add/remove breakpoints while we iterate.
foreach(var bp in m_breakpoints.ToArray())
{
if(bp.IsActive &&
bp.ShouldIgnoreOnce == false )
{
if(bp.Address == pc)
{
res |= bp.Hit();
}
}
bp.ClearIgnoreFlag();
}
if(fSingleStep)
{
break;
}
if((res & Emulation.Hosting.Breakpoint.Response.StopExecution) != 0)
{
break;
}
if((res & Emulation.Hosting.Breakpoint.Response.NextInstruction) != 0)
{
continue;
}
// A forced step (step-over) resumes the loop; otherwise the run is complete.
if(fStep)
{
continue;
}
break;
}
RemoveTemporaryBreakpoints();
}
// Writes the set of patched (address, original word) pairs into the target's
// soft-breakpoint table so on-target code can recognize/unwind them.
// On first use, discovers the table address by executing the firmware's
// GetSoftBreakpointTable hook over JTAG, using the top 128 bytes of bootstrap
// RAM as a scratch stack (saved and restored around the call).
private void PublishSoftBreakpoints( ImageInformation imageInformation ,
Cfg.ProductCategory product ,
Emulation.Hosting.MemoryProvider svcMP ,
Emulation.Hosting.JTagConnector svcJTAG ,
GrowOnlyHashTable< uint, uint > cleanup )
{
if(m_softBreakpointTableAddress == 0)
{
var md = imageInformation.TypeSystem.TryGetHandler( Runtime.DebuggerHook.GetSoftBreakpointTable );
if(md != null)
{
var reg = imageInformation.ResolveMethodToRegion( md );
if(reg != null)
{
var mem = product.FindAnyBootstrapRAM();
if(mem != null)
{
// Borrow 128 bytes at the end of bootstrap RAM for the hook's stack.
uint blockEnd = mem.EndAddress;
uint blockStart = blockEnd - 128;
byte[] oldState;
svcMP.GetBlock( blockStart, 128, 4, out oldState );
// Supervisor mode, interrupts masked, PC at the hook, SVC stack at the scratch top.
var input = new []
{
new Emulation.Hosting.JTagConnector.RegisterSet { Name = "CPSR" , Value = EncDef.c_psr_I | EncDef.c_psr_F | EncDef.c_psr_mode_SVC },
new Emulation.Hosting.JTagConnector.RegisterSet { Name = "PC" , Value = reg.ExternalAddress },
new Emulation.Hosting.JTagConnector.RegisterSet { Name = "Svc_R13", Value = blockEnd },
};
// R0 carries the hook's return value (the table address per the ARM calling convention).
var output = new []
{
new Emulation.Hosting.JTagConnector.RegisterSet { Name = "R0" },
};
svcJTAG.ExecuteCode( 1000, input, output );
svcMP.SetBlock( blockStart, oldState, 4 );
if(output[0].Value is uint)
{
m_softBreakpointTableAddress = (uint)output[0].Value;
}
}
}
}
}
if(m_softBreakpointTableAddress != 0)
{
// Table layout (as used here): [0] = uint capacity, then 8-byte entries of
// { patched address, original word }. Entries beyond the capacity are dropped.
uint len;
if(svcMP.GetUInt32( m_softBreakpointTableAddress, out len ))
{
uint pos = 0;
foreach(var address in cleanup.Keys)
{
if(pos < len)
{
svcMP.SetUInt32( m_softBreakpointTableAddress + sizeof(uint) + pos * 8 , address );
svcMP.SetUInt32( m_softBreakpointTableAddress + sizeof(uint) + pos * 8 + 4, cleanup[address] );
}
pos++;
}
}
}
}
/// <summary>
/// Clears the target-side soft-breakpoint table after a run, by zeroing the
/// first entry's address word (presumably acting as a list terminator —
/// see PublishSoftBreakpoints for the table layout).
/// </summary>
private void FlushSoftBreakpoints( Emulation.Hosting.MemoryProvider svcMP )
{
    if(m_softBreakpointTableAddress == 0)
    {
        return; // Table never located; nothing published, nothing to flush.
    }

    uint capacity;
    if(svcMP.GetUInt32( m_softBreakpointTableAddress, out capacity ) && capacity > 0)
    {
        svcMP.SetUInt32( m_softBreakpointTableAddress + sizeof(uint), 0 );
    }
}
// Invokes the firmware's FlushInstructionCache hook over JTAG so the target's
// I-cache observes the soft-breakpoint opcodes just written to RAM.
// Mirrors the scratch-stack technique in PublishSoftBreakpoints: the top 128
// bytes of bootstrap RAM are saved, used as the hook's stack, and restored.
// Silently does nothing when the hook, its region, or bootstrap RAM is missing.
private static void FlushCache( ImageInformation imageInformation ,
Cfg.ProductCategory product ,
Emulation.Hosting.MemoryProvider svcMP ,
Emulation.Hosting.JTagConnector svcJTAG )
{
var md = imageInformation.TypeSystem.TryGetHandler( Runtime.DebuggerHook.FlushInstructionCache );
if(md != null)
{
var reg = imageInformation.ResolveMethodToRegion( md );
if(reg != null)
{
var mem = product.FindAnyBootstrapRAM();
if(mem != null)
{
uint blockEnd = mem.EndAddress;
uint blockStart = blockEnd - 128;
byte[] oldState;
svcMP.GetBlock( blockStart, 128, 4, out oldState );
// Supervisor mode, interrupts masked; no output registers needed for this hook.
var input = new []
{
new Emulation.Hosting.JTagConnector.RegisterSet { Name = "CPSR" , Value = EncDef.c_psr_I | EncDef.c_psr_F | EncDef.c_psr_mode_SVC },
new Emulation.Hosting.JTagConnector.RegisterSet { Name = "PC" , Value = reg.ExternalAddress },
new Emulation.Hosting.JTagConnector.RegisterSet { Name = "Svc_R13", Value = blockEnd },
};
svcJTAG.ExecuteCode( 1000, input, null );
svcMP.SetBlock( blockStart, oldState, 4 );
}
}
}
}
//--//
/// <summary>
/// Subscribes a MemoryDelta to execution-state notifications.
/// A delta is added to each requested list at most once.
/// </summary>
/// <param name="memDelta">The delta to notify.</param>
/// <param name="fEntering">Notify when execution is about to start.</param>
/// <param name="fExiting">Notify when execution has stopped.</param>
internal void RegisterForNotification( MemoryDelta memDelta  ,
                                       bool        fEntering ,
                                       bool        fExiting  )
{
    if(fEntering && !m_notifyOnEnteringExecuting.Contains( memDelta ))
    {
        m_notifyOnEnteringExecuting.Add( memDelta );
    }

    if(fExiting && !m_notifyOnExitingRunning.Contains( memDelta ))
    {
        m_notifyOnExitingRunning.Add( memDelta );
    }
}
/// <summary>
/// Removes a MemoryDelta from the selected notification lists.
/// Removing a delta that was never registered is a no-op.
/// </summary>
internal void UnregisterForNotification( MemoryDelta memDelta  ,
                                         bool        fEntering ,
                                         bool        fExiting  )
{
    if(fEntering) m_notifyOnEnteringExecuting.Remove( memDelta );
    if(fExiting ) m_notifyOnExitingRunning   .Remove( memDelta );
}
/// <summary>
/// Temporarily suspends memory-delta updates; disposing the returned token
/// presumably resumes them (see MemoryDeltaHolder — confirm its Dispose semantics).
/// </summary>
public IDisposable SuspendMemoryDeltaUpdates()
{
    return new MemoryDeltaHolder( this );
}
//--//
/// <summary>
/// Drops every breakpoint flagged IsTemporary from the list.
/// </summary>
private void RemoveTemporaryBreakpoints()
{
    // Iterate backwards so RemoveAt never shifts an index we still have to visit.
    for(int i = m_breakpoints.Count - 1; i >= 0; i--)
    {
        if(m_breakpoints[i].IsTemporary)
        {
            m_breakpoints.RemoveAt( i );
        }
    }
}
/// <summary>
/// Returns the breakpoint at <paramref name="address"/> with the given debug info
/// and callback, creating and registering a new one only when no identical
/// breakpoint already exists (idempotent).
/// </summary>
public Emulation.Hosting.Breakpoint CreateBreakpoint( uint                                 address ,
                                                      Debugging.DebugInfo                  di      ,
                                                      Emulation.Hosting.Breakpoint.Callback target  )
{
    foreach(var existing in m_breakpoints)
    {
        if(existing.Address   == address &&
           existing.DebugInfo == di      &&
           existing.Target    == target   )
        {
            return existing;
        }
    }

    var created = new Emulation.Hosting.Breakpoint( address, di, target );

    m_breakpoints.Add( created );

    return created;
}
/// <summary>
/// Re-registers a breakpoint; the Remove-then-Add pairing also moves an
/// already-registered breakpoint to the end of the list.
/// </summary>
public void RestoreBreakpoint( Emulation.Hosting.Breakpoint bp )
{
    m_breakpoints.Remove( bp );
    m_breakpoints.Add   ( bp );
}
/// <summary>
/// Unregisters a breakpoint; a no-op when it is not currently registered.
/// </summary>
public void RemoveBreakpoint( Emulation.Hosting.Breakpoint bp )
{
    m_breakpoints.Remove( bp );
}
//
// Access Methods
//
/// <summary>
/// Snapshot of the currently registered breakpoints; callers may iterate the
/// returned array safely while the live list changes.
/// </summary>
public Emulation.Hosting.Breakpoint[] Breakpoints
{
    get { return m_breakpoints.ToArray(); }
}
// Lazily creates (and thereafter re-synchronizes) the MemoryDelta tied to this host.
// First access allocates the delta and registers it for both entering- and
// exiting-execution notifications; later accesses re-sync it against the image.
public MemoryDelta MemoryDelta
{
get
{
// NOTE(review): lock(this) is an anti-pattern (external code can take the same
// lock and deadlock); prefer a private readonly gate object — requires adding a
// field to the enclosing class.
lock(this)
{
if(m_memoryDelta == null)
{
m_memoryDelta = new MemoryDelta( m_owner.ImageInformation, this );
RegisterForNotification( m_memoryDelta, true, true );
}
else
{
m_memoryDelta.Synchronize( m_owner.ImageInformation, this );
}
}
// Read outside the lock: returns the same cached instance set above.
return m_memoryDelta;
}
}
}
}
| |
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
// <auto-generated />
namespace NorthwindRepository{
/// <summary>
/// Strongly-typed collection for the AlphabeticalListOfProduct class.
/// </summary>
// SubSonic-generated type (see the <auto-generated /> marker above): all behavior
// comes from ReadOnlyList<,>; the partial keyword is the extension point for hand-written code.
[Serializable]
public partial class AlphabeticalListOfProductCollection : ReadOnlyList<AlphabeticalListOfProduct, AlphabeticalListOfProductCollection>
{
public AlphabeticalListOfProductCollection() {}
}
/// <summary>
/// This is Read-only wrapper class for the Alphabetical list of products view.
/// </summary>
// SubSonic-generated record over the "Alphabetical list of products" SQL view
// (see the <auto-generated /> marker above). Regenerate rather than hand-edit;
// put customizations in a separate partial-class file.
[Serializable]
public partial class AlphabeticalListOfProduct : ReadOnlyRecord<AlphabeticalListOfProduct>, IReadOnlyRecord
{
#region Default Settings
// Ensures the shared schema is built before any instance/query work.
protected static void SetSQLProps()
{
GetTableSchema();
}
#endregion
#region Schema Accessor
// Lazily-initialized, process-wide schema for the view.
public static TableSchema.Table Schema
{
get
{
if (BaseSchema == null)
{
SetSQLProps();
}
return BaseSchema;
}
}
// Builds the column-by-column schema once and registers it with the provider.
private static void GetTableSchema()
{
if(!IsSchemaInitialized)
{
//Schema declaration
TableSchema.Table schema = new TableSchema.Table("Alphabetical list of products", TableType.View, DataService.GetInstance("NorthwindRepository"));
schema.Columns = new TableSchema.TableColumnCollection();
schema.SchemaName = @"dbo";
//columns
TableSchema.TableColumn colvarProductID = new TableSchema.TableColumn(schema);
colvarProductID.ColumnName = "ProductID";
colvarProductID.DataType = DbType.Int32;
colvarProductID.MaxLength = 0;
colvarProductID.AutoIncrement = false;
colvarProductID.IsNullable = false;
colvarProductID.IsPrimaryKey = false;
colvarProductID.IsForeignKey = false;
colvarProductID.IsReadOnly = false;
schema.Columns.Add(colvarProductID);
TableSchema.TableColumn colvarProductName = new TableSchema.TableColumn(schema);
colvarProductName.ColumnName = "ProductName";
colvarProductName.DataType = DbType.String;
colvarProductName.MaxLength = 40;
colvarProductName.AutoIncrement = false;
colvarProductName.IsNullable = false;
colvarProductName.IsPrimaryKey = false;
colvarProductName.IsForeignKey = false;
colvarProductName.IsReadOnly = false;
schema.Columns.Add(colvarProductName);
TableSchema.TableColumn colvarSupplierID = new TableSchema.TableColumn(schema);
colvarSupplierID.ColumnName = "SupplierID";
colvarSupplierID.DataType = DbType.Int32;
colvarSupplierID.MaxLength = 0;
colvarSupplierID.AutoIncrement = false;
colvarSupplierID.IsNullable = true;
colvarSupplierID.IsPrimaryKey = false;
colvarSupplierID.IsForeignKey = false;
colvarSupplierID.IsReadOnly = false;
schema.Columns.Add(colvarSupplierID);
TableSchema.TableColumn colvarCategoryID = new TableSchema.TableColumn(schema);
colvarCategoryID.ColumnName = "CategoryID";
colvarCategoryID.DataType = DbType.Int32;
colvarCategoryID.MaxLength = 0;
colvarCategoryID.AutoIncrement = false;
colvarCategoryID.IsNullable = true;
colvarCategoryID.IsPrimaryKey = false;
colvarCategoryID.IsForeignKey = false;
colvarCategoryID.IsReadOnly = false;
schema.Columns.Add(colvarCategoryID);
TableSchema.TableColumn colvarQuantityPerUnit = new TableSchema.TableColumn(schema);
colvarQuantityPerUnit.ColumnName = "QuantityPerUnit";
colvarQuantityPerUnit.DataType = DbType.String;
colvarQuantityPerUnit.MaxLength = 20;
colvarQuantityPerUnit.AutoIncrement = false;
colvarQuantityPerUnit.IsNullable = true;
colvarQuantityPerUnit.IsPrimaryKey = false;
colvarQuantityPerUnit.IsForeignKey = false;
colvarQuantityPerUnit.IsReadOnly = false;
schema.Columns.Add(colvarQuantityPerUnit);
TableSchema.TableColumn colvarUnitPrice = new TableSchema.TableColumn(schema);
colvarUnitPrice.ColumnName = "UnitPrice";
colvarUnitPrice.DataType = DbType.Currency;
colvarUnitPrice.MaxLength = 0;
colvarUnitPrice.AutoIncrement = false;
colvarUnitPrice.IsNullable = true;
colvarUnitPrice.IsPrimaryKey = false;
colvarUnitPrice.IsForeignKey = false;
colvarUnitPrice.IsReadOnly = false;
schema.Columns.Add(colvarUnitPrice);
TableSchema.TableColumn colvarUnitsInStock = new TableSchema.TableColumn(schema);
colvarUnitsInStock.ColumnName = "UnitsInStock";
colvarUnitsInStock.DataType = DbType.Int16;
colvarUnitsInStock.MaxLength = 0;
colvarUnitsInStock.AutoIncrement = false;
colvarUnitsInStock.IsNullable = true;
colvarUnitsInStock.IsPrimaryKey = false;
colvarUnitsInStock.IsForeignKey = false;
colvarUnitsInStock.IsReadOnly = false;
schema.Columns.Add(colvarUnitsInStock);
TableSchema.TableColumn colvarUnitsOnOrder = new TableSchema.TableColumn(schema);
colvarUnitsOnOrder.ColumnName = "UnitsOnOrder";
colvarUnitsOnOrder.DataType = DbType.Int16;
colvarUnitsOnOrder.MaxLength = 0;
colvarUnitsOnOrder.AutoIncrement = false;
colvarUnitsOnOrder.IsNullable = true;
colvarUnitsOnOrder.IsPrimaryKey = false;
colvarUnitsOnOrder.IsForeignKey = false;
colvarUnitsOnOrder.IsReadOnly = false;
schema.Columns.Add(colvarUnitsOnOrder);
TableSchema.TableColumn colvarReorderLevel = new TableSchema.TableColumn(schema);
colvarReorderLevel.ColumnName = "ReorderLevel";
colvarReorderLevel.DataType = DbType.Int16;
colvarReorderLevel.MaxLength = 0;
colvarReorderLevel.AutoIncrement = false;
colvarReorderLevel.IsNullable = true;
colvarReorderLevel.IsPrimaryKey = false;
colvarReorderLevel.IsForeignKey = false;
colvarReorderLevel.IsReadOnly = false;
schema.Columns.Add(colvarReorderLevel);
TableSchema.TableColumn colvarDiscontinued = new TableSchema.TableColumn(schema);
colvarDiscontinued.ColumnName = "Discontinued";
colvarDiscontinued.DataType = DbType.Boolean;
colvarDiscontinued.MaxLength = 0;
colvarDiscontinued.AutoIncrement = false;
colvarDiscontinued.IsNullable = false;
colvarDiscontinued.IsPrimaryKey = false;
colvarDiscontinued.IsForeignKey = false;
colvarDiscontinued.IsReadOnly = false;
schema.Columns.Add(colvarDiscontinued);
TableSchema.TableColumn colvarAttributeXML = new TableSchema.TableColumn(schema);
colvarAttributeXML.ColumnName = "AttributeXML";
colvarAttributeXML.DataType = DbType.AnsiString;
colvarAttributeXML.MaxLength = -1;
colvarAttributeXML.AutoIncrement = false;
colvarAttributeXML.IsNullable = true;
colvarAttributeXML.IsPrimaryKey = false;
colvarAttributeXML.IsForeignKey = false;
colvarAttributeXML.IsReadOnly = false;
schema.Columns.Add(colvarAttributeXML);
TableSchema.TableColumn colvarDateCreated = new TableSchema.TableColumn(schema);
colvarDateCreated.ColumnName = "DateCreated";
colvarDateCreated.DataType = DbType.DateTime;
colvarDateCreated.MaxLength = 0;
colvarDateCreated.AutoIncrement = false;
colvarDateCreated.IsNullable = true;
colvarDateCreated.IsPrimaryKey = false;
colvarDateCreated.IsForeignKey = false;
colvarDateCreated.IsReadOnly = false;
schema.Columns.Add(colvarDateCreated);
TableSchema.TableColumn colvarProductGUID = new TableSchema.TableColumn(schema);
colvarProductGUID.ColumnName = "ProductGUID";
colvarProductGUID.DataType = DbType.Guid;
colvarProductGUID.MaxLength = 0;
colvarProductGUID.AutoIncrement = false;
colvarProductGUID.IsNullable = true;
colvarProductGUID.IsPrimaryKey = false;
colvarProductGUID.IsForeignKey = false;
colvarProductGUID.IsReadOnly = false;
schema.Columns.Add(colvarProductGUID);
TableSchema.TableColumn colvarCreatedOn = new TableSchema.TableColumn(schema);
colvarCreatedOn.ColumnName = "CreatedOn";
colvarCreatedOn.DataType = DbType.DateTime;
colvarCreatedOn.MaxLength = 0;
colvarCreatedOn.AutoIncrement = false;
colvarCreatedOn.IsNullable = false;
colvarCreatedOn.IsPrimaryKey = false;
colvarCreatedOn.IsForeignKey = false;
colvarCreatedOn.IsReadOnly = false;
schema.Columns.Add(colvarCreatedOn);
TableSchema.TableColumn colvarCreatedBy = new TableSchema.TableColumn(schema);
colvarCreatedBy.ColumnName = "CreatedBy";
colvarCreatedBy.DataType = DbType.String;
colvarCreatedBy.MaxLength = 50;
colvarCreatedBy.AutoIncrement = false;
colvarCreatedBy.IsNullable = true;
colvarCreatedBy.IsPrimaryKey = false;
colvarCreatedBy.IsForeignKey = false;
colvarCreatedBy.IsReadOnly = false;
schema.Columns.Add(colvarCreatedBy);
TableSchema.TableColumn colvarModifiedOn = new TableSchema.TableColumn(schema);
colvarModifiedOn.ColumnName = "ModifiedOn";
colvarModifiedOn.DataType = DbType.DateTime;
colvarModifiedOn.MaxLength = 0;
colvarModifiedOn.AutoIncrement = false;
colvarModifiedOn.IsNullable = false;
colvarModifiedOn.IsPrimaryKey = false;
colvarModifiedOn.IsForeignKey = false;
colvarModifiedOn.IsReadOnly = false;
schema.Columns.Add(colvarModifiedOn);
TableSchema.TableColumn colvarModifiedBy = new TableSchema.TableColumn(schema);
colvarModifiedBy.ColumnName = "ModifiedBy";
colvarModifiedBy.DataType = DbType.String;
colvarModifiedBy.MaxLength = 50;
colvarModifiedBy.AutoIncrement = false;
colvarModifiedBy.IsNullable = true;
colvarModifiedBy.IsPrimaryKey = false;
colvarModifiedBy.IsForeignKey = false;
colvarModifiedBy.IsReadOnly = false;
schema.Columns.Add(colvarModifiedBy);
TableSchema.TableColumn colvarDeleted = new TableSchema.TableColumn(schema);
colvarDeleted.ColumnName = "Deleted";
colvarDeleted.DataType = DbType.Boolean;
colvarDeleted.MaxLength = 0;
colvarDeleted.AutoIncrement = false;
colvarDeleted.IsNullable = false;
colvarDeleted.IsPrimaryKey = false;
colvarDeleted.IsForeignKey = false;
colvarDeleted.IsReadOnly = false;
schema.Columns.Add(colvarDeleted);
TableSchema.TableColumn colvarCategoryName = new TableSchema.TableColumn(schema);
colvarCategoryName.ColumnName = "CategoryName";
colvarCategoryName.DataType = DbType.String;
colvarCategoryName.MaxLength = 15;
colvarCategoryName.AutoIncrement = false;
colvarCategoryName.IsNullable = false;
colvarCategoryName.IsPrimaryKey = false;
colvarCategoryName.IsForeignKey = false;
colvarCategoryName.IsReadOnly = false;
schema.Columns.Add(colvarCategoryName);
BaseSchema = schema;
//add this schema to the provider
//so we can query it later
DataService.Providers["NorthwindRepository"].AddSchema("Alphabetical list of products",schema);
}
}
#endregion
#region Query Accessor
// Factory for a SubSonic Query bound to this view's schema.
public static Query CreateQuery()
{
return new Query(Schema);
}
#endregion
#region .ctors
public AlphabeticalListOfProduct()
{
SetSQLProps();
SetDefaults();
MarkNew();
}
public AlphabeticalListOfProduct(bool useDatabaseDefaults)
{
SetSQLProps();
if(useDatabaseDefaults)
{
ForceDefaults();
}
MarkNew();
}
// Loads the record whose primary key equals keyID.
public AlphabeticalListOfProduct(object keyID)
{
SetSQLProps();
LoadByKey(keyID);
}
// Loads the first record where columnName equals columnValue.
public AlphabeticalListOfProduct(string columnName, object columnValue)
{
SetSQLProps();
LoadByParam(columnName,columnValue);
}
#endregion
#region Props
// Generated column-backed properties: each delegates to Get/SetColumnValue on the base record.
[XmlAttribute("ProductID")]
[Bindable(true)]
public int ProductID
{
get
{
return GetColumnValue<int>("ProductID");
}
set
{
SetColumnValue("ProductID", value);
}
}
[XmlAttribute("ProductName")]
[Bindable(true)]
public string ProductName
{
get
{
return GetColumnValue<string>("ProductName");
}
set
{
SetColumnValue("ProductName", value);
}
}
[XmlAttribute("SupplierID")]
[Bindable(true)]
public int? SupplierID
{
get
{
return GetColumnValue<int?>("SupplierID");
}
set
{
SetColumnValue("SupplierID", value);
}
}
[XmlAttribute("CategoryID")]
[Bindable(true)]
public int? CategoryID
{
get
{
return GetColumnValue<int?>("CategoryID");
}
set
{
SetColumnValue("CategoryID", value);
}
}
[XmlAttribute("QuantityPerUnit")]
[Bindable(true)]
public string QuantityPerUnit
{
get
{
return GetColumnValue<string>("QuantityPerUnit");
}
set
{
SetColumnValue("QuantityPerUnit", value);
}
}
[XmlAttribute("UnitPrice")]
[Bindable(true)]
public decimal? UnitPrice
{
get
{
return GetColumnValue<decimal?>("UnitPrice");
}
set
{
SetColumnValue("UnitPrice", value);
}
}
[XmlAttribute("UnitsInStock")]
[Bindable(true)]
public short? UnitsInStock
{
get
{
return GetColumnValue<short?>("UnitsInStock");
}
set
{
SetColumnValue("UnitsInStock", value);
}
}
[XmlAttribute("UnitsOnOrder")]
[Bindable(true)]
public short? UnitsOnOrder
{
get
{
return GetColumnValue<short?>("UnitsOnOrder");
}
set
{
SetColumnValue("UnitsOnOrder", value);
}
}
[XmlAttribute("ReorderLevel")]
[Bindable(true)]
public short? ReorderLevel
{
get
{
return GetColumnValue<short?>("ReorderLevel");
}
set
{
SetColumnValue("ReorderLevel", value);
}
}
[XmlAttribute("Discontinued")]
[Bindable(true)]
public bool Discontinued
{
get
{
return GetColumnValue<bool>("Discontinued");
}
set
{
SetColumnValue("Discontinued", value);
}
}
[XmlAttribute("AttributeXML")]
[Bindable(true)]
public string AttributeXML
{
get
{
return GetColumnValue<string>("AttributeXML");
}
set
{
SetColumnValue("AttributeXML", value);
}
}
[XmlAttribute("DateCreated")]
[Bindable(true)]
public DateTime? DateCreated
{
get
{
return GetColumnValue<DateTime?>("DateCreated");
}
set
{
SetColumnValue("DateCreated", value);
}
}
[XmlAttribute("ProductGUID")]
[Bindable(true)]
public Guid? ProductGUID
{
get
{
return GetColumnValue<Guid?>("ProductGUID");
}
set
{
SetColumnValue("ProductGUID", value);
}
}
[XmlAttribute("CreatedOn")]
[Bindable(true)]
public DateTime CreatedOn
{
get
{
return GetColumnValue<DateTime>("CreatedOn");
}
set
{
SetColumnValue("CreatedOn", value);
}
}
[XmlAttribute("CreatedBy")]
[Bindable(true)]
public string CreatedBy
{
get
{
return GetColumnValue<string>("CreatedBy");
}
set
{
SetColumnValue("CreatedBy", value);
}
}
[XmlAttribute("ModifiedOn")]
[Bindable(true)]
public DateTime ModifiedOn
{
get
{
return GetColumnValue<DateTime>("ModifiedOn");
}
set
{
SetColumnValue("ModifiedOn", value);
}
}
[XmlAttribute("ModifiedBy")]
[Bindable(true)]
public string ModifiedBy
{
get
{
return GetColumnValue<string>("ModifiedBy");
}
set
{
SetColumnValue("ModifiedBy", value);
}
}
[XmlAttribute("Deleted")]
[Bindable(true)]
public bool Deleted
{
get
{
return GetColumnValue<bool>("Deleted");
}
set
{
SetColumnValue("Deleted", value);
}
}
[XmlAttribute("CategoryName")]
[Bindable(true)]
public string CategoryName
{
get
{
return GetColumnValue<string>("CategoryName");
}
set
{
SetColumnValue("CategoryName", value);
}
}
#endregion
#region Columns Struct
// Column-name constants for building queries without magic strings.
public struct Columns
{
public static string ProductID = @"ProductID";
public static string ProductName = @"ProductName";
public static string SupplierID = @"SupplierID";
public static string CategoryID = @"CategoryID";
public static string QuantityPerUnit = @"QuantityPerUnit";
public static string UnitPrice = @"UnitPrice";
public static string UnitsInStock = @"UnitsInStock";
public static string UnitsOnOrder = @"UnitsOnOrder";
public static string ReorderLevel = @"ReorderLevel";
public static string Discontinued = @"Discontinued";
public static string AttributeXML = @"AttributeXML";
public static string DateCreated = @"DateCreated";
public static string ProductGUID = @"ProductGUID";
public static string CreatedOn = @"CreatedOn";
public static string CreatedBy = @"CreatedBy";
public static string ModifiedOn = @"ModifiedOn";
public static string ModifiedBy = @"ModifiedBy";
public static string Deleted = @"Deleted";
public static string CategoryName = @"CategoryName";
}
#endregion
#region IAbstractRecord Members
public new CT GetColumnValue<CT>(string columnName) {
return base.GetColumnValue<CT>(columnName);
}
public object GetColumnValue(string columnName) {
return base.GetColumnValue<object>(columnName);
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using Microsoft.AspNetCore.Internal;
using Microsoft.AspNetCore.Testing;
using Xunit;
namespace Microsoft.AspNetCore.Internal.Tests
{
public class AdaptiveCapacityDictionaryTests
{
[Fact]
public void DefaultCtor()
{
    // Arrange & Act: construct with no arguments.
    var dictionary = new AdaptiveCapacityDictionary<string, string>();

    // Assert: empty, array-backed, no fallback dictionary allocated yet.
    Assert.Empty(dictionary);
    Assert.Empty(dictionary._arrayStorage);
    Assert.Null(dictionary._dictionaryStorage);
}
[Fact]
public void CreateFromNull()
{
// NOTE(review): despite the name, nothing null is passed — this body is
// identical to DefaultCtor. Presumably a leftover from a source type whose
// constructor accepted a nullable collection; confirm intent or remove the duplicate.
// Arrange
// Act
var dict = new AdaptiveCapacityDictionary<string, string>();
// Assert
Assert.Empty(dict);
Assert.Empty(dict._arrayStorage);
Assert.Null(dict._dictionaryStorage);
}
[Fact]
public void CreateWithCapacityOverDefaultLimit()
{
    // A capacity above the array/dictionary threshold (10) must start out on
    // dictionary storage and stay there as items are added.
    var dictionary = new AdaptiveCapacityDictionary<string, string>(capacity: 12, StringComparer.OrdinalIgnoreCase);

    Assert.Null(dictionary._arrayStorage);
    Assert.NotNull(dictionary._dictionaryStorage);

    for (var index = 0; index < 12; index++)
    {
        dictionary[index.ToString(CultureInfo.InvariantCulture)] = index.ToString(CultureInfo.InvariantCulture);
    }

    Assert.Null(dictionary._arrayStorage);
    Assert.NotNull(dictionary._dictionaryStorage);
    Assert.Equal(12, dictionary.Count);
}
[Fact]
public void CreateFromIEnumerableKeyValuePair_ThrowsExceptionForDuplicateKey()
{
// With an OrdinalIgnoreCase comparer "name" and "Name" collide, so the
// collection initializer's second Add must throw ArgumentException for "key".
// Arrange, Act & Assert
ExceptionAssert.ThrowsArgument(
() => new AdaptiveCapacityDictionary<string, object?>(StringComparer.OrdinalIgnoreCase)
{
{ "name", "Billy" },
{ "Name", "Joey" }
},
"key",
$"An element with the key 'Name' already exists in the {nameof(AdaptiveCapacityDictionary<string, object?>)}.");
}
[Fact]
public void Comparer_IsOrdinalIgnoreCase()
{
    // Arrange & Act
    var dictionary = new AdaptiveCapacityDictionary<string, string>(StringComparer.OrdinalIgnoreCase);

    // Assert: the exact comparer instance passed in is the one exposed.
    Assert.Same(StringComparer.OrdinalIgnoreCase, dictionary.Comparer);
}
// Our comparer is hardcoded to be IsReadOnly==false no matter what.
[Fact]
public void IsReadOnly_False()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, object>();

    // Act: the ICollection view must report the dictionary as mutable.
    var isReadOnly = ((ICollection<KeyValuePair<string, object?>>)dictionary).IsReadOnly;

    // Assert
    Assert.False(isReadOnly);
}
[Fact]
public void IndexGet_EmptyStringIsAllowed()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, string>();

    // Act: "" is a legal key; a miss yields null rather than throwing.
    var value = dictionary[""];

    // Assert
    Assert.Null(value);
}
[Fact]
public void IndexGet_EmptyStorage_ReturnsNull()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, string>();

    // Act: indexing an empty dictionary is a miss, not an exception.
    var value = dictionary["key"];

    // Assert
    Assert.Null(value);
}
[Fact]
public void IndexGet_ArrayStorage_NoMatch_ReturnsNull()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, object>();
    dictionary.Add("age", 30);

    // Act
    var value = dictionary["key"];

    // Assert: miss returns null and the backing store is still the small array.
    Assert.Null(value);
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void IndexGet_ListStorage_Match_ReturnsValue()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, object>()
    {
        { "key", "value" },
    };

    // Act
    var value = dictionary["key"];

    // Assert
    Assert.Equal("value", value);
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void IndexGet_ListStorage_MatchIgnoreCase_ReturnsValue()
{
    // Arrange: case-insensitive comparer, so "kEy" must find "key".
    var dictionary = new AdaptiveCapacityDictionary<string, object>(StringComparer.OrdinalIgnoreCase)
    {
        { "key", "value" },
    };

    // Act
    var value = dictionary["kEy"];

    // Assert
    Assert.Equal("value", value);
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void IndexSet_EmptyStringIsAllowed()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, string>();

    // Act: assignment through the empty-string key is permitted.
    dictionary[""] = "foo";

    // Assert
    Assert.Equal("foo", dictionary[""]);
}
[Fact]
public void IndexSet_EmptyStorage_UpgradesToList()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, object>();

    // Act
    dictionary["key"] = "value";

    // Assert: the single pair landed in array storage.
    Assert.Collection(dictionary, kvp => { Assert.Equal("key", kvp.Key); Assert.Equal("value", kvp.Value); });
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void IndexSet_ListStorage_NoMatch_AddsValue()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, object>()
    {
        { "age", 30 },
    };

    // Act: setting an absent key appends rather than overwrites.
    dictionary["key"] = "value";

    // Assert: both the original and the new entry are present.
    Assert.Collection(
        dictionary.OrderBy(kvp => kvp.Key),
        kvp => { Assert.Equal("age", kvp.Key); Assert.Equal(30, kvp.Value); },
        kvp => { Assert.Equal("key", kvp.Key); Assert.Equal("value", kvp.Value); });
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void IndexSet_ListStorage_Match_SetsValue()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, object>()
    {
        { "key", "value" },
    };

    // Act: setting an existing key overwrites in place — no duplicate entry.
    dictionary["key"] = "value";

    // Assert
    Assert.Collection(dictionary, kvp => { Assert.Equal("key", kvp.Key); Assert.Equal("value", kvp.Value); });
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void IndexSet_ListStorage_MatchIgnoreCase_SetsValue()
{
    // BUG FIX: this test was a verbatim copy of IndexSet_ListStorage_Match_SetsValue —
    // it used the default comparer and the same-cased key, so it never exercised the
    // ignore-case path its name promises. Mirror the IndexGet_ListStorage_MatchIgnoreCase
    // sibling: case-insensitive comparer plus a differently-cased key.
    // Arrange
    var dict = new AdaptiveCapacityDictionary<string, object>(StringComparer.OrdinalIgnoreCase)
    {
        { "key", "value" },
    };

    // Act: "kEy" must match the existing "key" entry and overwrite it, not add a second one.
    dict["kEy"] = "value";

    // Assert: still exactly one entry, under the originally-cased key.
    Assert.Collection(dict, kvp => { Assert.Equal("key", kvp.Key); Assert.Equal("value", kvp.Value); });
    Assert.IsType<KeyValuePair<string, object?>[]>(dict._arrayStorage);
}
[Fact]
public void Count_EmptyStorage()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, string>();

    // Act
    var itemCount = dictionary.Count;

    // Assert
    Assert.Equal(0, itemCount);
}
[Fact]
public void Count_ListStorage()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, object>()
    {
        { "key", "value" },
    };

    // Act
    var itemCount = dictionary.Count;

    // Assert
    Assert.Equal(1, itemCount);
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void Keys_EmptyStorage()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, object>();

    // Act
    var keys = dictionary.Keys;

    // Assert
    Assert.Empty(keys);
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void Keys_ListStorage()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, object>()
    {
        { "key", "value" },
    };

    // Act
    var keys = dictionary.Keys;

    // Assert
    Assert.Equal(new[] { "key" }, keys);
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void Values_EmptyStorage()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, object>();

    // Act
    var values = dictionary.Values;

    // Assert
    Assert.Empty(values);
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void Values_ListStorage()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, object>()
    {
        { "key", "value" },
    };

    // Act
    var values = dictionary.Values;

    // Assert
    Assert.Equal(new object[] { "value" }, values);
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void Add_EmptyStorage()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, object>();

    // Act
    dictionary.Add("key", "value");

    // Assert: the first Add lands in array storage.
    Assert.Collection(dictionary, kvp => { Assert.Equal("key", kvp.Key); Assert.Equal("value", kvp.Value); });
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void Add_EmptyStringIsAllowed()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, string>();

    // Act: "" is a legal key for Add, just as for the indexer.
    dictionary.Add("", "foo");

    // Assert
    Assert.Equal("foo", dictionary[""]);
}
[Fact]
public void Add_ListStorage()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, object>()
    {
        { "age", 30 },
    };

    // Act
    dictionary.Add("key", "value");

    // Assert: both entries present, still array-backed.
    Assert.Collection(
        dictionary.OrderBy(kvp => kvp.Key),
        kvp => { Assert.Equal("age", kvp.Key); Assert.Equal(30, kvp.Value); },
        kvp => { Assert.Equal("key", kvp.Key); Assert.Equal("value", kvp.Value); });
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void Add_DuplicateKey()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, object>()
    {
        { "key", "value" },
    };
    var message = $"An element with the key 'key' already exists in the {nameof(AdaptiveCapacityDictionary<string, string>)}";

    // Act & Assert: Add must reject an exact duplicate key.
    ExceptionAssert.ThrowsArgument(() => dictionary.Add("key", "value2"), "key", message);

    // Assert: the original entry is untouched.
    Assert.Collection(
        dictionary.OrderBy(kvp => kvp.Key),
        kvp => { Assert.Equal("key", kvp.Key); Assert.Equal("value", kvp.Value); });
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void Add_DuplicateKey_CaseInsensitive()
{
    // Arrange: with OrdinalIgnoreCase, "kEy" is a duplicate of "key".
    var dictionary = new AdaptiveCapacityDictionary<string, object>(StringComparer.OrdinalIgnoreCase)
    {
        { "key", "value" },
    };
    var message = $"An element with the key 'kEy' already exists in the {nameof(AdaptiveCapacityDictionary<string, string>)}";

    // Act & Assert
    ExceptionAssert.ThrowsArgument(() => dictionary.Add("kEy", "value2"), "key", message);

    // Assert: the original entry is untouched.
    Assert.Collection(
        dictionary.OrderBy(kvp => kvp.Key),
        kvp => { Assert.Equal("key", kvp.Key); Assert.Equal("value", kvp.Value); });
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void Add_KeyValuePair()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, object>()
    {
        { "age", 30 },
    };

    // Act: add through the ICollection<KeyValuePair<,>> surface.
    ((ICollection<KeyValuePair<string, object?>>)dictionary).Add(new KeyValuePair<string, object?>("key", "value"));

    // Assert
    Assert.Collection(
        dictionary.OrderBy(kvp => kvp.Key),
        kvp => { Assert.Equal("age", kvp.Key); Assert.Equal(30, kvp.Value); },
        kvp => { Assert.Equal("key", kvp.Key); Assert.Equal("value", kvp.Value); });
    Assert.IsType<KeyValuePair<string, object?>[]>(dictionary._arrayStorage);
}
[Fact]
public void Clear_EmptyStorage()
{
    // Arrange
    var dictionary = new AdaptiveCapacityDictionary<string, string>();

    // Act: clearing an already-empty dictionary is a no-op, not an error.
    dictionary.Clear();

    // Assert
    Assert.Empty(dictionary);
}
[Fact]
public void Clear_ListStorage()
{
    // Seed array-backed storage with a single entry.
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");

    map.Clear();

    // Dictionary is empty, still array-backed, and never upgraded.
    Assert.Empty(map);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
    Assert.Null(map._dictionaryStorage);
}
[Fact]
public void Contains_ListStorage_KeyValuePair_True()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");
    var pair = new KeyValuePair<string, object?>("key", "value");

    // Pair-based Contains matches when both key and value match.
    var found = ((ICollection<KeyValuePair<string, object?>>)map).Contains(pair);

    Assert.True(found);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void Contains_ListStory_KeyValuePair_True_CaseInsensitive()
{
    var map = new AdaptiveCapacityDictionary<string, object>(StringComparer.OrdinalIgnoreCase);
    map.Add("key", "value");
    // Key differs only in case; the dictionary's comparer makes it a match.
    var pair = new KeyValuePair<string, object?>("KEY", "value");

    var found = ((ICollection<KeyValuePair<string, object?>>)map).Contains(pair);

    Assert.True(found);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void Contains_ListStorage_KeyValuePair_False()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");
    // Key is absent, so the pair cannot match.
    var pair = new KeyValuePair<string, object?>("other", "value");

    var found = ((ICollection<KeyValuePair<string, object?>>)map).Contains(pair);

    Assert.False(found);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
// Value comparisons use the default equality comparer.
[Fact]
public void Contains_ListStorage_KeyValuePair_False_ValueComparisonIsDefault()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");
    // Same key, differently-cased value: only keys honor the comparer.
    var pair = new KeyValuePair<string, object?>("key", "valUE");

    var found = ((ICollection<KeyValuePair<string, object?>>)map).Contains(pair);

    Assert.False(found);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void ContainsKey_EmptyStorage()
{
    // Lookup on an empty dictionary simply reports absence.
    var map = new AdaptiveCapacityDictionary<string, string>();

    var found = map.ContainsKey("key");

    Assert.False(found);
}
[Fact]
public void ContainsKey_EmptyStringIsAllowed()
{
    // The empty string is a valid key and must not throw.
    var map = new AdaptiveCapacityDictionary<string, string>();

    var found = map.ContainsKey("");

    Assert.False(found);
}
[Fact]
public void ContainsKey_ListStorage_False()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");

    // A key that was never added is not found.
    var found = map.ContainsKey("other");

    Assert.False(found);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void ContainsKey_ListStorage_True()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");

    // The seeded key is found without upgrading the storage.
    var found = map.ContainsKey("key");

    Assert.True(found);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void ContainsKey_ListStorage_True_CaseInsensitive()
{
    var map = new AdaptiveCapacityDictionary<string, object>(StringComparer.OrdinalIgnoreCase);
    map.Add("key", "value");

    // A differently-cased key still matches under the ignore-case comparer.
    var found = map.ContainsKey("kEy");

    Assert.True(found);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void CopyTo()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");
    var destination = new KeyValuePair<string, object?>[2];

    // Copy starting at index 1 so slot 0 is left at its default.
    ((ICollection<KeyValuePair<string, object?>>)map).CopyTo(destination, 1);

    Assert.Equal(
        new KeyValuePair<string, object?>[]
        {
            default(KeyValuePair<string, object?>),
            new KeyValuePair<string, object?>("key", "value")
        },
        destination);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void Remove_KeyValuePair_True()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");
    var pair = new KeyValuePair<string, object?>("key", "value");

    // Pair-based Remove succeeds when key and value both match.
    var removed = ((ICollection<KeyValuePair<string, object?>>)map).Remove(pair);

    Assert.True(removed);
    Assert.Empty(map);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void Remove_KeyValuePair_True_CaseInsensitive()
{
    var map = new AdaptiveCapacityDictionary<string, object>(StringComparer.OrdinalIgnoreCase);
    map.Add("key", "value");
    // Key cased differently; the comparer still matches it.
    var pair = new KeyValuePair<string, object?>("KEY", "value");

    var removed = ((ICollection<KeyValuePair<string, object?>>)map).Remove(pair);

    Assert.True(removed);
    Assert.Empty(map);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void Remove_KeyValuePair_False()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");
    // Key is absent, so nothing is removed.
    var pair = new KeyValuePair<string, object?>("other", "value");

    var removed = ((ICollection<KeyValuePair<string, object?>>)map).Remove(pair);

    Assert.False(removed);
    Assert.Collection(map, kvp => { Assert.Equal("key", kvp.Key); Assert.Equal("value", kvp.Value); });
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
// Value comparisons use the default equality comparer.
[Fact]
public void Remove_KeyValuePair_False_ValueComparisonIsDefault()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");
    // Value differs in case; values never use the key comparer.
    var pair = new KeyValuePair<string, object?>("key", "valUE");

    var removed = ((ICollection<KeyValuePair<string, object?>>)map).Remove(pair);

    Assert.False(removed);
    Assert.Collection(map, kvp => { Assert.Equal("key", kvp.Key); Assert.Equal("value", kvp.Value); });
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void Remove_EmptyStorage()
{
    // Removing from an empty dictionary fails without throwing.
    var map = new AdaptiveCapacityDictionary<string, string>();

    var removed = map.Remove("key");

    Assert.False(removed);
}
[Fact]
public void Remove_EmptyStringIsAllowed()
{
    // The empty string is a legal key for Remove.
    var map = new AdaptiveCapacityDictionary<string, string>();

    var removed = map.Remove("");

    Assert.False(removed);
}
[Fact]
public void Remove_ListStorage_False()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");

    // Removing a missing key leaves the existing entry alone.
    var removed = map.Remove("other");

    Assert.False(removed);
    Assert.Collection(map, kvp => { Assert.Equal("key", kvp.Key); Assert.Equal("value", kvp.Value); });
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void Remove_ListStorage_True()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");

    // Removing the only key empties the dictionary.
    var removed = map.Remove("key");

    Assert.True(removed);
    Assert.Empty(map);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void Remove_ListStorage_True_CaseInsensitive()
{
    var map = new AdaptiveCapacityDictionary<string, object>(StringComparer.OrdinalIgnoreCase);
    map.Add("key", "value");

    // A differently-cased key still removes the entry.
    var removed = map.Remove("kEy");

    Assert.True(removed);
    Assert.Empty(map);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void Remove_KeyAndOutValue_EmptyStorage()
{
    var map = new AdaptiveCapacityDictionary<string, string>();

    // Remove with an out-value on an empty dictionary fails and yields null.
    var removed = map.Remove("key", out var removedValue);

    Assert.False(removed);
    Assert.Null(removedValue);
}
[Fact]
public void Remove_KeyAndOutValue_EmptyStringIsAllowed()
{
    var map = new AdaptiveCapacityDictionary<string, string>();

    // Empty string is a legal key; the lookup just misses.
    var removed = map.Remove("", out var removedValue);

    Assert.False(removed);
    Assert.Null(removedValue);
}
[Fact]
public void Remove_KeyAndOutValue_ListStorage_False()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");

    // Missing key: no removal, null out-value, entry intact.
    var removed = map.Remove("other", out var removedValue);

    Assert.False(removed);
    Assert.Null(removedValue);
    Assert.Collection(map, kvp => { Assert.Equal("key", kvp.Key); Assert.Equal("value", kvp.Value); });
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void Remove_KeyAndOutValue_ListStorage_True()
{
    object stored = "value";
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", stored);

    var removed = map.Remove("key", out var removedValue);

    // The exact stored instance must be handed back through the out parameter.
    Assert.True(removed);
    Assert.Same(stored, removedValue);
    Assert.Empty(map);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void Remove_KeyAndOutValue_ListStorage_True_CaseInsensitive()
{
    object stored = "value";
    var map = new AdaptiveCapacityDictionary<string, object>(StringComparer.OrdinalIgnoreCase);
    map.Add("key", stored);

    // A differently-cased key still removes and returns the stored instance.
    var removed = map.Remove("kEy", out var removedValue);

    Assert.True(removed);
    Assert.Same(stored, removedValue);
    Assert.Empty(map);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void Remove_KeyAndOutValue_ListStorage_KeyExists_First()
{
    // Target entry sits at the head of the backing array.
    object stored = "value";
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", stored);
    map.Add("other", 5);
    map.Add("dotnet", "rocks");

    var removed = map.Remove("key", out var removedValue);

    Assert.True(removed);
    Assert.Same(stored, removedValue);
    Assert.Equal(2, map.Count);
    Assert.False(map.ContainsKey("key"));
    Assert.True(map.ContainsKey("other"));
    Assert.True(map.ContainsKey("dotnet"));
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void Remove_KeyAndOutValue_ListStorage_KeyExists_Middle()
{
    // Target entry sits in the middle of the backing array.
    object stored = "value";
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("other", 5);
    map.Add("key", stored);
    map.Add("dotnet", "rocks");

    var removed = map.Remove("key", out var removedValue);

    Assert.True(removed);
    Assert.Same(stored, removedValue);
    Assert.Equal(2, map.Count);
    Assert.False(map.ContainsKey("key"));
    Assert.True(map.ContainsKey("other"));
    Assert.True(map.ContainsKey("dotnet"));
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void Remove_KeyAndOutValue_ListStorage_KeyExists_Last()
{
    // Target entry sits at the tail of the backing array.
    object stored = "value";
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("other", 5);
    map.Add("dotnet", "rocks");
    map.Add("key", stored);

    var removed = map.Remove("key", out var removedValue);

    Assert.True(removed);
    Assert.Same(stored, removedValue);
    Assert.Equal(2, map.Count);
    Assert.False(map.ContainsKey("key"));
    Assert.True(map.ContainsKey("other"));
    Assert.True(map.ContainsKey("dotnet"));
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void TryAdd_EmptyStringIsAllowed()
{
    // The empty string is a legal key for TryAdd.
    var map = new AdaptiveCapacityDictionary<string, string>();

    var added = map.TryAdd("", "foo");

    Assert.True(added);
}
[Fact]
public void TryAdd_EmptyStorage_CanAdd()
{
    var map = new AdaptiveCapacityDictionary<string, object>();

    var added = map.TryAdd("key", "value");

    // The entry lands in slot 0 of the 10-slot backing array; the rest stay default.
    Assert.True(added);
    Assert.Collection(
        map._arrayStorage,
        entry => Assert.Equal(new KeyValuePair<string, object?>("key", "value"), entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry));
}
[Fact]
public void TryAdd_ArrayStorage_CanAdd()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key0", "value0");

    var added = map.TryAdd("key1", "value1");

    // The new pair occupies the next free slot; remaining slots stay default.
    Assert.True(added);
    Assert.Collection(
        map._arrayStorage,
        entry => Assert.Equal(new KeyValuePair<string, object?>("key0", "value0"), entry),
        entry => Assert.Equal(new KeyValuePair<string, object?>("key1", "value1"), entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry));
}
[Fact]
public void TryAdd_ArrayStorage_DoesNotAddWhenKeyIsPresent()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key0", "value0");

    var added = map.TryAdd("key0", "value1");

    // Duplicate key: TryAdd returns false and the stored value is unchanged.
    Assert.False(added);
    Assert.Collection(
        map._arrayStorage,
        entry => Assert.Equal(new KeyValuePair<string, object?>("key0", "value0"), entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry),
        entry => Assert.Equal(default, entry));
}
[Fact]
public void TryGetValue_EmptyStorage()
{
    var map = new AdaptiveCapacityDictionary<string, string>();

    // Lookup on an empty dictionary misses and leaves the out-value null.
    var found = map.TryGetValue("key", out var stored);

    Assert.False(found);
    Assert.Null(stored);
}
[Fact]
public void TryGetValue_EmptyStringIsAllowed()
{
    var map = new AdaptiveCapacityDictionary<string, string>();

    // Empty string is a legal key for TryGetValue.
    var found = map.TryGetValue("", out var stored);

    Assert.False(found);
    Assert.Null(stored);
}
[Fact]
public void TryGetValue_ListStorage_False()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");

    // Missing key: no value comes back.
    var found = map.TryGetValue("other", out var stored);

    Assert.False(found);
    Assert.Null(stored);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void TryGetValue_ListStorage_True()
{
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");

    // Present key: the stored value is returned.
    var found = map.TryGetValue("key", out var stored);

    Assert.True(found);
    Assert.Equal("value", stored);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void TryGetValue_ListStorage_True_CaseInsensitive()
{
    var map = new AdaptiveCapacityDictionary<string, object>(StringComparer.OrdinalIgnoreCase);
    map.Add("key", "value");

    // A differently-cased key still finds the stored value.
    var found = map.TryGetValue("kEy", out var stored);

    Assert.True(found);
    Assert.Equal("value", stored);
    Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
}
[Fact]
public void ListStorage_SwitchesToDictionaryAfter10_Add()
{
    var map = new AdaptiveCapacityDictionary<string, object>();

    // The first Add allocates the fixed 10-slot array storage.
    map.Add("key", "value");
    var entries = Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
    Assert.Equal(10, entries.Length);

    // Ten more entries (keys key2..key11, values cycling value2..value5)
    // push past the array capacity and force the dictionary upgrade.
    for (var i = 2; i <= 11; i++)
    {
        map.Add($"key{i}", $"value{2 + (i - 2) % 4}");
    }

    Assert.Null(map._arrayStorage);
    Assert.Equal(11, map.Count);
}
[Fact]
public void ListStorage_SwitchesToDictionaryAfter10_TryAdd()
{
    var map = new AdaptiveCapacityDictionary<string, object>();

    // The first TryAdd allocates the fixed 10-slot array storage.
    map.TryAdd("key", "value");
    var entries = Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
    Assert.Equal(10, entries.Length);

    // Ten more entries (keys key2..key11, values cycling value2..value5)
    // push past the array capacity and force the dictionary upgrade.
    for (var i = 2; i <= 11; i++)
    {
        map.TryAdd($"key{i}", $"value{2 + (i - 2) % 4}");
    }

    Assert.Null(map._arrayStorage);
    Assert.Equal(11, map.Count);
}
[Fact]
public void ListStorage_SwitchesToDictionaryAfter10_Index()
{
    var map = new AdaptiveCapacityDictionary<string, object>();

    // The first indexer write allocates the fixed 10-slot array storage.
    map["key"] = "value";
    var entries = Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
    Assert.Equal(10, entries.Length);

    // Ten more indexer writes (keys key1..key10) force the dictionary upgrade.
    for (var i = 1; i <= 10; i++)
    {
        map[$"key{i}"] = "value";
    }

    Assert.Null(map._arrayStorage);
    Assert.Equal(11, map.Count);
}
[Fact]
public void ListStorage_RemoveAt_RearrangesInnerArray()
{
    // Three entries in array-backed storage.
    var map = new AdaptiveCapacityDictionary<string, object>();
    map.Add("key", "value");
    map.Add("key2", "value2");
    map.Add("key3", "value3");
    var entries = Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
    Assert.Equal(3, map.Count);

    // Removing the middle entry compacts the backing array.
    map.Remove("key2");

    entries = Assert.IsType<KeyValuePair<string, object?>[]>(map._arrayStorage);
    Assert.Equal(2, map.Count);
    Assert.Equal("key", entries[0].Key);
    Assert.Equal("value", entries[0].Value);
    Assert.Equal("key3", entries[1].Key);
    Assert.Equal("value3", entries[1].Value);
}
[Fact]
public void UpgradeToDictionary_KeepsComparer()
{
    // Force the upgrade past 10 entries with a case-insensitive comparer.
    var comparer = StringComparer.OrdinalIgnoreCase;
    var map = new AdaptiveCapacityDictionary<string, object>(StringComparer.OrdinalIgnoreCase);
    for (var i = 0; i < 11; i++)
    {
        map[i.ToString(CultureInfo.InvariantCulture)] = i;
    }

    // The upgraded dictionary storage must inherit the original comparer,
    // so "K" and "k" are the same key.
    Assert.NotNull(map._dictionaryStorage);
    Assert.Equal(comparer, map._dictionaryStorage.Comparer);
    map["K"] = 1;
    map["k"] = 2;
    Assert.Equal(2, map["K"]);
}
[Fact]
public void StartAsDictionary_UsesComparer()
{
    // Capacity 11 exceeds the array-storage threshold, so dictionary
    // storage is used from the start and must honor the comparer.
    var comparer = StringComparer.OrdinalIgnoreCase;
    var map = new AdaptiveCapacityDictionary<string, object>(11, StringComparer.OrdinalIgnoreCase);

    Assert.NotNull(map._dictionaryStorage);
    Assert.Equal(comparer, map._dictionaryStorage.Comparer);
    map["K"] = 1;
    map["k"] = 2;
    Assert.Equal(2, map["K"]);
}
// Helper: asserts the dictionary still references the shared empty array.
private void AssertEmptyArrayStorage(AdaptiveCapacityDictionary<string, string> value)
    => Assert.Same(Array.Empty<KeyValuePair<string, object?>>(), value._arrayStorage);
// Test fixture: plain type with two public read/write properties.
private class RegularType
{
    public bool IsAwesome { get; set; }
    public int CoolnessFactor { get; set; }
}
// Test fixture: properties spanning private, internal, and public visibility.
private class Visibility
{
    private string? PrivateYo { get; set; }
    internal int ItsInternalDealWithIt { get; set; }
    public bool IsPublic { get; set; }
}
// Test fixture: type whose only property is static.
private class StaticProperty
{
    public static bool IsStatic { get; set; }
}
// Test fixture: property with a setter but no getter.
private class SetterOnly
{
    private bool _coolSetOnly;
    public bool CoolSetOnly { set { _coolSetOnly = value; } }
}
// Test fixture: base type whose property is hidden by DerivedHiddenProperty below.
private class Base
{
    public bool DerivedProperty { get; set; }
}
// Test fixture: derived type adding its own property to Base.
private class Derived : Base
{
    public bool TotallySweetProperty { get; set; }
}
// Test fixture: hides Base.DerivedProperty (bool) with a new int property.
private class DerivedHiddenProperty : Base
{
    public new int DerivedProperty { get; set; }
}
// Test fixture: type whose only member is a string-keyed indexer.
private class IndexerProperty
{
    public bool this[string key]
    {
        get { return false; }
        set { }
    }
}
// Test fixture: simple two-property type with nullable string properties.
private class Address
{
    public string? City { get; set; }
    public string? State { get; set; }
}
}
}
| |
using System;
namespace AbbyyLS.Payments
{
/// <summary>
/// This structure provides a amount of money in some currency
/// </summary>
public struct Money : IComparable<Money>, IEquatable<Money>
{
    /// <summary>
    /// amount of money
    /// </summary>
    public decimal Amount { get; private set; }

    /// <summary>
    /// currency; null only for a zero amount (the default instance)
    /// </summary>
    public ICurrency Currency { get; private set; }

    /// <summary>
    /// creates an amount of money in any currency
    /// </summary>
    /// <param name="amount">
    /// amount of money
    /// </param>
    /// <param name="currency">
    /// currency; may be null only when <paramref name="amount"/> is zero
    /// </param>
    /// <exception cref="ArgumentNullException">non-zero amount with a null currency</exception>
    public Money(decimal amount, ICurrency currency)
        : this()
    {
        if (amount != 0m && currency == null)
            throw new ArgumentNullException("currency");
        Amount = amount;
        Currency = currency;
    }

    /// <summary>
    /// shows amount and character code of currency
    /// </summary>
    public override string ToString()
    {
        if (Currency == null)
            return "0";
        return string.Format("{0:G} {1}", Amount, Currency.CharCode);
    }

    /// <summary>
    /// true when the amount is an integer number of minor currency units
    /// (always true when the currency is unset or has no minor unit defined)
    /// </summary>
    public bool IsRounded
    {
        get
        {
            if (Currency == null || Currency.MinorUnit == 0m)
                return true;
            decimal mu = Amount / Currency.MinorUnit;
            return decimal.Truncate(mu) == mu;
        }
    }

    /// <summary>
    /// the total number of minor units of currency
    /// </summary>
    /// <exception cref="InvalidOperationException">the currency defines no minor unit</exception>
    public decimal TotalMinorUnit
    {
        get
        {
            if (Currency == null)
                return 0m;
            if (Currency.MinorUnit == 0m)
                throw new InvalidOperationException(string.Format("undefined minor unit in {0} currency", Currency.CharCode));
            return Amount / Currency.MinorUnit;
        }
    }

    /// <summary>
    /// Returns the amount rounded down to a whole number of minor units.
    /// </summary>
    /// <returns>
    /// a new instance with the required amount or the current instance if the operation is not possible
    /// </returns>
    public Money FloorMinorUnit()
    {
        if (Currency == null)
            return Zero;
        if (Currency.MinorUnit == 0m)
            return this;
        return new Money(decimal.Floor(Amount / Currency.MinorUnit) * Currency.MinorUnit, Currency);
    }

    /// <summary>
    /// Returns the largest integer less than or equal to this money.
    /// </summary>
    /// <returns>
    /// a new instance with the required amount or the current instance if the operation is not possible
    /// </returns>
    public Money FloorMajorUnit()
    {
        if (Currency == null)
            return Zero;
        return new Money(decimal.Floor(Amount), Currency);
    }

    /// <summary>
    /// Returns the amount rounded up to a whole number of minor units.
    /// </summary>
    /// <returns>
    /// a new instance with the required amount or the current instance if the operation is not possible
    /// </returns>
    public Money CeilingMinorUnit()
    {
        if (Currency == null)
            return Zero;
        if (Currency.MinorUnit == 0m)
            return this;
        return new Money(decimal.Ceiling(Amount / Currency.MinorUnit) * Currency.MinorUnit, Currency);
    }

    /// <summary>
    /// Returns the smallest integral value that is greater than or equal to this money.
    /// </summary>
    /// <returns>
    /// a new instance with the required amount or the current instance if the operation is not possible
    /// </returns>
    public Money CeilingMajorUnit()
    {
        if (Currency == null)
            return Zero;
        return new Money(decimal.Ceiling(Amount), Currency);
    }

    /// <summary>
    /// multiply amount
    /// </summary>
    public static Money operator *(Money lhs, decimal rhs)
    {
        return new Money(rhs * lhs.Amount, lhs.Currency);
    }

    /// <summary>
    /// multiply amount
    /// </summary>
    public static Money operator *(decimal lhs, Money rhs)
    {
        return new Money(lhs * rhs.Amount, rhs.Currency);
    }

    /// <summary>
    /// divide amount
    /// </summary>
    public static Money operator /(Money lhs, decimal rhs)
    {
        return new Money(lhs.Amount / rhs, lhs.Currency);
    }

    /// <summary>
    /// Determines whether the specified System.Object is equal to the current Money.
    /// </summary>
    public override bool Equals(object obj)
    {
        if (!(obj is Money))
            return false;
        return Equals((Money)obj);
    }

    /// <summary>
    /// Determines whether the specified Money is equal to the current Money.
    /// Two zero amounts are equal regardless of their currencies.
    /// </summary>
    public bool Equals(Money other)
    {
        if (Amount == 0m && other.Amount == 0m)
            return true;
        return Amount == other.Amount &&
            Currency == other.Currency;
    }

    /// <summary>
    /// Compares two Money structures.
    /// </summary>
    public static bool operator ==(Money x, Money y)
    {
        return x.Equals(y);
    }

    /// <summary>
    /// Compares two Money structures.
    /// </summary>
    public static bool operator !=(Money x, Money y)
    {
        return !x.Equals(y);
    }

    /// <summary>
    /// Serves as a hash function for a particular type.
    /// Consistent with Equals: all zero amounts share one hash code, because
    /// Equals treats Money(0, X) and Money(0, Y) as equal for any currencies.
    /// </summary>
    /// <returns></returns>
    public override int GetHashCode()
    {
        // BUGFIX: previously the currency was always mixed in, so two zero
        // amounts in different currencies were Equal but hashed differently,
        // violating the GetHashCode/Equals contract.
        if (Amount == 0m)
            return 0m.GetHashCode();
        return Amount.GetHashCode() ^ ((Currency == null) ? 0 : Currency.GetHashCode());
    }

    /// <summary>
    /// Compares this instance to a specified AbbyyLS.Payments.Money object and returns a
    /// comparison of their relative values.
    /// </summary>
    /// <exception cref="InvalidOperationException">both amounts are non-zero and the currencies differ</exception>
    public int CompareTo(Money other)
    {
        // Two zero amounts compare equal regardless of currency, mirroring
        // Equals; previously this path could throw "mismatch currency" for
        // values that Equals reports as equal.
        if (Amount == 0m && other.Amount == 0m)
            return 0;
        if (Currency == null)
            return 0m.CompareTo(other.Amount);
        if (other.Currency == null)
            return Amount.CompareTo(0m);
        if (Currency != other.Currency)
            throw new InvalidOperationException("mismatch currency");
        return Amount.CompareTo(other.Amount);
    }

    /// <summary>
    /// operator Less
    /// </summary>
    public static bool operator <(Money lhs, Money rhs)
    {
        return lhs.CompareTo(rhs) < 0;
    }

    /// <summary>
    /// operator Less or Equal
    /// </summary>
    public static bool operator <=(Money lhs, Money rhs)
    {
        return lhs.CompareTo(rhs) <= 0;
    }

    /// <summary>
    /// operator Greater
    /// </summary>
    public static bool operator >(Money lhs, Money rhs)
    {
        return lhs.CompareTo(rhs) > 0;
    }

    /// <summary>
    /// operator Greater or Equal
    /// </summary>
    public static bool operator >=(Money lhs, Money rhs)
    {
        return lhs.CompareTo(rhs) >= 0;
    }

    /// <summary>
    /// sum amount; a currency-less (zero) operand adopts the other side's currency
    /// </summary>
    /// <exception cref="InvalidOperationException">both currencies set but different</exception>
    public static Money operator +(Money lhs, Money rhs)
    {
        if (lhs.Currency == null)
            return rhs;
        if (rhs.Currency == null)
            return lhs;
        if (lhs.Currency != rhs.Currency)
            throw new InvalidOperationException("mismatch currency");
        return new Money(lhs.Amount + rhs.Amount, lhs.Currency);
    }

    /// <summary>
    /// subtract amount; a currency-less (zero) operand adopts the other side's currency
    /// </summary>
    /// <exception cref="InvalidOperationException">both currencies set but different</exception>
    public static Money operator -(Money lhs, Money rhs)
    {
        if (rhs.Currency == null)
            return lhs;
        if (lhs.Currency == null)
            return new Money(-rhs.Amount, rhs.Currency);
        if (lhs.Currency != rhs.Currency)
            throw new InvalidOperationException("mismatch currency");
        return new Money(lhs.Amount - rhs.Amount, lhs.Currency);
    }

    /// <summary>
    /// Default value: zero amount with no currency.
    /// </summary>
    public static readonly Money Zero = new Money();
}
}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using java.lang;
using org.objectweb.asm;
namespace stab.reflection {
public abstract class Instruction {
final static Instruction Aaload = new SimpleInstruction(Opcode.Aaload);
final static Instruction Aastore = new SimpleInstruction(Opcode.Aastore);
final static Instruction Aconst_Null = new SimpleInstruction(Opcode.Aconst_Null);
final static Instruction Areturn = new SimpleInstruction(Opcode.Areturn);
final static Instruction Arraylength = new SimpleInstruction(Opcode.Arraylength);
final static Instruction Athrow = new SimpleInstruction(Opcode.Athrow);
final static Instruction Baload = new SimpleInstruction(Opcode.Baload);
final static Instruction Bastore = new SimpleInstruction(Opcode.Bastore);
final static Instruction Caload = new SimpleInstruction(Opcode.Caload);
final static Instruction Castore = new SimpleInstruction(Opcode.Castore);
final static Instruction D2f = new SimpleInstruction(Opcode.D2f);
final static Instruction D2i = new SimpleInstruction(Opcode.D2i);
final static Instruction D2l = new SimpleInstruction(Opcode.D2l);
final static Instruction Dadd = new SimpleInstruction(Opcode.Dadd);
final static Instruction Daload = new SimpleInstruction(Opcode.Daload);
final static Instruction Dastore = new SimpleInstruction(Opcode.Dastore);
final static Instruction Dcmpg = new SimpleInstruction(Opcode.Dcmpg);
final static Instruction Dcmpl = new SimpleInstruction(Opcode.Dcmpl);
final static Instruction Dconst_0 = new SimpleInstruction(Opcode.Dconst_0);
final static Instruction Dconst_1 = new SimpleInstruction(Opcode.Dconst_1);
final static Instruction Ddiv = new SimpleInstruction(Opcode.Ddiv);
final static Instruction Dmul = new SimpleInstruction(Opcode.Dmul);
final static Instruction Dneg = new SimpleInstruction(Opcode.Dneg);
final static Instruction Drem = new SimpleInstruction(Opcode.Drem);
final static Instruction Dreturn = new SimpleInstruction(Opcode.Dreturn);
final static Instruction Dsub = new SimpleInstruction(Opcode.Dsub);
final static Instruction Dup = new SimpleInstruction(Opcode.Dup);
final static Instruction Dup_X1 = new SimpleInstruction(Opcode.Dup_X1);
final static Instruction Dup_X2 = new SimpleInstruction(Opcode.Dup_X2);
final static Instruction Dup2 = new SimpleInstruction(Opcode.Dup2);
final static Instruction Dup2_X1 = new SimpleInstruction(Opcode.Dup2_X1);
final static Instruction Dup2_X2 = new SimpleInstruction(Opcode.Dup2_X2);
final static Instruction F2d = new SimpleInstruction(Opcode.F2d);
final static Instruction F2i = new SimpleInstruction(Opcode.F2i);
final static Instruction F2l = new SimpleInstruction(Opcode.F2l);
final static Instruction Fadd = new SimpleInstruction(Opcode.Fadd);
final static Instruction Faload = new SimpleInstruction(Opcode.Faload);
final static Instruction Fastore = new SimpleInstruction(Opcode.Fastore);
final static Instruction Fcmpg = new SimpleInstruction(Opcode.Fcmpg);
final static Instruction Fcmpl = new SimpleInstruction(Opcode.Fcmpl);
final static Instruction Fconst_0 = new SimpleInstruction(Opcode.Fconst_0);
final static Instruction Fconst_1 = new SimpleInstruction(Opcode.Fconst_1);
final static Instruction Fconst_2 = new SimpleInstruction(Opcode.Fconst_2);
final static Instruction Fdiv = new SimpleInstruction(Opcode.Fdiv);
final static Instruction Fmul = new SimpleInstruction(Opcode.Fmul);
final static Instruction Fneg = new SimpleInstruction(Opcode.Fneg);
final static Instruction Frem = new SimpleInstruction(Opcode.Frem);
final static Instruction Freturn = new SimpleInstruction(Opcode.Freturn);
final static Instruction Fsub = new SimpleInstruction(Opcode.Fsub);
final static Instruction I2b = new SimpleInstruction(Opcode.I2b);
final static Instruction I2c = new SimpleInstruction(Opcode.I2c);
// Canonical singletons for every operand-less JVM instruction (conversions,
// arithmetic, array load/store, stack ops, monitors, returns). These are
// immutable, so one shared instance per opcode suffices.
final static Instruction I2d = new SimpleInstruction(Opcode.I2d);
final static Instruction I2f = new SimpleInstruction(Opcode.I2f);
final static Instruction I2l = new SimpleInstruction(Opcode.I2l);
final static Instruction I2s = new SimpleInstruction(Opcode.I2s);
final static Instruction Iadd = new SimpleInstruction(Opcode.Iadd);
final static Instruction Iaload = new SimpleInstruction(Opcode.Iaload);
final static Instruction Iand = new SimpleInstruction(Opcode.Iand);
final static Instruction Iastore = new SimpleInstruction(Opcode.Iastore);
final static Instruction Iconst_0 = new SimpleInstruction(Opcode.Iconst_0);
final static Instruction Iconst_1 = new SimpleInstruction(Opcode.Iconst_1);
final static Instruction Iconst_2 = new SimpleInstruction(Opcode.Iconst_2);
final static Instruction Iconst_3 = new SimpleInstruction(Opcode.Iconst_3);
final static Instruction Iconst_4 = new SimpleInstruction(Opcode.Iconst_4);
final static Instruction Iconst_5 = new SimpleInstruction(Opcode.Iconst_5);
final static Instruction Iconst_M1 = new SimpleInstruction(Opcode.Iconst_M1);
final static Instruction Idiv = new SimpleInstruction(Opcode.Idiv);
final static Instruction Imul = new SimpleInstruction(Opcode.Imul);
final static Instruction Ineg = new SimpleInstruction(Opcode.Ineg);
final static Instruction Ior = new SimpleInstruction(Opcode.Ior);
final static Instruction Irem = new SimpleInstruction(Opcode.Irem);
final static Instruction Ireturn = new SimpleInstruction(Opcode.Ireturn);
final static Instruction Ishl = new SimpleInstruction(Opcode.Ishl);
final static Instruction Ishr = new SimpleInstruction(Opcode.Ishr);
final static Instruction Isub = new SimpleInstruction(Opcode.Isub);
final static Instruction Iushr = new SimpleInstruction(Opcode.Iushr);
final static Instruction Ixor = new SimpleInstruction(Opcode.Ixor);
final static Instruction L2d = new SimpleInstruction(Opcode.L2d);
final static Instruction L2f = new SimpleInstruction(Opcode.L2f);
final static Instruction L2i = new SimpleInstruction(Opcode.L2i);
final static Instruction Ladd = new SimpleInstruction(Opcode.Ladd);
final static Instruction Laload = new SimpleInstruction(Opcode.Laload);
final static Instruction Land = new SimpleInstruction(Opcode.Land);
final static Instruction Lastore = new SimpleInstruction(Opcode.Lastore);
final static Instruction Lcmp = new SimpleInstruction(Opcode.Lcmp);
final static Instruction Lconst_0 = new SimpleInstruction(Opcode.Lconst_0);
final static Instruction Lconst_1 = new SimpleInstruction(Opcode.Lconst_1);
final static Instruction Ldiv = new SimpleInstruction(Opcode.Ldiv);
final static Instruction Lmul = new SimpleInstruction(Opcode.Lmul);
final static Instruction Lneg = new SimpleInstruction(Opcode.Lneg);
final static Instruction Lor = new SimpleInstruction(Opcode.Lor);
final static Instruction Lrem = new SimpleInstruction(Opcode.Lrem);
final static Instruction Lreturn = new SimpleInstruction(Opcode.Lreturn);
final static Instruction Lshl = new SimpleInstruction(Opcode.Lshl);
final static Instruction Lshr = new SimpleInstruction(Opcode.Lshr);
final static Instruction Lsub = new SimpleInstruction(Opcode.Lsub);
final static Instruction Lushr = new SimpleInstruction(Opcode.Lushr);
final static Instruction Lxor = new SimpleInstruction(Opcode.Lxor);
final static Instruction Monitorenter = new SimpleInstruction(Opcode.Monitorenter);
final static Instruction Monitorexit = new SimpleInstruction(Opcode.Monitorexit);
final static Instruction Nop = new SimpleInstruction(Opcode.Nop);
final static Instruction Pop = new SimpleInstruction(Opcode.Pop);
final static Instruction Pop2 = new SimpleInstruction(Opcode.Pop2);
final static Instruction Return = new SimpleInstruction(Opcode.Return);
final static Instruction Saload = new SimpleInstruction(Opcode.Saload);
final static Instruction Sastore = new SimpleInstruction(Opcode.Sastore);
final static Instruction Swap = new SimpleInstruction(Opcode.Swap);
// Preallocated instances for the hot local-variable slots 0..3 (matching the
// JVM's dedicated xload_n / xstore_n opcodes) and for every primitive
// 'newarray' type code (4 = boolean .. 11 = long, per JVMS table 6.5).
final static Instruction Aload_0 = new LocalVariableInstruction(Opcode.Aload, 0);
final static Instruction Aload_1 = new LocalVariableInstruction(Opcode.Aload, 1);
final static Instruction Aload_2 = new LocalVariableInstruction(Opcode.Aload, 2);
final static Instruction Aload_3 = new LocalVariableInstruction(Opcode.Aload, 3);
final static Instruction Fload_0 = new LocalVariableInstruction(Opcode.Fload, 0);
final static Instruction Fload_1 = new LocalVariableInstruction(Opcode.Fload, 1);
final static Instruction Fload_2 = new LocalVariableInstruction(Opcode.Fload, 2);
final static Instruction Fload_3 = new LocalVariableInstruction(Opcode.Fload, 3);
final static Instruction Dload_0 = new LocalVariableInstruction(Opcode.Dload, 0);
final static Instruction Dload_1 = new LocalVariableInstruction(Opcode.Dload, 1);
final static Instruction Dload_2 = new LocalVariableInstruction(Opcode.Dload, 2);
final static Instruction Dload_3 = new LocalVariableInstruction(Opcode.Dload, 3);
final static Instruction Iload_0 = new LocalVariableInstruction(Opcode.Iload, 0);
final static Instruction Iload_1 = new LocalVariableInstruction(Opcode.Iload, 1);
final static Instruction Iload_2 = new LocalVariableInstruction(Opcode.Iload, 2);
final static Instruction Iload_3 = new LocalVariableInstruction(Opcode.Iload, 3);
final static Instruction Lload_0 = new LocalVariableInstruction(Opcode.Lload, 0);
final static Instruction Lload_1 = new LocalVariableInstruction(Opcode.Lload, 1);
final static Instruction Lload_2 = new LocalVariableInstruction(Opcode.Lload, 2);
final static Instruction Lload_3 = new LocalVariableInstruction(Opcode.Lload, 3);
final static Instruction Astore_0 = new LocalVariableInstruction(Opcode.Astore, 0);
final static Instruction Astore_1 = new LocalVariableInstruction(Opcode.Astore, 1);
final static Instruction Astore_2 = new LocalVariableInstruction(Opcode.Astore, 2);
final static Instruction Astore_3 = new LocalVariableInstruction(Opcode.Astore, 3);
final static Instruction Fstore_0 = new LocalVariableInstruction(Opcode.Fstore, 0);
final static Instruction Fstore_1 = new LocalVariableInstruction(Opcode.Fstore, 1);
final static Instruction Fstore_2 = new LocalVariableInstruction(Opcode.Fstore, 2);
final static Instruction Fstore_3 = new LocalVariableInstruction(Opcode.Fstore, 3);
final static Instruction Dstore_0 = new LocalVariableInstruction(Opcode.Dstore, 0);
final static Instruction Dstore_1 = new LocalVariableInstruction(Opcode.Dstore, 1);
final static Instruction Dstore_2 = new LocalVariableInstruction(Opcode.Dstore, 2);
final static Instruction Dstore_3 = new LocalVariableInstruction(Opcode.Dstore, 3);
final static Instruction Istore_0 = new LocalVariableInstruction(Opcode.Istore, 0);
final static Instruction Istore_1 = new LocalVariableInstruction(Opcode.Istore, 1);
final static Instruction Istore_2 = new LocalVariableInstruction(Opcode.Istore, 2);
final static Instruction Istore_3 = new LocalVariableInstruction(Opcode.Istore, 3);
final static Instruction Lstore_0 = new LocalVariableInstruction(Opcode.Lstore, 0);
final static Instruction Lstore_1 = new LocalVariableInstruction(Opcode.Lstore, 1);
final static Instruction Lstore_2 = new LocalVariableInstruction(Opcode.Lstore, 2);
final static Instruction Lstore_3 = new LocalVariableInstruction(Opcode.Lstore, 3);
final static Instruction Newarray_4 = new IntOperandInstruction(Opcode.Newarray, 4);
final static Instruction Newarray_5 = new IntOperandInstruction(Opcode.Newarray, 5);
final static Instruction Newarray_6 = new IntOperandInstruction(Opcode.Newarray, 6);
final static Instruction Newarray_7 = new IntOperandInstruction(Opcode.Newarray, 7);
final static Instruction Newarray_8 = new IntOperandInstruction(Opcode.Newarray, 8);
final static Instruction Newarray_9 = new IntOperandInstruction(Opcode.Newarray, 9);
final static Instruction Newarray_10 = new IntOperandInstruction(Opcode.Newarray, 10);
final static Instruction Newarray_11 = new IntOperandInstruction(Opcode.Newarray, 11);
// Base initializer: every instruction records the opcode it emits.
protected Instruction(Opcode opcode) {
this.Opcode = opcode;
}
// NOTE(review): the trailing '^' is unusual. This file is in a C#-like JVM
// dialect (mixes 'final static'/'super' with C# properties), so '^' may be
// that dialect's member-declaration syntax rather than corruption — confirm
// against the original sources before "fixing" it.
public Opcode Opcode^;
// Operand accessors. The base class throws for every operand kind; each
// concrete subclass overrides only the accessor(s) that its opcode actually
// carries (e.g. LocalVariableInstruction overrides LocalVariable).
public virtual int LocalVariable {
get {
throw new UnsupportedOperationException();
}
}
public virtual int IntOperand {
get {
throw new UnsupportedOperationException();
}
}
public virtual int Increment {
get {
throw new UnsupportedOperationException();
}
}
public virtual LabelMarker LabelMarker {
get {
throw new UnsupportedOperationException();
}
}
public virtual Object ConstantValue {
get {
throw new UnsupportedOperationException();
}
}
public virtual FieldInfo Field {
get {
throw new UnsupportedOperationException();
}
}
public virtual MethodInfo Method {
get {
throw new UnsupportedOperationException();
}
}
public virtual TypeInfo Type {
get {
throw new UnsupportedOperationException();
}
}
public virtual int MinimumKey {
get {
throw new UnsupportedOperationException();
}
}
public virtual int MaximumKey {
get {
throw new UnsupportedOperationException();
}
}
public virtual LabelMarker[] Labels {
get {
throw new UnsupportedOperationException();
}
}
public virtual LabelMarker DefaultLabel {
get {
throw new UnsupportedOperationException();
}
}
public virtual int[] Keys {
get {
throw new UnsupportedOperationException();
}
}
public virtual int Dimensions {
get {
throw new UnsupportedOperationException();
}
}
// Default (non-public) visibility: the raw Label is only exposed to this
// package/assembly, via LabelMarker.
virtual Label Label {
get {
throw new UnsupportedOperationException();
}
}
// Double-dispatch hook: emits this instruction through an ASM-style
// MethodVisitor (visitInsn / visitVarInsn / visitJumpInsn / ...).
abstract void accept(MethodVisitor visitor);
}
// Pseudo-instruction marking a branch target; emitting it declares the
// underlying ASM Label at the current bytecode position.
public class LabelMarker : Instruction {
private Label label;
LabelMarker(Label label)
: super(Opcode.LabelMarker) {
this.label = label;
}
override Label Label {
get {
return label;
}
}
override void accept(MethodVisitor visitor) {
visitor.visitLabel(label);
}
}
// Instruction with no operands: the opcode alone is emitted.
class SimpleInstruction : Instruction {
SimpleInstruction(Opcode opcode)
: super(opcode) {
}
override void accept(MethodVisitor visitor) {
visitor.visitInsn(this.Opcode.Value);
}
}
// Load/store instruction addressing a local-variable slot (xload/xstore n).
class LocalVariableInstruction : Instruction {
private int localVariable;
LocalVariableInstruction(Opcode opcode, int localVariable)
: super(opcode) {
this.localVariable = localVariable;
}
public override int LocalVariable {
get {
return localVariable;
}
}
override void accept(MethodVisitor visitor) {
visitor.visitVarInsn(this.Opcode.Value, localVariable);
}
}
// Instruction with a single immediate int operand (bipush/sipush/newarray).
class IntOperandInstruction : Instruction {
private int intOperand;
IntOperandInstruction(Opcode opcode, int intOperand)
: super(opcode) {
this.intOperand = intOperand;
}
public override int IntOperand {
get {
return intOperand;
}
}
override void accept(MethodVisitor visitor) {
visitor.visitIntInsn(this.Opcode.Value, intOperand);
}
}
// 'iinc': adds a constant increment to an int local-variable slot in place.
class IncrementInstruction : Instruction {
private int localVariable;
private int increment;
IncrementInstruction(int localVariable, int increment)
: super(Opcode.Iinc) {
this.localVariable = localVariable;
this.increment = increment;
}
public override int LocalVariable {
get {
return localVariable;
}
}
public override int Increment {
get {
return increment;
}
}
override void accept(MethodVisitor visitor) {
visitor.visitIincInsn(localVariable, increment);
}
}
// Conditional/unconditional branch targeting a LabelMarker (goto, ifeq, ...).
class JumpInstruction : Instruction {
private LabelMarker labelMarker;
JumpInstruction(Opcode opcode, LabelMarker labelMarker)
: super(opcode) {
this.labelMarker = labelMarker;
}
public override LabelMarker LabelMarker {
get {
return labelMarker;
}
}
override void accept(MethodVisitor visitor) {
visitor.visitJumpInsn(this.Opcode.Value, labelMarker.Label);
}
}
// 'ldc': pushes a constant-pool value (the visitor picks ldc/ldc_w/ldc2_w).
// Note: does not override ConstantValue even though it stores one.
class LoadConstantInstruction : Instruction {
private Object constantValue;
LoadConstantInstruction(Object constantValue)
: super(Opcode.Ldc) {
this.constantValue = constantValue;
}
override void accept(MethodVisitor visitor) {
visitor.visitLdcInsn(constantValue);
}
}
// Field access (getfield/putfield/getstatic/putstatic); emits the owner's
// internal name, the field name and its JVM descriptor.
class FieldInstruction : Instruction {
private FieldInfo field;
FieldInstruction(Opcode opcode, FieldInfo field)
: super(opcode) {
this.field = field;
}
public override FieldInfo Field {
get {
return field;
}
}
override void accept(MethodVisitor visitor) {
visitor.visitFieldInsn(this.Opcode.Value, field.DeclaringType.FullName, field.Name, field.Descriptor);
}
}
// Method invocation (invokevirtual/special/static/interface).
class MethodInstruction : Instruction {
private MethodInfo method;
MethodInstruction(Opcode opcode, MethodInfo method)
: super(opcode) {
this.method = method;
}
public override MethodInfo Method {
get {
return method;
}
}
override void accept(MethodVisitor visitor) {
visitor.visitMethodInsn(this.Opcode.Value, method.DeclaringType.FullName, method.Name, method.Descriptor);
}
}
// Type-referencing instruction (new/checkcast/instanceof/anewarray).
class TypeInstruction : Instruction {
private TypeInfo type;
TypeInstruction(Opcode opcode, TypeInfo type)
: super(opcode) {
this.type = type;
}
public override TypeInfo Type {
get {
return type;
}
}
override void accept(MethodVisitor visitor) {
if (type.IsGenericParameter) {
// Generic parameters are erased: strip the leading 'L' and trailing ';'
// from the descriptor to get the erasure's internal name.
var desc = type.Descriptor;
visitor.visitTypeInsn(this.Opcode.Value, desc.substring(1, desc.length() - 1));
} else {
visitor.visitTypeInsn(this.Opcode.Value, type.FullName);
}
}
}
// 'lookupswitch': sparse key set, one branch target per key plus a default.
class LookupSwitchInstruction : Instruction {
private int[] keys;
private LabelMarker defaultLabel;
private LabelMarker[] labels;
LookupSwitchInstruction(int[] keys, LabelMarker[] labels, LabelMarker defaultLabel)
: super(Opcode.Lookupswitch) {
this.keys = keys;
this.labels = labels;
this.defaultLabel = defaultLabel;
}
public override int[] Keys {
get {
return keys;
}
}
public override LabelMarker[] Labels {
get {
return labels;
}
}
public override LabelMarker DefaultLabel {
get {
return defaultLabel;
}
}
override void accept(MethodVisitor visitor) {
// NOTE(review): 'sizeof' applied to an array presumably means element count
// in this dialect (not byte size as in C#) — confirm.
var t = new Label[sizeof(labels)];
for (int i = 0; i < sizeof(t); i++) {
t[i] = labels[i].Label;
}
visitor.visitLookupSwitchInsn(defaultLabel.Label, keys, t);
}
}
// 'tableswitch': dense key range [minimumKey, maximumKey] with one target
// per key plus a default.
class TableSwitchInstruction : Instruction {
private int minimumKey;
private int maximumKey;
private LabelMarker[] labels;
private LabelMarker defaultLabel;
TableSwitchInstruction(int minimumKey, int maximumKey, LabelMarker[] labels, LabelMarker defaultLabel)
: super(Opcode.Tableswitch) {
this.minimumKey = minimumKey;
this.maximumKey = maximumKey;
this.labels = labels;
this.defaultLabel = defaultLabel;
}
public override int MinimumKey {
get {
return minimumKey;
}
}
public override int MaximumKey {
get {
return maximumKey;
}
}
public override LabelMarker[] Labels {
get {
return labels;
}
}
public override LabelMarker DefaultLabel {
get {
return defaultLabel;
}
}
override void accept(MethodVisitor visitor) {
// See the 'sizeof' note on LookupSwitchInstruction.accept.
var t = new Label[sizeof(labels)];
for (int i = 0; i < sizeof(t); i++) {
t[i] = labels[i].Label;
}
visitor.visitTableSwitchInsn(minimumKey, maximumKey, defaultLabel.Label, t);
}
}
// 'multianewarray': allocates a multi-dimensional array. Note it stores
// 'dimensions' but does not override the Dimensions accessor.
class MultiNewArrayInstruction : Instruction {
private TypeInfo type;
private int dimensions;
MultiNewArrayInstruction(TypeInfo type, int dimensions)
: super(Opcode.Multianewarray) {
this.type = type;
this.dimensions = dimensions;
}
override void accept(MethodVisitor visitor) {
visitor.visitMultiANewArrayInsn(type.FullName, dimensions);
}
}
}
| |
using System.Text.RegularExpressions;
using System.Globalization;
namespace Signum.Entities.UserAssets;
public class SmartDateTimeFilterValueConverter : IFilterValueConverter
{
public class SmartDateTimeSpan
{
// A component is either a plain number or a "+n"/"-n" relative offset.
static Regex partRegex = new Regex(@"^((\+\d+)|(-\d+)|(\d+))$");
// Day-only extras: a weekday name or "max" (last day of month), with an
// optional "+n"/"-n" day offset appended.
static Regex dayComplexRegex = new Regex(@"^(?<text>sun|mon|tue|wed|thu|fri|sat|max)(?<inc>[+-]\d+)?$", RegexOptions.IgnoreCase);
// Overall shape: "year/month/day hour:minute:second".
static Regex regex = new Regex(@"^(?<year>.+)/(?<month>.+)/(?<day>.+) (?<hour>.+):(?<minute>.+):(?<second>.+)$", RegexOptions.IgnoreCase);
// Each component keeps its raw textual rule (placeholder, offset, or value).
public string Year;
public string Month;
public string Day;
public string Hour;
public string Minute;
public string Second;
/// <summary>
/// Parses "yyyy/mm/dd hh:mm:ss"-shaped smart rules into a span.
/// Returns null for an empty input (no value), an Error for bad syntax or an
/// out-of-range component, and a Success with the parsed span otherwise.
/// </summary>
public static Result<SmartDateTimeSpan>? TryParse(string? str)
{
    // An empty filter value means "no value", not a syntax error.
    if (string.IsNullOrEmpty(str))
        return null;

    var match = regex.Match(str);
    if (!match.Success)
        return new Result<SmartDateTimeSpan>.Error("Invalid Format: yyyy/mm/dd hh:mm:ss");

    var parsed = new SmartDateTimeSpan();

    // Validate each component in order; the first failure short-circuits the
    // rest, exactly like the ??-chain it replaces.
    string? problem = Assert(match, "year", "yyyy", 0, int.MaxValue, out parsed.Year);
    problem ??= Assert(match, "month", "mm", 1, 12, out parsed.Month);
    problem ??= Assert(match, "day", "dd", 1, 31, out parsed.Day);
    problem ??= Assert(match, "hour", "hh", 0, 23, out parsed.Hour);
    problem ??= Assert(match, "minute", "mm", 0, 59, out parsed.Minute);
    problem ??= Assert(match, "second", "ss", 0, 59, out parsed.Second);

    if (problem.HasText())
        return new Result<SmartDateTimeSpan>.Error(problem);

    return new Result<SmartDateTimeSpan>.Success(parsed);
}
/// <summary>
/// Validates one captured component and stores its raw text into
/// <paramref name="result"/>. Returns null when valid, otherwise an error
/// message for the user.
/// </summary>
static string? Assert(Match m, string groupName, string defaultValue, int minValue, int maxValue, out string result)
{
    result = m.Groups[groupName].Value;
    if (string.IsNullOrEmpty(result))
        return "{0} has no value".FormatWith(groupName);

    // The placeholder itself ("yyyy", "mm", ...) means "keep the current value".
    if (defaultValue == result)
        return null;

    if (partRegex.IsMatch(result))
    {
        // "+n" / "-n" are relative offsets and are intentionally unbounded.
        if (result.Contains("+") || result.Contains("-"))
            return null;

        // Invariant parse: the rule text is machine syntax, never localized.
        int val = int.Parse(result, CultureInfo.InvariantCulture);
        if (minValue <= val && val <= maxValue)
            return null;

        return "{0} must be between {1} and {2}".FormatWith(groupName, minValue, maxValue);
    }

    if (groupName == "day" && dayComplexRegex.IsMatch(result))
        return null;

    // FIX: the hint previously read "(max|sun|mon|tue|wed|fri|sat|)" — it
    // omitted "thu" and carried a stray trailing '|', disagreeing with
    // dayComplexRegex, which accepts sun..sat plus max.
    string options = new[] { defaultValue, "const", "+inc", "-dec", groupName == "day" ? "(max|sun|mon|tue|wed|thu|fri|sat)(+inc|-dec)?" : null }.NotNull().Comma(" or ");
    return "'{0}' is not a valid {1}. Try {2} instead".FormatWith(result, groupName, options);
}
// Resolves this span of smart rules against Clock.Now into a concrete
// DateTime, carrying any component overflow (e.g. minute 75) into the next
// larger unit.
public DateTime ToDateTime()
{
DateTime now = Clock.Now;
int year = Mix(now.Year, Year, "yyyy");
int month = Mix(now.Month, Month, "mm");
int day;
// Day supports extra forms: "max" and weekday names, optionally "+n"/"-n".
var m = dayComplexRegex.Match(Day);
if (m.Success)
{
var text = m.Groups["text"].Value.ToLower();
var inc = m.Groups["inc"].Value?.ToLower();
if (text == "max")
{
// Normalize month first so DaysInMonth gets a valid (year, month).
year += MonthDivMod(ref month);
day = DateTime.DaysInMonth(year, month);
}
else
{
var dayOfWeek =
text == "sun" ? DayOfWeek.Sunday :
text == "mon" ? DayOfWeek.Monday :
text == "tue" ? DayOfWeek.Tuesday :
text == "wed" ? DayOfWeek.Wednesday :
text == "thu" ? DayOfWeek.Thursday :
text == "fri" ? DayOfWeek.Friday :
text == "sat" ? DayOfWeek.Saturday :
throw new InvalidOperationException("Unexpected text: " + text);
year += MonthDivMod(ref month);
// Anchor at the start of the current week (culture's first weekday),
// then step to the requested weekday and apply the optional offset.
var date = new DateTime(year, month, now.Day).WeekStart().AddDays(((int)dayOfWeek - (int)CultureInfo.CurrentCulture.DateTimeFormat.FirstDayOfWeek));
if(inc.HasText())
{
date = date.AddDays(int.Parse(inc));
}
year = date.Year;
month = date.Month;
day = date.Day;
}
}
else
{
day = Mix(now.Day, Day, "dd");
}
int hour = Mix(now.Hour, Hour, "hh");
int minute = Mix(now.Minute, Minute, "mm");
int second = Mix(now.Second, Second, "ss");
// Cascade overflow upward: seconds -> minutes -> hours -> days -> date.
minute += second.DivMod(60, out second);
hour += minute.DivMod(60, out minute);
day += hour.DivMod(24, out hour);
DateDivMod(ref year, ref month, ref day);
return new DateTime(year, month, day, hour, minute, second);
}
// Normalizes a (year, month, day) triple so the day falls inside a real
// calendar month, carrying overflow/underflow through months and years.
private static void DateDivMod(ref int year, ref int month, ref int day)
{
    // Month must be within 1..12 before DaysInMonth can be consulted.
    year += MonthDivMod(ref month);

    // Carry excess days forward, one month at a time.
    while (true)
    {
        int monthLength = DateTime.DaysInMonth(year, month);
        if (day <= monthLength)
            break;
        day -= monthLength;
        month++;
        year += MonthDivMod(ref month);
    }

    // Borrow days backward for non-positive day values.
    while (day <= 0)
    {
        month--;
        year += MonthDivMod(ref month);
        day += DateTime.DaysInMonth(year, month);
    }
}
// Normalizes 'month' into the range [1, 12] and returns how many whole years
// were carried (positive) or borrowed (negative) in the process.
private static int MonthDivMod(ref int month)
{
    int carriedYears = 0;

    while (month > 12)
    {
        month -= 12;
        carriedYears++;
    }

    while (month < 1)
    {
        month += 12;
        carriedYears--;
    }

    return carriedYears;
}
// Resolves one component rule against the current component value:
// - the placeholder pattern itself (e.g. "yyyy") keeps the current value,
// - "+n" / "-n" offset the current value,
// - anything else is parsed as an absolute value.
static int Mix(int current, string rule, string pattern)
{
    if (string.Equals(rule, pattern, StringComparison.InvariantCultureIgnoreCase))
        return current;

    // Ordinal + invariant: the rule is machine syntax, not localized text
    // (CA1305/CA1310 — avoids surprises under e.g. Turkish or Arabic locales).
    if (rule.StartsWith("+", StringComparison.Ordinal))
        return current + int.Parse(rule.Substring(1), CultureInfo.InvariantCulture);

    if (rule.StartsWith("-", StringComparison.Ordinal))
        return current - int.Parse(rule.Substring(1), CultureInfo.InvariantCulture);

    return int.Parse(rule, CultureInfo.InvariantCulture);
}
/// <summary>
/// Builds a smart span that, re-evaluated later against the clock, reproduces
/// <paramref name="date"/> relative to <paramref name="now"/> where possible
/// (same or ±1 components become rules, everything else is absolute).
/// </summary>
public static SmartDateTimeSpan Substract(DateTime date, DateTime now)
{
    bool isLastDayOfMonth = date.Day == DateTime.DaysInMonth(date.Year, date.Month);

    var span = new SmartDateTimeSpan
    {
        Year = Diference(now.Year - date.Year, "yyyy") ?? date.Year.ToString("0000"),
        Month = Diference(now.Month - date.Month, "mm") ?? date.Month.ToString("00"),
        Day = isLastDayOfMonth ? "max" : Diference(now.Day - date.Day, "dd") ?? date.Day.ToString("00"),
    };

    if (date != date.Date)
    {
        // The value carries a time-of-day component: encode it as well.
        span.Hour = Diference(now.Hour - date.Hour, "hh") ?? date.Hour.ToString("00");
        span.Minute = Diference(now.Minute - date.Minute, "mm") ?? date.Minute.ToString("00");
        span.Second = Diference(now.Second - date.Second, "ss") ?? date.Second.ToString("00");
    }
    else
    {
        // Midnight: a plain date, so pin the time part to 00:00:00.
        span.Hour = span.Minute = span.Second = "00";
    }

    return span;
}
// Encodes a fixed date as an all-absolute span (zero-padded components,
// no relative rules).
public static SmartDateTimeSpan Simple(DateTime date) => new SmartDateTimeSpan
{
    Year = date.Year.ToString("0000"),
    Month = date.Month.ToString("00"),
    Day = date.Day.ToString("00"),
    Hour = date.Hour.ToString("00"),
    Minute = date.Minute.ToString("00"),
    Second = date.Second.ToString("00"),
};
// Encodes a component difference (now - target) as a smart rule:
// 0 -> the placeholder pattern, ±1 -> the inverse relative rule,
// anything else -> null so the caller falls back to the absolute value.
static string? Diference(int diference, string pattern) => diference switch
{
    0 => pattern,
    1 => "-1",
    -1 => "+1",
    _ => null,
};
// Renders the span back into its canonical "y/m/d h:m:s" rule text
// (interpolation produces the same string FormatWith did).
public override string ToString() => $"{Year}/{Month}/{Day} {Hour}:{Minute}:{Second}";
}
/// <summary>
/// Serializes a date-like filter value into its smart-rule text relative to
/// the current clock. Returns null for a null value.
/// </summary>
public Result<string?>? TryToStringValue(object? value, Type type)
{
    if (value == null)
        return null;

    // Normalize the incoming value to a DateTime before encoding it.
    DateTime dateTime = value switch
    {
        string s => DateTime.ParseExact(s, type == typeof(DateTime) ? "o" : "yyyy-MM-dd", CultureInfo.InvariantCulture),
        DateOnly d => d.ToDateTime(),
        DateTime dt => dt,
        _ => throw new UnexpectedValueException(value),
    };

    var span = SmartDateTimeSpan.Substract(dateTime, Clock.Now);
    return new Result<string?>.Success(span.ToString());
}
/// <summary>
/// Parses smart-rule text into a concrete DateTime (or DateOnly when the
/// target type asks for it). Null input yields null; bad syntax yields Error.
/// </summary>
public Result<object?>? TryParseValue(string? value, Type type)
{
    var parsed = SmartDateTimeSpan.TryParse(value);

    return parsed switch
    {
        null => null,
        Result<SmartDateTimeSpan>.Error e => new Result<object?>.Error(e.ErrorText),
        Result<SmartDateTimeSpan>.Success s => new Result<object?>.Success(
            type.UnNullify() == typeof(DateOnly)
                ? (object)s.Value.ToDateTime().ToDateOnly()
                : (object)s.Value.ToDateTime()),
        _ => throw new UnexpectedValueException(parsed),
    };
}
}
| |
//
// FrameBackend.cs
//
// Author:
// Lluis Sanchez <lluis@xamarin.com>
//
// Copyright (c) 2011 Xamarin Inc
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using Xwt.Backends;
using Xwt.Drawing;
namespace Xwt.GtkBackend
{
public class FrameBackend: WidgetBackend, IFrameBackend
{
	// Extra alignment inserted between the Gtk.Frame and its child to implement
	// padding; only used in WidgetBox mode (HeaderBox handles padding itself).
	Gtk.Alignment paddingAlign;
	string label;
	Color? borderColor;

	public FrameBackend ()
	{
		Widget = new Gtk.Frame ();
		Widget.Show ();
	}

	protected new Gtk.Bin Widget {
		get { return (Gtk.Bin)base.Widget; }
		set { base.Widget = value; }
	}

	protected new IFrameEventSink EventSink {
		get { return (IFrameEventSink)base.EventSink; }
	}

	#region IFrameBackend implementation

	/// <summary>Replaces the frame's single child widget (null detaches it).</summary>
	public void SetContent (IWidgetBackend child)
	{
		// The child hangs off the padding alignment when one exists.
		Gtk.Bin parent = paddingAlign != null ? paddingAlign : Widget;
		if (parent.Child != null) {
			RemoveChildPlacement (parent.Child);
			parent.Remove (parent.Child);
		}
		if (child != null) {
			var w = GetWidgetWithPlacement (child);
			parent.Child = w;
		} else {
			parent.Child = null;
		}
	}

	/// <summary>
	/// Switches the backing widget between a HeaderBox (Custom: borders and
	/// padding drawn by Xwt) and a Gtk.Frame (WidgetBox: native frame with an
	/// optional label), migrating the current child to the new container.
	/// </summary>
	public void SetFrameType (FrameType type)
	{
		Frame f = (Frame) Frontend;

		switch (type) {
		case FrameType.Custom:
			if (!(Widget is HeaderBox)) {
				HeaderBox box = new HeaderBox ();
				box.Show ();
				box.BackgroundColor = UsingCustomBackgroundColor ? (Color?)BackgroundColor : null;
				box.SetMargins ((int)f.BorderWidthTop, (int)f.BorderWidthBottom, (int)f.BorderWidthLeft, (int)f.BorderWidthRight);
				box.SetPadding ((int)f.Padding.Top, (int)f.Padding.Bottom, (int)f.Padding.Left, (int)f.Padding.Right);
				if (borderColor != null)
					box.SetBorderColor (borderColor.Value);

				// Re-parent the existing child (it may sit inside the padding align).
				var c = paddingAlign != null ? paddingAlign.Child : Widget.Child;
				if (c != null) {
					((Gtk.Container)c.Parent).Remove (c);
					box.Add (c);
				}
				Widget = box;

				// HeaderBox does its own padding; the alignment is no longer needed.
				if (paddingAlign != null) {
					paddingAlign.Destroy ();
					paddingAlign = null;
				}
			}
			break;
		case FrameType.WidgetBox:
			if (!(Widget is Gtk.Frame)) {
				var c = Widget.Child;
				if (c != null)
					Widget.Remove (c);
				Gtk.Frame gf = new Gtk.Frame ();
				if (!string.IsNullOrEmpty (label))
					gf.Label = label;
				if (f.Padding.HorizontalSpacing != 0 || f.Padding.VerticalSpacing != 0) {
					paddingAlign = new Gtk.Alignment (0, 0, 1, 1);
					paddingAlign.Show ();
					UpdatePaddingAlign (f.Padding.Top, f.Padding.Bottom, f.Padding.Left, f.Padding.Right);
					if (c != null)
						paddingAlign.Add (c);
					gf.Add (paddingAlign);
				} else {
					if (c != null)
						gf.Add (c);
				}
				gf.Show ();
				Widget = gf;
			}
			break;
		}
	}

	// FIX: renamed from the typo'd 'UreatePaddingAlign' — it updates the
	// padding values of the already-created alignment. Private, so the rename
	// is invisible outside this class (both call sites are in this class).
	void UpdatePaddingAlign (double top, double bottom, double left, double right)
	{
		paddingAlign.TopPadding = (uint) top;
		paddingAlign.BottomPadding = (uint) bottom;
		paddingAlign.LeftPadding = (uint) left;
		paddingAlign.RightPadding = (uint) right;
	}

	/// <summary>Sets border widths; only effective in Custom (HeaderBox) mode.</summary>
	public void SetBorderSize (double left, double right, double top, double bottom)
	{
		HeaderBox hb = Widget as HeaderBox;
		if (hb != null) {
			hb.SetMargins ((int)top, (int)bottom, (int)left, (int)right);
		}
	}

	/// <summary>
	/// Sets the padding between the frame border and the child, using the
	/// HeaderBox's own padding in Custom mode or a Gtk.Alignment otherwise.
	/// </summary>
	public void SetPadding (double left, double right, double top, double bottom)
	{
		if (Widget is HeaderBox) {
			HeaderBox hb = (HeaderBox) Widget;
			hb.SetPadding ((int)top, (int)bottom, (int)left, (int)right);
			return;
		}

		// All zero and no alignment yet: nothing to create or update.
		if (left == 0 && right == 0 && top == 0 && bottom == 0 && paddingAlign == null)
			return;

		if (paddingAlign == null) {
			// Lazily wrap the current child in an alignment that carries the padding.
			paddingAlign = new Gtk.Alignment (0, 0, 1, 1);
			paddingAlign.Show ();
			var c = Widget.Child;
			if (c != null) {
				Widget.Remove (c);
				paddingAlign.Add (c);
			}
			Widget.Add (paddingAlign);
		}
		UpdatePaddingAlign (top, bottom, left, right);
	}

	/// <summary>Border color; falls back to the theme's dark color when unset.</summary>
	public Color BorderColor {
		get {
			if (borderColor == null)
				return Widget.Style.Dark (Gtk.StateType.Normal).ToXwtValue ();
			else
				return borderColor.Value;
		}
		set {
			borderColor = value;
			// Only HeaderBox renders the border itself.
			HeaderBox hb = Widget as HeaderBox;
			if (hb != null)
				hb.SetBorderColor (value);
		}
	}

	public override Color BackgroundColor {
		get {
			return base.BackgroundColor;
		}
		set {
			base.BackgroundColor = value;
			// Keep the custom-drawn box in sync with the widget background.
			if (Widget is HeaderBox) {
				((HeaderBox)Widget).BackgroundColor = value;
			}
		}
	}

	/// <summary>Frame caption; only shown in WidgetBox (Gtk.Frame) mode.</summary>
	public string Label {
		get {
			return label;
		}
		set {
			label = value;
			if (Widget is Gtk.Frame)
				((Gtk.Frame)Widget).Label = value;
		}
	}

	#endregion
}
// Gtk.Frame specialization that reports unconstrained sizing to children.
class FrameWidget: Gtk.Frame, IConstraintProvider
{
#if !XWT_GTK3
// GTK2 only: pass-through size request (GTK3 uses a different sizing model).
protected override void OnSizeRequested (ref Gtk.Requisition requisition)
{
base.OnSizeRequested (ref requisition);
}
#endif
// A frame never constrains its child in either dimension.
public void GetConstraints (Gtk.Widget target, out SizeConstraint width, out SizeConstraint height)
{
width = height = SizeConstraint.Unconstrained;
}
}
// Implemented by containers that can tell a child widget what size
// constraints apply to it during layout.
public interface IConstraintProvider
{
void GetConstraints (Gtk.Widget target, out SizeConstraint width, out SizeConstraint height);
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Microsoft.Win32.SafeHandles;
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using System.Runtime.InteropServices;
using System.Security.Authentication.ExtendedProtection;
using System.Threading;
internal static partial class Interop
{
internal static partial class HttpApi
{
// http.sys API version requested at initialization (v2.0).
internal static readonly HTTPAPI_VERSION s_version = new HTTPAPI_VERSION() { HttpApiMajorVersion = 2, HttpApiMinorVersion = 0 };
// True when HttpInitialize succeeded for s_version on this machine.
internal static readonly bool s_supported = InitHttpApi(s_version);
internal static IPEndPoint s_any = new IPEndPoint(IPAddress.Any, IPEndPoint.MinPort);
internal static IPEndPoint s_ipv6Any = new IPEndPoint(IPAddress.IPv6Any, IPEndPoint.MinPort);
// Native sockaddr_in / sockaddr_in6 sizes in bytes.
internal const int IPv4AddressSize = 16;
internal const int IPv6AddressSize = 28;
// Initializes http.sys in server mode; the API is considered supported only
// when the native call reports success.
private static unsafe bool InitHttpApi(HTTPAPI_VERSION version)
{
    return HttpInitialize(version, (uint)HTTP_FLAGS.HTTP_INITIALIZE_SERVER, null) == ERROR_SUCCESS;
}
// Interop mirror of the native HTTP_VERSION structure.
// NOTE: all struct layouts below must match http.h byte-for-byte — do not
// reorder, rename, or retype fields.
[StructLayout(LayoutKind.Sequential)]
internal struct HTTP_VERSION
{
internal ushort MajorVersion;
internal ushort MinorVersion;
}
// Discriminator for the pInfo payload of HTTP_RESPONSE_INFO.
internal enum HTTP_RESPONSE_INFO_TYPE
{
HttpResponseInfoTypeMultipleKnownHeaders,
HttpResponseInfoTypeAuthenticationProperty,
HttpResponseInfoTypeQosProperty,
}
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct HTTP_RESPONSE_INFO
{
internal HTTP_RESPONSE_INFO_TYPE Type;
internal uint Length;
internal void* pInfo;
}
// Response headers: fixed slots for the known response headers (indexed by
// HttpResponseHeader) plus an out-of-band list for unknown ones.
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct HTTP_RESPONSE_HEADERS
{
internal ushort UnknownHeaderCount;
internal HTTP_UNKNOWN_HEADER* pUnknownHeaders;
internal ushort TrailerCount;
internal HTTP_UNKNOWN_HEADER* pTrailers;
internal HTTP_KNOWN_HEADER KnownHeaders;
internal HTTP_KNOWN_HEADER KnownHeaders_02;
internal HTTP_KNOWN_HEADER KnownHeaders_03;
internal HTTP_KNOWN_HEADER KnownHeaders_04;
internal HTTP_KNOWN_HEADER KnownHeaders_05;
internal HTTP_KNOWN_HEADER KnownHeaders_06;
internal HTTP_KNOWN_HEADER KnownHeaders_07;
internal HTTP_KNOWN_HEADER KnownHeaders_08;
internal HTTP_KNOWN_HEADER KnownHeaders_09;
internal HTTP_KNOWN_HEADER KnownHeaders_10;
internal HTTP_KNOWN_HEADER KnownHeaders_11;
internal HTTP_KNOWN_HEADER KnownHeaders_12;
internal HTTP_KNOWN_HEADER KnownHeaders_13;
internal HTTP_KNOWN_HEADER KnownHeaders_14;
internal HTTP_KNOWN_HEADER KnownHeaders_15;
internal HTTP_KNOWN_HEADER KnownHeaders_16;
internal HTTP_KNOWN_HEADER KnownHeaders_17;
internal HTTP_KNOWN_HEADER KnownHeaders_18;
internal HTTP_KNOWN_HEADER KnownHeaders_19;
internal HTTP_KNOWN_HEADER KnownHeaders_20;
internal HTTP_KNOWN_HEADER KnownHeaders_21;
internal HTTP_KNOWN_HEADER KnownHeaders_22;
internal HTTP_KNOWN_HEADER KnownHeaders_23;
internal HTTP_KNOWN_HEADER KnownHeaders_24;
internal HTTP_KNOWN_HEADER KnownHeaders_25;
internal HTTP_KNOWN_HEADER KnownHeaders_26;
internal HTTP_KNOWN_HEADER KnownHeaders_27;
internal HTTP_KNOWN_HEADER KnownHeaders_28;
internal HTTP_KNOWN_HEADER KnownHeaders_29;
internal HTTP_KNOWN_HEADER KnownHeaders_30;
}
// A known header slot: raw (unparsed, non-null-terminated) value + length.
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct HTTP_KNOWN_HEADER
{
internal ushort RawValueLength;
internal sbyte* pRawValue;
}
// A header http.sys did not recognize: name and value as raw byte strings.
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct HTTP_UNKNOWN_HEADER
{
internal ushort NameLength;
internal ushort RawValueLength;
internal sbyte* pName;
internal sbyte* pRawValue;
}
// Where an HTTP_DATA_CHUNK's payload lives.
internal enum HTTP_DATA_CHUNK_TYPE : int
{
HttpDataChunkFromMemory = 0,
HttpDataChunkFromFileHandle = 1,
HttpDataChunkFromFragmentCache = 2,
HttpDataChunkMaximum = 3,
}
// One entity-body chunk. Size pinned to 32 to match the native union layout;
// only the FromMemory arm (pBuffer/BufferLength) is mapped here, with p0 as
// 64-bit alignment padding.
[StructLayout(LayoutKind.Sequential, Size = 32)]
internal unsafe struct HTTP_DATA_CHUNK
{
internal HTTP_DATA_CHUNK_TYPE DataChunkType;
internal uint p0;
internal byte* pBuffer;
internal uint BufferLength;
}
// Native HTTP_RESPONSE (v1 part): status line, headers, and body chunks.
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct HTTP_RESPONSE
{
internal uint Flags;
internal HTTP_VERSION Version;
internal ushort StatusCode;
internal ushort ReasonLength;
internal sbyte* pReason;
internal HTTP_RESPONSE_HEADERS Headers;
internal ushort EntityChunkCount;
internal HTTP_DATA_CHUNK* pEntityChunks;
internal ushort ResponseInfoCount;
internal HTTP_RESPONSE_INFO* pResponseInfo;
}
// Tagged extension record attached to a v2 request.
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct HTTP_REQUEST_INFO
{
internal HTTP_REQUEST_INFO_TYPE InfoType;
internal uint InfoLength;
internal void* pInfo;
}
internal enum HTTP_REQUEST_INFO_TYPE
{
HttpRequestInfoTypeAuth,
HttpRequestInfoTypeChannelBind,
HttpRequestInfoTypeSslProtocol,
HttpRequestInfoTypeSslTokenBinding
}
// HTTP verbs as parsed by http.sys; HttpVerbUnknown requires pUnknownVerb.
internal enum HTTP_VERB : int
{
HttpVerbUnparsed = 0,
HttpVerbUnknown = 1,
HttpVerbInvalid = 2,
HttpVerbOPTIONS = 3,
HttpVerbGET = 4,
HttpVerbHEAD = 5,
HttpVerbPOST = 6,
HttpVerbPUT = 7,
HttpVerbDELETE = 8,
HttpVerbTRACE = 9,
HttpVerbCONNECT = 10,
HttpVerbTRACK = 11,
HttpVerbMOVE = 12,
HttpVerbCOPY = 13,
HttpVerbPROPFIND = 14,
HttpVerbPROPPATCH = 15,
HttpVerbMKCOL = 16,
HttpVerbLOCK = 17,
HttpVerbUNLOCK = 18,
HttpVerbSEARCH = 19,
HttpVerbMaximum = 20,
}
// Flat mirror of the Winsock sockaddr header (family + 14 data bytes);
// fields are expanded because fixed buffers are avoided here.
[StructLayout(LayoutKind.Sequential)]
internal struct SOCKADDR
{
internal ushort sa_family;
internal byte sa_data;
internal byte sa_data_02;
internal byte sa_data_03;
internal byte sa_data_04;
internal byte sa_data_05;
internal byte sa_data_06;
internal byte sa_data_07;
internal byte sa_data_08;
internal byte sa_data_09;
internal byte sa_data_10;
internal byte sa_data_11;
internal byte sa_data_12;
internal byte sa_data_13;
internal byte sa_data_14;
}
// Remote/local socket addresses of the connection carrying a request.
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct HTTP_TRANSPORT_ADDRESS
{
internal SOCKADDR* pRemoteAddress;
internal SOCKADDR* pLocalAddress;
}
// Request headers: 41 fixed known-header slots (indexed per http.h's
// HTTP_HEADER_ID) plus the unknown-header list.
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct HTTP_REQUEST_HEADERS
{
internal ushort UnknownHeaderCount;
internal HTTP_UNKNOWN_HEADER* pUnknownHeaders;
internal ushort TrailerCount;
internal HTTP_UNKNOWN_HEADER* pTrailers;
internal HTTP_KNOWN_HEADER KnownHeaders;
internal HTTP_KNOWN_HEADER KnownHeaders_02;
internal HTTP_KNOWN_HEADER KnownHeaders_03;
internal HTTP_KNOWN_HEADER KnownHeaders_04;
internal HTTP_KNOWN_HEADER KnownHeaders_05;
internal HTTP_KNOWN_HEADER KnownHeaders_06;
internal HTTP_KNOWN_HEADER KnownHeaders_07;
internal HTTP_KNOWN_HEADER KnownHeaders_08;
internal HTTP_KNOWN_HEADER KnownHeaders_09;
internal HTTP_KNOWN_HEADER KnownHeaders_10;
internal HTTP_KNOWN_HEADER KnownHeaders_11;
internal HTTP_KNOWN_HEADER KnownHeaders_12;
internal HTTP_KNOWN_HEADER KnownHeaders_13;
internal HTTP_KNOWN_HEADER KnownHeaders_14;
internal HTTP_KNOWN_HEADER KnownHeaders_15;
internal HTTP_KNOWN_HEADER KnownHeaders_16;
internal HTTP_KNOWN_HEADER KnownHeaders_17;
internal HTTP_KNOWN_HEADER KnownHeaders_18;
internal HTTP_KNOWN_HEADER KnownHeaders_19;
internal HTTP_KNOWN_HEADER KnownHeaders_20;
internal HTTP_KNOWN_HEADER KnownHeaders_21;
internal HTTP_KNOWN_HEADER KnownHeaders_22;
internal HTTP_KNOWN_HEADER KnownHeaders_23;
internal HTTP_KNOWN_HEADER KnownHeaders_24;
internal HTTP_KNOWN_HEADER KnownHeaders_25;
internal HTTP_KNOWN_HEADER KnownHeaders_26;
internal HTTP_KNOWN_HEADER KnownHeaders_27;
internal HTTP_KNOWN_HEADER KnownHeaders_28;
internal HTTP_KNOWN_HEADER KnownHeaders_29;
internal HTTP_KNOWN_HEADER KnownHeaders_30;
internal HTTP_KNOWN_HEADER KnownHeaders_31;
internal HTTP_KNOWN_HEADER KnownHeaders_32;
internal HTTP_KNOWN_HEADER KnownHeaders_33;
internal HTTP_KNOWN_HEADER KnownHeaders_34;
internal HTTP_KNOWN_HEADER KnownHeaders_35;
internal HTTP_KNOWN_HEADER KnownHeaders_36;
internal HTTP_KNOWN_HEADER KnownHeaders_37;
internal HTTP_KNOWN_HEADER KnownHeaders_38;
internal HTTP_KNOWN_HEADER KnownHeaders_39;
internal HTTP_KNOWN_HEADER KnownHeaders_40;
internal HTTP_KNOWN_HEADER KnownHeaders_41;
}
// Client certificate presented during the TLS handshake (DER-encoded blob
// plus the mapped access token, when certificate mapping is enabled).
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct HTTP_SSL_CLIENT_CERT_INFO
{
internal uint CertFlags;
internal uint CertEncodedSize;
internal byte* pCertEncoded;
internal void* Token;
internal byte CertDeniedByMapper;
}
// TLS connection details attached to a request received over HTTPS.
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct HTTP_SSL_INFO
{
internal ushort ServerCertKeySize;
internal ushort ConnectionKeySize;
internal uint ServerCertIssuerSize;
internal uint ServerCertSubjectSize;
internal sbyte* pServerCertIssuer;
internal sbyte* pServerCertSubject;
internal HTTP_SSL_CLIENT_CERT_INFO* pClientCertInfo;
internal uint SslClientCertNegotiated;
}
// Native HTTP_REQUEST (v1 part): identifiers, verb, URL forms, addresses,
// headers, and any already-received entity data.
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct HTTP_REQUEST
{
internal uint Flags;
internal ulong ConnectionId;
internal ulong RequestId;
internal ulong UrlContext;
internal HTTP_VERSION Version;
internal HTTP_VERB Verb;
internal ushort UnknownVerbLength;
internal ushort RawUrlLength;
internal sbyte* pUnknownVerb;
internal sbyte* pRawUrl;
internal HTTP_COOKED_URL CookedUrl;
internal HTTP_TRANSPORT_ADDRESS Address;
internal HTTP_REQUEST_HEADERS Headers;
internal ulong BytesReceived;
internal ushort EntityChunkCount;
internal HTTP_DATA_CHUNK* pEntityChunks;
internal ulong RawConnectionId;
internal HTTP_SSL_INFO* pSslInfo;
}
// v2 request: the v1 request followed by a variable list of info records.
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct HTTP_REQUEST_V2
{
internal HTTP_REQUEST RequestV1;
internal ushort RequestInfoCount;
internal HTTP_REQUEST_INFO* pRequestInfo;
}
// Canonicalized ("cooked") URL split into UTF-16 full/host/path/query parts.
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct HTTP_COOKED_URL
{
internal ushort FullUrlLength;
internal ushort HostLength;
internal ushort AbsPathLength;
internal ushort QueryStringLength;
internal ushort* pFullUrl;
internal ushort* pHost;
internal ushort* pAbsPath;
internal ushort* pQueryString;
}
// Channel binding token (CBT) info for extended protection.
[StructLayout(LayoutKind.Sequential)]
internal struct HTTP_REQUEST_CHANNEL_BIND_STATUS
{
internal IntPtr ServiceName;
internal IntPtr ChannelToken;
internal uint ChannelTokenSize;
internal uint Flags;
}
internal enum HTTP_SERVER_PROPERTY
{
HttpServerAuthenticationProperty,
HttpServerLoggingProperty,
HttpServerQosProperty,
HttpServerTimeoutsProperty,
HttpServerQueueLengthProperty,
HttpServerStateProperty,
HttpServer503VerbosityProperty,
HttpServerBindingProperty,
HttpServerExtendedAuthenticationProperty,
HttpServerListenEndpointProperty,
HttpServerChannelBindProperty,
HttpServerProtectionLevelProperty,
}
// Token-binding info record attached to a request (HTTP_REQUEST_INFO payload).
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct HTTP_REQUEST_TOKEN_BINDING_INFO
{
    public byte* TokenBinding;      // raw token-binding message
    public uint TokenBindingSize;   // byte length of TokenBinding
    public byte* TlsUnique;         // tls-unique channel binding value
    public uint TlsUniqueSize;      // byte length of TlsUnique
    public IntPtr KeyType;          // native key-type identifier
}

// Values mirror the native TOKENBINDING_* enumerations; the explicit
// numeric values are part of the wire/native contract.
internal enum TOKENBINDING_HASH_ALGORITHM : byte
{
    TOKENBINDING_HASH_ALGORITHM_SHA256 = 4,
}

internal enum TOKENBINDING_SIGNATURE_ALGORITHM : byte
{
    TOKENBINDING_SIGNATURE_ALGORITHM_RSA = 1,
    TOKENBINDING_SIGNATURE_ALGORITHM_ECDSAP256 = 3,
}

internal enum TOKENBINDING_TYPE : byte
{
    TOKENBINDING_TYPE_PROVIDED = 0,
    TOKENBINDING_TYPE_REFERRED = 1,
}

internal enum TOKENBINDING_EXTENSION_FORMAT
{
    TOKENBINDING_EXTENSION_FORMAT_UNDEFINED = 0,
}
// Identifies one token binding (type plus the algorithms used).
[StructLayout(LayoutKind.Sequential)]
internal struct TOKENBINDING_IDENTIFIER
{
    public TOKENBINDING_TYPE bindingType;
    public TOKENBINDING_HASH_ALGORITHM hashAlgorithm;
    public TOKENBINDING_SIGNATURE_ALGORITHM signatureAlgorithm;
}

// One decoded token-binding result; identifierData points into native memory.
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct TOKENBINDING_RESULT_DATA
{
    public uint identifierSize;                    // byte length of identifierData
    public TOKENBINDING_IDENTIFIER* identifierData;
    public TOKENBINDING_EXTENSION_FORMAT extensionFormat;
    public uint extensionSize;                     // byte length of extensionData
    public IntPtr extensionData;
}

// Array header returned by the token-binding decoder.
[StructLayout(LayoutKind.Sequential)]
internal unsafe struct TOKENBINDING_RESULT_LIST
{
    public uint resultCount;                       // entries in resultData
    public TOKENBINDING_RESULT_DATA* resultData;
}
// Flag values for several distinct HTTP.SYS APIs collapsed into one enum;
// duplicate numeric values are intentional because each flag is only
// meaningful for its own API (e.g. receive vs. send vs. initialize).
[Flags]
internal enum HTTP_FLAGS : uint
{
    NONE = 0x00000000,
    HTTP_RECEIVE_REQUEST_FLAG_COPY_BODY = 0x00000001,
    HTTP_RECEIVE_SECURE_CHANNEL_TOKEN = 0x00000001,
    HTTP_SEND_RESPONSE_FLAG_DISCONNECT = 0x00000001,
    HTTP_SEND_RESPONSE_FLAG_MORE_DATA = 0x00000002,
    HTTP_SEND_RESPONSE_FLAG_BUFFER_DATA = 0x00000004,
    HTTP_SEND_RESPONSE_FLAG_RAW_HEADER = 0x00000004,
    HTTP_SEND_REQUEST_FLAG_MORE_DATA = 0x00000001,
    HTTP_PROPERTY_FLAG_PRESENT = 0x00000001,
    HTTP_INITIALIZE_SERVER = 0x00000001,
    HTTP_INITIALIZE_CBT = 0x00000004,
    HTTP_SEND_RESPONSE_FLAG_OPAQUE = 0x00000040,
}

// Count of known request headers (UserAgent is the last request header enum
// member) and known response headers (WwwAuthenticate is the last one); used
// to bound iteration over the native known-header arrays.
private const int HttpHeaderRequestMaximum = (int)HttpRequestHeader.UserAgent + 1;
private const int HttpHeaderResponseMaximum = (int)HttpResponseHeader.WwwAuthenticate + 1;
// Maps HTTP.SYS known-request-header indexes to their wire names. Array order
// must match the native known-header layout, so it is never re-sorted.
internal static class HTTP_REQUEST_HEADER_ID
{
    private static readonly string[] s_strings =
    {
        "Cache-Control",
        "Connection",
        "Date",
        "Keep-Alive",
        "Pragma",
        "Trailer",
        "Transfer-Encoding",
        "Upgrade",
        "Via",
        "Warning",
        "Allow",
        "Content-Length",
        "Content-Type",
        "Content-Encoding",
        "Content-Language",
        "Content-Location",
        "Content-MD5",
        "Content-Range",
        "Expires",
        "Last-Modified",
        "Accept",
        "Accept-Charset",
        "Accept-Encoding",
        "Accept-Language",
        "Authorization",
        "Cookie",
        "Expect",
        "From",
        "Host",
        "If-Match",
        "If-Modified-Since",
        "If-None-Match",
        "If-Range",
        "If-Unmodified-Since",
        "Max-Forwards",
        "Proxy-Authorization",
        "Referer",
        "Range",
        "Te",
        "Translate",
        "User-Agent",
    };

    // Returns the header name for a native known-header index.
    internal static string ToString(int position) => s_strings[position];
}
// Selector for the individual timeout fields of HTTP_TIMEOUT_LIMIT_INFO;
// order must match the native HTTP_TIMEOUT_TYPE enumeration.
internal enum HTTP_TIMEOUT_TYPE
{
    EntityBody,
    DrainEntityBody,
    RequestQueue,
    IdleConnection,
    HeaderWait,
    MinSendRate,
}

// Server timeout configuration; timeouts are in seconds, MinSendRate in
// bytes/second (native HTTP_TIMEOUT_LIMIT_INFO layout).
[StructLayout(LayoutKind.Sequential)]
internal struct HTTP_TIMEOUT_LIMIT_INFO
{
    internal HTTP_FLAGS Flags;      // HTTP_PROPERTY_FLAG_PRESENT when set
    internal ushort EntityBody;
    internal ushort DrainEntityBody;
    internal ushort RequestQueue;
    internal ushort IdleConnection;
    internal ushort HeaderWait;
    internal uint MinSendRate;
}

// HTTP API version pair passed to HttpInitialize and friends.
[StructLayout(LayoutKind.Sequential)]
internal struct HTTPAPI_VERSION
{
    internal ushort HttpApiMajorVersion;
    internal ushort HttpApiMinorVersion;
}

// Binds a URL group to a request queue (HttpServerBindingProperty payload).
[StructLayout(LayoutKind.Sequential)]
internal struct HTTP_BINDING_INFO
{
    internal HTTP_FLAGS Flags;      // HTTP_PROPERTY_FLAG_PRESENT when set
    internal IntPtr RequestQueueHandle;
}
// P/Invoke declarations for httpapi.dll (HTTP Server API v2). All of these
// return a Win32 error code (0 == NO_ERROR); signatures must match the
// native prototypes exactly.

// One-time initialization; flags selects server (HTTP_INITIALIZE_SERVER)
// and/or channel-binding support (HTTP_INITIALIZE_CBT).
[DllImport(Libraries.HttpApi, SetLastError = true)]
internal static extern unsafe uint HttpInitialize(HTTPAPI_VERSION version, uint flags, void* pReserved);

// Sets a property (timeouts, binding, auth, ...) on a URL group.
[DllImport(Libraries.HttpApi, SetLastError = true)]
internal static extern uint HttpSetUrlGroupProperty(ulong urlGroupId, HTTP_SERVER_PROPERTY serverProperty, IntPtr pPropertyInfo, uint propertyInfoLength);

// Server session / URL group lifetime management.
[DllImport(Libraries.HttpApi, SetLastError = true)]
internal static extern unsafe uint HttpCreateServerSession(HTTPAPI_VERSION version, ulong* serverSessionId, uint reserved);

[DllImport(Libraries.HttpApi, SetLastError = true)]
internal static extern unsafe uint HttpCreateUrlGroup(ulong serverSessionId, ulong* urlGroupId, uint reserved);

[DllImport(Libraries.HttpApi, SetLastError = true)]
internal static extern uint HttpCloseUrlGroup(ulong urlGroupId);

// Creates the request queue the listener reads from; CharSet.Unicode because
// pName is a UTF-16 queue name.
[DllImport(Libraries.HttpApi, CharSet = CharSet.Unicode, SetLastError = true)]
internal static extern unsafe uint HttpCreateRequestQueue(HTTPAPI_VERSION version, string pName,
    Interop.Kernel32.SECURITY_ATTRIBUTES* pSecurityAttributes, uint flags, out HttpRequestQueueV2Handle pReqQueueHandle);

// URL registration on a URL group.
[DllImport(Libraries.HttpApi, CharSet = CharSet.Unicode, SetLastError = true)]
internal static extern uint HttpAddUrlToUrlGroup(ulong urlGroupId, string pFullyQualifiedUrl, ulong context, uint pReserved);

[DllImport(Libraries.HttpApi, CharSet = CharSet.Unicode, SetLastError = true)]
internal static extern uint HttpRemoveUrlFromUrlGroup(ulong urlGroupId, string pFullyQualifiedUrl, uint flags);

// Request/response I/O; pOverlapped enables asynchronous completion.
[DllImport(Libraries.HttpApi, SetLastError = true)]
internal static extern unsafe uint HttpReceiveHttpRequest(SafeHandle requestQueueHandle, ulong requestId, uint flags, HTTP_REQUEST* pRequestBuffer, uint requestBufferLength, uint* pBytesReturned, NativeOverlapped* pOverlapped);

[DllImport(Libraries.HttpApi, SetLastError = true)]
internal static extern unsafe uint HttpSendHttpResponse(SafeHandle requestQueueHandle, ulong requestId, uint flags, HTTP_RESPONSE* pHttpResponse, void* pCachePolicy, uint* pBytesSent, SafeLocalAllocHandle pRequestBuffer, uint requestBufferLength, NativeOverlapped* pOverlapped, void* pLogData);

[DllImport(Libraries.HttpApi, SetLastError = true)]
internal static extern unsafe uint HttpWaitForDisconnect(SafeHandle requestQueueHandle, ulong connectionId, NativeOverlapped* pOverlapped);

[DllImport(Libraries.HttpApi, SetLastError = true)]
internal static extern unsafe uint HttpReceiveRequestEntityBody(SafeHandle requestQueueHandle, ulong requestId, uint flags, void* pEntityBuffer, uint entityBufferLength, out uint bytesReturned, NativeOverlapped* pOverlapped);

[DllImport(Libraries.HttpApi, SetLastError = true)]
internal static extern unsafe uint HttpSendResponseEntityBody(SafeHandle requestQueueHandle, ulong requestId, uint flags, ushort entityChunkCount, HTTP_DATA_CHUNK* pEntityChunks, uint* pBytesSent, SafeLocalAllocHandle pRequestBuffer, uint requestBufferLength, NativeOverlapped* pOverlapped, void* pLogData);

// Teardown / cancellation.
[DllImport(Libraries.HttpApi, SetLastError = true)]
internal static extern unsafe uint HttpCloseRequestQueue(IntPtr pReqQueueHandle);

[DllImport(Libraries.HttpApi, SetLastError = true)]
internal static extern uint HttpCancelHttpRequest(SafeHandle requestQueueHandle, ulong requestId, IntPtr pOverlapped);

[DllImport(Libraries.HttpApi, SetLastError = true)]
internal static extern uint HttpCloseServerSession(ulong serverSessionId);
// ChannelBinding implementation backed by native memory obtained via
// LocalAlloc; the memory is released with LocalFree when the safe handle is
// disposed or finalized.
internal sealed class SafeLocalFreeChannelBinding : ChannelBinding
{
    private const int LMEM_FIXED = 0; // LocalAlloc flag: fixed (non-moveable) memory
    private int _size;

    private SafeLocalFreeChannelBinding() { }

    /// <summary>Size in bytes of the allocated channel-binding blob.</summary>
    public override int Size
    {
        get { return _size; }
    }

    /// <summary>
    /// Allocates <paramref name="cb"/> bytes of native memory wrapped in a safe
    /// handle. Throws <see cref="OutOfMemoryException"/> when the native
    /// allocation fails.
    /// </summary>
    public static SafeLocalFreeChannelBinding LocalAlloc(int cb)
    {
        SafeLocalFreeChannelBinding result = HttpApi.LocalAlloc(LMEM_FIXED, (UIntPtr)cb);
        if (result.IsInvalid)
        {
            // Mark the handle so the finalizer does not try to free garbage.
            result.SetHandleAsInvalid();
            throw new OutOfMemoryException();
        }
        result._size = cb;
        return result;
    }

    // Fixed modifier order: "protected override" (standard C# convention)
    // instead of the non-conventional "override protected".
    protected override bool ReleaseHandle()
    {
        return Interop.Kernel32.LocalFree(handle) == IntPtr.Zero;
    }
}
// Allocates native memory returned directly as the channel-binding safe
// handle type so ownership transfers atomically.
[DllImport(Libraries.Kernel32, SetLastError = true)]
internal static extern SafeLocalFreeChannelBinding LocalAlloc(int uFlags, UIntPtr sizetdwBytes);

// Two overloads of the same native entry point: one typed for the
// fixed-size HTTP_SSL_CLIENT_CERT_INFO header, one byte* for the
// variable-size retry when the first call reports a larger required buffer.
[DllImport(Libraries.HttpApi, SetLastError = true)]
internal static extern unsafe uint HttpReceiveClientCertificate(SafeHandle requestQueueHandle, ulong connectionId, uint flags, HTTP_SSL_CLIENT_CERT_INFO* pSslClientCertInfo, uint sslClientCertInfoSize, uint* pBytesReceived, NativeOverlapped* pOverlapped);

[DllImport(Libraries.HttpApi, SetLastError = true)]
internal static extern unsafe uint HttpReceiveClientCertificate(SafeHandle requestQueueHandle, ulong connectionId, uint flags, byte* pSslClientCertInfo, uint sslClientCertInfoSize, uint* pBytesReceived, NativeOverlapped* pOverlapped);

// Flags for SetFileCompletionNotificationModes (overlapped I/O completion
// behavior tuning).
[Flags]
internal enum FileCompletionNotificationModes : byte
{
    None = 0,
    SkipCompletionPortOnSuccess = 1,
    SkipSetEventOnHandle = 2
}

[DllImport(Libraries.Kernel32, SetLastError = true)]
internal static extern unsafe bool SetFileCompletionNotificationModes(SafeHandle handle, FileCompletionNotificationModes modes);
// Verb strings indexed by the HTTP_VERB enumeration value; entry 0 is null
// because the first enum member has no verb text. Order must match the
// native enumeration and is read by GetVerb().
internal static readonly string[] HttpVerbs = new string[]
{
    null,
    "Unknown",
    "Invalid",
    "OPTIONS",
    "GET",
    "HEAD",
    "POST",
    "PUT",
    "DELETE",
    "TRACE",
    "CONNECT",
    "TRACK",
    "MOVE",
    "COPY",
    "PROPFIND",
    "PROPPATCH",
    "MKCOL",
    "LOCK",
    "UNLOCK",
    "SEARCH",
};
// Maps HTTP.SYS known-response-header indexes to their wire names and back.
// Array order must match the native HTTP_HEADER_ID layout.
internal static class HTTP_RESPONSE_HEADER_ID
{
    internal enum Enum
    {
        HttpHeaderCacheControl = 0,         // general-header [section 4.5]
        HttpHeaderConnection = 1,           // general-header [section 4.5]
        HttpHeaderDate = 2,                 // general-header [section 4.5]
        HttpHeaderKeepAlive = 3,            // general-header [not in rfc]
        HttpHeaderPragma = 4,               // general-header [section 4.5]
        HttpHeaderTrailer = 5,              // general-header [section 4.5]
        HttpHeaderTransferEncoding = 6,     // general-header [section 4.5]
        HttpHeaderUpgrade = 7,              // general-header [section 4.5]
        HttpHeaderVia = 8,                  // general-header [section 4.5]
        HttpHeaderWarning = 9,              // general-header [section 4.5]
        HttpHeaderAllow = 10,               // entity-header  [section 7.1]
        HttpHeaderContentLength = 11,       // entity-header  [section 7.1]
        HttpHeaderContentType = 12,         // entity-header  [section 7.1]
        HttpHeaderContentEncoding = 13,     // entity-header  [section 7.1]
        HttpHeaderContentLanguage = 14,     // entity-header  [section 7.1]
        HttpHeaderContentLocation = 15,     // entity-header  [section 7.1]
        HttpHeaderContentMd5 = 16,          // entity-header  [section 7.1]
        HttpHeaderContentRange = 17,        // entity-header  [section 7.1]
        HttpHeaderExpires = 18,             // entity-header  [section 7.1]
        HttpHeaderLastModified = 19,        // entity-header  [section 7.1]

        // Response headers
        HttpHeaderAcceptRanges = 20,        // response-header [section 6.2]
        HttpHeaderAge = 21,                 // response-header [section 6.2]
        HttpHeaderEtag = 22,                // response-header [section 6.2]
        HttpHeaderLocation = 23,            // response-header [section 6.2]
        HttpHeaderProxyAuthenticate = 24,   // response-header [section 6.2]
        HttpHeaderRetryAfter = 25,          // response-header [section 6.2]
        HttpHeaderServer = 26,              // response-header [section 6.2]
        HttpHeaderSetCookie = 27,           // response-header [not in rfc]
        HttpHeaderVary = 28,                // response-header [section 6.2]
        HttpHeaderWwwAuthenticate = 29,     // response-header [section 6.2]
        HttpHeaderResponseMaximum = 30,
        HttpHeaderMaximum = 41
    }

    private static readonly string[] s_strings =
    {
        "Cache-Control",
        "Connection",
        "Date",
        "Keep-Alive",
        "Pragma",
        "Trailer",
        "Transfer-Encoding",
        "Upgrade",
        "Via",
        "Warning",
        "Allow",
        "Content-Length",
        "Content-Type",
        "Content-Encoding",
        "Content-Language",
        "Content-Location",
        "Content-MD5",
        "Content-Range",
        "Expires",
        "Last-Modified",
        "Accept-Ranges",
        "Age",
        "ETag",
        "Location",
        "Proxy-Authenticate",
        "Retry-After",
        "Server",
        "Set-Cookie",
        "Vary",
        "WWW-Authenticate",
    };

    // Reverse lookup table (name -> native index), built once at startup.
    private static readonly Dictionary<string, int> s_hashtable = CreateTable();

    private static Dictionary<string, int> CreateTable()
    {
        int count = (int)Enum.HttpHeaderResponseMaximum;
        var table = new Dictionary<string, int>(count);
        for (int index = 0; index < count; index++)
        {
            table.Add(s_strings[index], index);
        }
        return table;
    }

    // Returns the native index of a known response header, or -1 when the
    // header is not a known one (case-sensitive lookup, as before).
    internal static int IndexOfKnownHeader(string headerName)
    {
        int index;
        if (s_hashtable.TryGetValue(headerName, out index))
        {
            return index;
        }
        return -1;
    }

    // Returns the header name for a native known-header index.
    internal static string ToString(int position) => s_strings[position];
}
private static unsafe string GetKnownHeader(HTTP_REQUEST* request, long fixup, int headerIndex)
{
if (NetEventSource.IsEnabled) { NetEventSource.Enter(null); }
string header = null;
HTTP_KNOWN_HEADER* pKnownHeader = (&request->Headers.KnownHeaders) + headerIndex;
if (NetEventSource.IsEnabled)
{
NetEventSource.Info(null, $"HttpApi::GetKnownHeader() pKnownHeader:0x{(IntPtr)pKnownHeader}");
NetEventSource.Info(null, $"HttpApi::GetKnownHeader() pRawValue:0x{(IntPtr)pKnownHeader->pRawValue} RawValueLength:{pKnownHeader->RawValueLength}");
}
// For known headers, when header value is empty, RawValueLength will be 0 and
// pRawValue will point to empty string
if (pKnownHeader->pRawValue != null)
{
header = new string(pKnownHeader->pRawValue + fixup, 0, pKnownHeader->RawValueLength);
}
if (NetEventSource.IsEnabled) { NetEventSource.Exit(null, $"HttpApi::GetKnownHeader() return:{header}"); }
return header;
}
// Reads a known header from a request that has not moved (no pointer fixup).
internal static unsafe string GetKnownHeader(HTTP_REQUEST* request, int headerIndex) =>
    GetKnownHeader(request, 0, headerIndex);

// Reads a known header from a managed copy of the request blob;
// 'originalAddress' is where the kernel wrote the request, used to fix up
// the embedded native pointers.
internal static unsafe string GetKnownHeader(byte[] memoryBlob, IntPtr originalAddress, int headerIndex)
{
    fixed (byte* pBlob = memoryBlob)
    {
        return GetKnownHeader((HTTP_REQUEST*)pBlob, pBlob - (byte*)originalAddress, headerIndex);
    }
}
// Resolves the request verb to a string: known verbs come from the HttpVerbs
// table; HttpVerbUnknown carries its text out-of-line (pointer needs fixup).
// Returns null when neither case applies.
private static unsafe string GetVerb(HTTP_REQUEST* request, long fixup)
{
    int verbIndex = (int)request->Verb;
    if (verbIndex > (int)HTTP_VERB.HttpVerbUnknown && verbIndex < (int)HTTP_VERB.HttpVerbMaximum)
    {
        return HttpVerbs[verbIndex];
    }
    if (request->Verb == HTTP_VERB.HttpVerbUnknown && request->pUnknownVerb != null)
    {
        return new string(request->pUnknownVerb + fixup, 0, request->UnknownVerbLength);
    }
    return null;
}
// Resolves the verb of a request that has not moved (no pointer fixup).
internal static unsafe string GetVerb(HTTP_REQUEST* request) => GetVerb(request, 0);

// Resolves the verb from a managed copy of the request blob, fixing up the
// unknown-verb pointer relative to the original native address.
internal static unsafe string GetVerb(byte[] memoryBlob, IntPtr originalAddress)
{
    fixed (byte* pBlob = memoryBlob)
    {
        return GetVerb((HTTP_REQUEST*)pBlob, pBlob - (byte*)originalAddress);
    }
}
// Server API

// Builds a WebHeaderCollection from both the unknown-header array and the
// fixed known-header array of a request blob. 'originalAddress' is where the
// kernel originally wrote the request; all embedded pointers are translated
// by the resulting fixup offset.
internal static unsafe WebHeaderCollection GetHeaders(byte[] memoryBlob, IntPtr originalAddress)
{
    NetEventSource.Enter(null);

    // Return value.
    WebHeaderCollection headerCollection = new WebHeaderCollection();
    fixed (byte* pMemoryBlob = memoryBlob)
    {
        HTTP_REQUEST* request = (HTTP_REQUEST*)pMemoryBlob;
        long fixup = pMemoryBlob - (byte*)originalAddress;
        int index;

        // unknown headers
        if (request->Headers.UnknownHeaderCount != 0)
        {
            HTTP_UNKNOWN_HEADER* pUnknownHeader = (HTTP_UNKNOWN_HEADER*)(fixup + (byte*)request->Headers.pUnknownHeaders);
            for (index = 0; index < request->Headers.UnknownHeaderCount; index++)
            {
                // For unknown headers, when header value is empty, RawValueLength will be 0 and
                // pRawValue will be null.
                if (pUnknownHeader->pName != null && pUnknownHeader->NameLength > 0)
                {
                    string headerName = new string(pUnknownHeader->pName + fixup, 0, pUnknownHeader->NameLength);
                    string headerValue;
                    if (pUnknownHeader->pRawValue != null && pUnknownHeader->RawValueLength > 0)
                    {
                        headerValue = new string(pUnknownHeader->pRawValue + fixup, 0, pUnknownHeader->RawValueLength);
                    }
                    else
                    {
                        headerValue = string.Empty;
                    }
                    headerCollection.Add(headerName, headerValue);
                }
                pUnknownHeader++;
            }
        }

        // known headers: iterate the inline fixed-size array by pointer.
        HTTP_KNOWN_HEADER* pKnownHeader = &request->Headers.KnownHeaders;
        for (index = 0; index < HttpHeaderRequestMaximum; index++)
        {
            // For known headers, when header value is empty, RawValueLength will be 0 and
            // pRawValue will point to empty string ("\0")
            if (pKnownHeader->pRawValue != null)
            {
                string headerValue = new string(pKnownHeader->pRawValue + fixup, 0, pKnownHeader->RawValueLength);
                headerCollection.Add(HTTP_REQUEST_HEADER_ID.ToString(index), headerValue);
            }
            pKnownHeader++;
        }
    }

    NetEventSource.Exit(null);
    return headerCollection;
}
// Copies up to 'size' bytes of the request's entity body (which may span
// several HTTP_DATA_CHUNKs) into 'buffer' starting at 'offset'.
// dataChunkIndex/dataChunkOffset carry the resume position between calls;
// dataChunkIndex is set to -1 once the whole body has been consumed.
// Returns the number of bytes copied.
internal static unsafe uint GetChunks(byte[] memoryBlob, IntPtr originalAddress, ref int dataChunkIndex, ref uint dataChunkOffset, byte[] buffer, int offset, int size)
{
    if (NetEventSource.IsEnabled)
    {
        NetEventSource.Enter(null, $"HttpApi::GetChunks() memoryBlob:{memoryBlob}");
    }

    // Total bytes copied into 'buffer' (the return value).
    uint dataRead = 0;
    fixed (byte* pMemoryBlob = memoryBlob)
    {
        HTTP_REQUEST* request = (HTTP_REQUEST*)pMemoryBlob;
        // Embedded pointers are relative to where the kernel originally wrote
        // the request; 'fixup' translates them into this managed copy.
        long fixup = pMemoryBlob - (byte*)originalAddress;

        if (request->EntityChunkCount > 0 && dataChunkIndex < request->EntityChunkCount && dataChunkIndex != -1)
        {
            HTTP_DATA_CHUNK* pDataChunk = (HTTP_DATA_CHUNK*)(fixup + (byte*)&request->pEntityChunks[dataChunkIndex]);
            fixed (byte* pReadBuffer = buffer)
            {
                byte* pTo = &pReadBuffer[offset];
                while (dataChunkIndex < request->EntityChunkCount && dataRead < size)
                {
                    if (dataChunkOffset >= pDataChunk->BufferLength)
                    {
                        // Current chunk exhausted; advance to the next one.
                        dataChunkOffset = 0;
                        dataChunkIndex++;
                        pDataChunk++;
                    }
                    else
                    {
                        byte* pFrom = pDataChunk->pBuffer + dataChunkOffset + fixup;

                        uint bytesToRead = pDataChunk->BufferLength - (uint)dataChunkOffset;
                        // BUG FIX: clamp to the *remaining* output capacity, not
                        // the total 'size'. The previous clamp allowed up to
                        // 'size' bytes per chunk, overrunning 'buffer' when the
                        // body spans multiple chunks.
                        uint remaining = (uint)size - dataRead;
                        if (bytesToRead > remaining)
                        {
                            bytesToRead = remaining;
                        }
                        for (uint i = 0; i < bytesToRead; i++)
                        {
                            *(pTo++) = *(pFrom++);
                        }
                        dataRead += bytesToRead;
                        dataChunkOffset += bytesToRead;
                    }
                }
            }
        }

        // -1 signals the caller that the entire entity body has been consumed.
        if (dataChunkIndex == request->EntityChunkCount)
        {
            dataChunkIndex = -1;
        }
    }

    if (NetEventSource.IsEnabled)
    {
        NetEventSource.Exit(null);
    }
    return dataRead;
}
// Returns the request's verb enum when it is a parsed, known verb;
// HttpVerbUnknown otherwise. Reads only the fixed portion of the blob, so no
// pointer fixup is needed.
internal static unsafe HTTP_VERB GetKnownVerb(byte[] memoryBlob, IntPtr originalAddress)
{
    NetEventSource.Enter(null);

    HTTP_VERB result = HTTP_VERB.HttpVerbUnknown;
    fixed (byte* pBlob = memoryBlob)
    {
        HTTP_VERB candidate = ((HTTP_REQUEST*)pBlob)->Verb;
        if ((int)candidate > (int)HTTP_VERB.HttpVerbUnparsed && (int)candidate < (int)HTTP_VERB.HttpVerbMaximum)
        {
            result = candidate;
        }
    }

    NetEventSource.Exit(null);
    return result;
}
// Extracts the remote (client) endpoint from the request blob's transport
// address, fixing up the embedded pointer. Returns null when no usable
// address is present.
internal static unsafe IPEndPoint GetRemoteEndPoint(byte[] memoryBlob, IntPtr originalAddress)
{
    if (NetEventSource.IsEnabled) NetEventSource.Enter(null);

    SocketAddress v4address = new SocketAddress(AddressFamily.InterNetwork, IPv4AddressSize);
    SocketAddress v6address = new SocketAddress(AddressFamily.InterNetworkV6, IPv6AddressSize);
    fixed (byte* pMemoryBlob = memoryBlob)
    {
        HTTP_REQUEST* request = (HTTP_REQUEST*)pMemoryBlob;
        IntPtr address = request->Address.pRemoteAddress != null ? (IntPtr)(pMemoryBlob - (byte*)originalAddress + (byte*)request->Address.pRemoteAddress) : IntPtr.Zero;
        // Exactly one of v4address/v6address survives; the other is nulled.
        CopyOutAddress(address, ref v4address, ref v6address);
    }

    IPEndPoint endpoint = null;
    if (v4address != null)
    {
        // Consistency/perf fix: use the cached template endpoints (as
        // GetLocalEndPoint does) instead of allocating a throwaway
        // IPEndPoint on every call.
        endpoint = s_any.Create(v4address) as IPEndPoint;
    }
    else if (v6address != null)
    {
        endpoint = s_ipv6Any.Create(v6address) as IPEndPoint;
    }

    if (NetEventSource.IsEnabled) NetEventSource.Exit(null);
    return endpoint;
}
// Extracts the local (server) endpoint from the request blob's transport
// address, fixing up the embedded pointer. Returns null when no usable
// address is present.
internal static unsafe IPEndPoint GetLocalEndPoint(byte[] memoryBlob, IntPtr originalAddress)
{
    if (NetEventSource.IsEnabled) NetEventSource.Enter(null);

    SocketAddress ipv4 = new SocketAddress(AddressFamily.InterNetwork, IPv4AddressSize);
    SocketAddress ipv6 = new SocketAddress(AddressFamily.InterNetworkV6, IPv6AddressSize);
    fixed (byte* pBlob = memoryBlob)
    {
        HTTP_REQUEST* request = (HTTP_REQUEST*)pBlob;
        IntPtr address = request->Address.pLocalAddress == null
            ? IntPtr.Zero
            : (IntPtr)(pBlob - (byte*)originalAddress + (byte*)request->Address.pLocalAddress);
        // Exactly one of ipv4/ipv6 survives; the other is nulled.
        CopyOutAddress(address, ref ipv4, ref ipv6);
    }

    IPEndPoint endpoint = null;
    if (ipv4 != null)
    {
        endpoint = s_any.Create(ipv4) as IPEndPoint;
    }
    else if (ipv6 != null)
    {
        endpoint = s_ipv6Any.Create(ipv6) as IPEndPoint;
    }

    if (NetEventSource.IsEnabled) NetEventSource.Exit(null);
    return endpoint;
}
// Copies a native sockaddr at 'address' into whichever of the two
// SocketAddress buffers matches its family, and nulls the other (both are
// nulled when the address is missing or of an unrecognized family).
private static unsafe void CopyOutAddress(IntPtr address, ref SocketAddress v4address, ref SocketAddress v6address)
{
    if (address != IntPtr.Zero)
    {
        // First two bytes of a sockaddr are the address family.
        ushort addressFamily = *((ushort*)address);
        if (addressFamily == (ushort)AddressFamily.InterNetwork)
        {
            v6address = null;
            // Copy starting past the family field (index 2) byte-by-byte.
            for (int index = 2; index < IPv4AddressSize; index++)
            {
                v4address[index] = ((byte*)address)[index];
            }
            return;
        }
        if (addressFamily == (ushort)AddressFamily.InterNetworkV6)
        {
            v4address = null;
            for (int index = 2; index < IPv6AddressSize; index++)
            {
                v6address[index] = ((byte*)address)[index];
            }
            return;
        }
    }

    // No address or unknown family: report neither.
    v4address = null;
    v6address = null;
}
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
namespace CCBaseline
{
using System.Diagnostics.Contracts;
using Results = Dictionary<string, List<Result>>;
// Collects, for one analyzed project version, the original run plus up to
// three baseline comparisons: BL0 (baselined against the same version),
// BL1 (most recent earlier version) and BL2 (next older earlier version).
class Result
{
    public Stats Original;
    public Stats BL0;
    public Stats BL1;
    public Stats BL2;

    internal void InsertBaseline(Stats stats)
    {
        // Baseline taken from the analyzed version itself.
        if (stats.BaselineVersion == stats.AnalyzedVersion)
        {
            this.BL0 = stats;
            return;
        }
        // First earlier baseline encountered fills BL1.
        if (this.BL1.BaselineVersion == null)
        {
            this.BL1 = stats;
            return;
        }
        // Keep BL1 the newer of the two earlier baselines.
        if (string.Compare(stats.BaselineVersion, this.BL1.BaselineVersion) < 0)
        {
            this.BL2 = stats;
        }
        else
        {
            this.BL2 = this.BL1;
            this.BL1 = stats;
        }
    }
}
// Per-project counters scraped from one build log; values are kept as the
// raw digit strings captured by the regexes (null when never matched).
class LocalStats
{
    public string MethodsAnalyzed;
    public string MethodsWithBaseLine;
    public string MethodsWithoutBaseLine;
    public string MethodsWithIdenticalBaseLine;
    public string MethodsWith0Warnings;
    public string Checked;
    public string Correct;
    public string Unknown;
    public string Unreached;

    // Warnings equals the "unknown" count; when only "Checked" was reported
    // there were zero warnings; otherwise the value is unavailable (null).
    public string Warnings
    {
        get { return this.Unknown ?? (this.Checked == null ? null : "0"); }
    }

    // Methods analyzed minus methods that had a baseline; "" when any of the
    // required counters is missing.
    public string NewMethods
    {
        get
        {
            if (MethodsAnalyzed == null || MethodsWithBaseLine == null || MethodsWithoutBaseLine == null || MethodsWithIdenticalBaseLine == null)
            {
                return "";
            }
            return (int.Parse(MethodsAnalyzed) - int.Parse(MethodsWithBaseLine)).ToString();
        }
    }

    // Baselined methods whose results differ from the baseline; "" when any
    // of the required counters is missing.
    public string MethodsWithDiffs
    {
        get
        {
            if (MethodsAnalyzed == null || MethodsWithBaseLine == null || MethodsWithoutBaseLine == null || MethodsWithIdenticalBaseLine == null)
            {
                return "";
            }
            return (int.Parse(MethodsWithBaseLine) - int.Parse(MethodsWithIdenticalBaseLine)).ToString();
        }
    }
}
// Snapshot of one analysis run of a project against a particular baseline
// version; counters are raw digit strings (null when never reported).
struct Stats
{
    public string ProjectName;
    public string MethodsAnalyzed;
    public string MethodsWith0Warnings;
    public string Checked;
    public string Correct;
    public string Unknown;
    public string Unreached;
    public string BaselineVersion;   // version the run was baselined against
    public string AnalyzedVersion;   // version that was analyzed

    // Warnings equals the "unknown" count; when only "Checked" was reported
    // there were zero warnings; otherwise the value is unavailable (null).
    public string Warnings
    {
        get { return this.Unknown ?? (this.Checked == null ? null : "0"); }
    }

    // Numeric form of Warnings; 0 when unavailable.
    public int WarningsNum
    {
        get
        {
            var warnings = this.Warnings;
            return warnings == null ? 0 : Int32.Parse(warnings);
        }
    }
}
class Program
{
// Entry point. The single argument is either a root directory ending in '*'
// (scan every versioned subdirectory), a .log file (print that log's stats),
// or a single version directory. Returns -1 on usage error, 0 otherwise.
static int Main(string[] args)
{
    if (args.Length < 1)
    {
        Console.WriteLine("Usage: blresults <repoDir>");
        return -1;
    }

    var results = new Results();
    var target = args[0];
    if (target.EndsWith("*"))
    {
        RunMany(args, results);
    }
    else if (target.EndsWith(".log"))
    {
        // Single log: print per-project stats (LaTeX when a 2nd arg is given).
        var logStats = RunOneLog(target);
        PrintStats(logStats, args.Length > 1);
        return 0;
    }
    else
    {
        RunOne(target, results);
    }
    PrintStats(results);
    return 0;
}
// Scans every purely-numeric (version) subdirectory under the root named by
// args[0] minus its trailing '*', running RunOne on each in ascending order.
static int RunMany(string[] args, Results results)
{
    Contract.Requires(args != null);
    Contract.Requires(Contract.ForAll(args, a => a != null));

    // Strip the trailing '*' to recover the root directory path.
    var rootLength = args[0].Length - 1;
    Contract.Assume(rootLength >= 0);
    var root = args[0].Substring(0, rootLength);

    var dirs = Directory.EnumerateDirectories(root).OrderBy(s => s).ToArray();
    Contract.Assume(Contract.ForAll(dirs, d => d != null));

    var minversion = "";
    foreach (var dir in dirs)
    {
        // Only directories whose final path segment is a version number.
        var match = Regex.Match(dir, @"[\\/](\d+)$");
        if (!match.Success)
        {
            continue;
        }
        var version = match.Groups[1].Value;
        if (string.Compare(version, minversion) < 0) continue;
        RunOne(dir, results);
    }
    return 0;
}
// Prints a per-assembly summary table (CSV or LaTeX): a header row followed
// by one sorted line per assembly.
private static void PrintStats(Dictionary<string, LocalStats> results, bool latex)
{
    var header = FormatLine(latex,
        "Assembly",
        "Methods",
        "Changed",
        "New",
        "Checks",
        "Correct",
        "Warnings",
        "Unreached",
        "Method w/0W");
    Console.WriteLine(header);

    var lines = GetStats(results, latex).ToArray();
    Array.Sort(lines);
    foreach (var line in lines)
    {
        Console.WriteLine(line);
    }
}
// Abbreviated assembly names excluded from the LaTeX table (see GetStats).
static string[] toSkip = {
    "DW", "S", "DGC", "DGCM", "DGL", "DGS", "GC", "GCM", "GL", "GPBS", "GPBSM", "GPTF", "GPU", "GS", "TFD" };
// Yields one formatted table row per assembly; in LaTeX mode, assemblies on
// the skip list are omitted.
private static IEnumerable<string> GetStats(Dictionary<string, LocalStats> results, bool latex)
{
    foreach (var pair in results)
    {
        var name = NormalizeName(pair.Key);
        if (latex && toSkip.Contains(name))
        {
            continue;
        }
        var stats = pair.Value;
        yield return FormatLine(latex, name, stats.MethodsAnalyzed, stats.MethodsWithDiffs, stats.NewMethods,
            stats.Checked, stats.Correct, stats.Warnings, stats.Unreached, stats.MethodsWith0Warnings);
    }
}
// Joins the given cells into one CSV or LaTeX table row. The first cell is
// left-aligned, all following cells right-aligned, each padded to 10 chars.
private static string FormatLine(bool latex, params string[] args)
{
    var separator = latex ? " & " : ", ";
    var lineEnd = latex ? @" \\" : "";
    var cells = new List<string>(args.Length);
    for (int i = 0; i < args.Length; i++)
    {
        cells.Add(FixedWidth(args[i], left: i == 0));
    }
    return string.Join(separator, cells) + lineEnd;
}

// Pads 's' to a width of 10 characters, left- or right-aligned.
private static string FixedWidth(string s, bool left = false)
{
    var format = left ? "{0,-10}" : "{0,10}";
    return String.Format(format, s);
}
/// <summary>
/// Abbreviates an assembly name: strips dots, then keeps only uppercase
/// letters. "Tf" and "Cm" are upcased first so both letters survive the
/// uppercase filter.
/// </summary>
private static string NormalizeName(string name)
{
    var expanded = name.Replace(".", "").Replace("Tf", "TF").Replace("Cm", "CM");
    return String.Concat(expanded.Where(Char.IsUpper));
}
// Prints, per project, a CSV table with one row per analyzed version:
// original counters plus the three baselines and derived new-warning count.
private static void PrintStats(Results results)
{
    foreach (var pair in results)
    {
        Console.WriteLine("Project {0}:", pair.Key);
        Console.WriteLine("Version,Methods,Checks,Correct,Warnings,Methods w/0W,BL0 Warnings,BL0 Mw0W,BL1 Warnings,BL1 Mw0W,BL2 Warnings,BL2 Mw0W,New Warnings");
        foreach (var result in pair.Value)
        {
            // Use the better (fewer warnings) of the two earlier baselines.
            Stats min = Minimum(result.BL1, result.BL2);
            // Warnings beyond what the same-version baseline already had.
            var newWarnings = (min.WarningsNum >= result.BL0.WarningsNum)
                ? min.WarningsNum - result.BL0.WarningsNum
                : min.WarningsNum;
            var row = string.Join(",",
                result.Original.AnalyzedVersion,
                result.Original.MethodsAnalyzed,
                result.Original.Checked,
                result.Original.Correct,
                result.Original.Warnings,
                result.Original.MethodsWith0Warnings,
                result.BL0.Warnings,
                result.BL0.MethodsWith0Warnings,
                result.BL1.Warnings,
                result.BL1.MethodsWith0Warnings,
                result.BL2.Warnings,
                result.BL2.MethodsWith0Warnings,
                newWarnings);
            Console.WriteLine(row);
        }
        Console.WriteLine();
    }
}
// Returns whichever of the two runs has fewer warnings; falls back to the
// first operand whenever either warning count is missing or non-numeric.
private static Stats Minimum(Stats stats1, Stats stats2)
{
    if (stats1.Warnings == null || stats2.Warnings == null)
    {
        return stats1;
    }
    int w1, w2;
    if (!Int32.TryParse(stats1.Warnings, out w1) || !Int32.TryParse(stats2.Warnings, out w2))
    {
        return stats1;
    }
    return w1 < w2 ? stats1 : stats2;
}
// Processes one version directory: parses every buildlog.<version>*.txt in
// it, converts the per-project counters into Stats records, and merges them
// into 'results'. Files containing "save" are treated as the original
// (non-baselined) run; all others are inserted as baselines.
static void RunOne(string dir, Results results)
{
    if (!Directory.Exists(dir))
    {
        Console.WriteLine("Directory {0} does not exist", dir);
    }

    // The directory's own numeric segment is the analyzed version.
    string analyzedVersion = "<dummy>";
    MatchOneValue(ref analyzedVersion, dir, @"(^|[\\/])(\d+)", groupNo: 2);

    Dictionary<string,Result> localResults = new Dictionary<string,Result>();
    foreach (var file in Directory.EnumerateFiles(dir, "*.txt"))
    {
        // "save" in the file name marks the original (pre-baseline) run.
        var original = file.Contains("save");
        // The log file name carries the baseline version it was built against.
        var vmatch = Regex.Match(file, @"buildlog.(\d+)");
        if (!vmatch.Success) continue;
        var version = vmatch.Groups[1].Value;
        var stats = OneFileStats(file);
        foreach (var projPair in stats)
        {
            var current = GetProjectResult(localResults, projPair.Key);
            Contract.Assume(current != null);
            var stat = new Stats
            {
                ProjectName = projPair.Key,
                AnalyzedVersion = analyzedVersion,
                BaselineVersion = version,
                Checked = projPair.Value.Checked,
                Correct = projPair.Value.Correct,
                MethodsAnalyzed = projPair.Value.MethodsAnalyzed,
                MethodsWith0Warnings = projPair.Value.MethodsWith0Warnings,
                Unknown = projPair.Value.Unknown,
                Unreached = projPair.Value.Unreached
            };
            if (original)
            {
                current.Original = stat;
            }
            else
            {
                // Slot the run into BL0/BL1/BL2 based on its baseline version.
                current.InsertBaseline(stat);
            }
        }
    }

    // Merge this directory's per-project results into the global map.
    foreach (var pair in localResults)
    {
        var list = GetProjectResults(results, pair.Key);
        list.Add(pair.Value);
    }
}
// Parses a single build log and returns per-project stats keyed by project
// name.
static Dictionary<string, LocalStats> RunOneLog(string file)
{
    return OneFileStats(file);
}
// Returns the LocalStats entry for 'project', creating and registering a
// fresh one on first sight.
private static LocalStats GetOrMaterialize(string project, Dictionary<string, LocalStats> dic)
{
    Contract.Ensures(Contract.Result<LocalStats>() != null);

    LocalStats stats;
    if (dic.TryGetValue(project, out stats))
    {
        Contract.Assume(stats != null);
        return stats;
    }
    stats = new LocalStats();
    dic.Add(project, stats);
    return stats;
}
// Scrapes one build log for per-project CodeContracts counters. Parsing is
// best-effort: an unreadable file yields whatever was collected so far, but
// the failure is no longer silently swallowed — it is reported on stderr.
private static Dictionary<string,LocalStats> OneFileStats(string file)
{
    var result = new Dictionary<string, LocalStats>();
    try
    {
        var lines = File.ReadAllLines(file);
        LocalStats last = null;
        for (int i = 0; i < lines.Length; i++)
        {
            var line = lines[i];
            // Lines look like "[<n>>]CodeContracts: <project>: <message>".
            string projectName = "<dummy>";
            if (MatchOneValue(ref projectName, line, @"^\s*(\d+(:\d+)?>)?CodeContracts:\s+([^:]+):", groupNo: 3)) {
                // "Checked N assertions" summary lines also match the project
                // pattern; skip them so they don't create a bogus entry.
                if (!projectName.StartsWith("Checked "))
                {
                    last = GetOrMaterialize(projectName, result);
                    // Each message line carries at most one of these counters.
                    MatchOneValue(ref last.MethodsAnalyzed, line, @"Total methods analyzed\s+(\d+)");
                    MatchOneValue(ref last.MethodsWith0Warnings, line, @"Methods with 0 warnings\s+(\d+)");
                    MatchOneValue(ref last.MethodsWithBaseLine, line, @"Methods with baseline:\s+(\d+)");
                    MatchOneValue(ref last.MethodsWithoutBaseLine, line, @"Methods w/o baseline:\s+(\d+)");
                    MatchOneValue(ref last.MethodsWithIdenticalBaseLine, line, @"Methods with identical baseline:\s+(\d+)");
                    MatchOneValue(ref last.Checked, line, @"Checked\s+(\d+)\s+assertion");
                    MatchOneValue(ref last.Correct, line, @"(\d+)\s+correct");
                    MatchOneValue(ref last.Unknown, line, @"(\d+)\s+unknown");
                    MatchOneValue(ref last.Unreached, line, @"(\d+)\s+unreached");
                }
            }
        }
    }
    catch (Exception ex)
    {
        // FIX: was a bare `catch { }` that hid every failure (missing file,
        // access denied, bad regex). Keep the best-effort contract but make
        // the failure visible.
        Console.Error.WriteLine("Warning: could not read '{0}': {1}", file, ex.Message);
    }
    return result;
}
// Returns the Result entry for 'projectName', creating and registering a
// fresh one on first sight.
private static Result GetProjectResult(Dictionary<string, Result> local, string projectName)
{
    Result entry;
    if (local.TryGetValue(projectName, out entry))
    {
        return entry;
    }
    entry = new Result();
    local.Add(projectName, entry);
    return entry;
}
// Returns the result list for 'projectName', creating and registering an
// empty one on first sight.
private static List<Result> GetProjectResults(Results all, string projectName)
{
    Contract.Ensures(Contract.Result<List<Result>>() != null);

    List<Result> list;
    if (all.TryGetValue(projectName, out list))
    {
        Contract.Assume(list != null);
        return list;
    }
    list = new List<Result>();
    all.Add(projectName, list);
    return list;
}
// Runs 'pattern' against 'line'; on success stores capture group 'groupNo'
// in 'result' and returns true, otherwise leaves 'result' untouched and
// returns false.
private static bool MatchOneValue(ref string result, string line, string pattern, int groupNo = 1)
{
    Contract.Requires(result != null);
    Contract.Ensures(result != null);

    var match = Regex.Match(line, pattern);
    if (!match.Success)
    {
        return false;
    }
    result = match.Groups[groupNo].Value;
    Contract.Assert(result != null);
    return true;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using AgoBasico;
using System.Data.SqlClient;
using System.Data;
using AgoBasico.Android;
using AgoBasico.Log;
namespace AgoDAO
{
public class MultiplicadorDA
{
/// <summary>
/// Inserts a Multiplicador row, including only the columns whose properties
/// are non-null, using parameterized SQL. Returns the command result from
/// ExecutarComando (presumably affected-row count — confirm against
/// ConexaoSQLServer).
/// </summary>
public int InserirMultiplicador(Multiplicador pMultiplicador)
{
    try
    {
        string cabecalho = "INSERT INTO Multiplicador( ";
        string valores = " VALUES( ";
        List<SqlParameter> parametros = new List<SqlParameter>();
        if(pMultiplicador.Nome != null)
        {
            cabecalho = cabecalho + " Nome, ";
            valores = valores + " @nome, ";
            parametros.Add(new SqlParameter("@nome", pMultiplicador.Nome));
        }
        if (pMultiplicador.Foto != null)
        {
            cabecalho = cabecalho + " Foto, ";
            valores = valores + " @foto, ";
            parametros.Add(new SqlParameter("@foto", pMultiplicador.Foto));
        }
        if (pMultiplicador.NomeCelula != null)
        {
            cabecalho = cabecalho + " NomeCelula, ";
            valores = valores + " @nome_celula, ";
            parametros.Add(new SqlParameter("@nome_celula", pMultiplicador.NomeCelula));
        }
        if (pMultiplicador.Supervisor != null)
        {
            cabecalho = cabecalho + " IDSupervisor, ";
            valores = valores + " @id_supervisor, ";
            parametros.Add(new SqlParameter("@id_supervisor", pMultiplicador.Supervisor.ID));
        }
        // Strip the trailing ", " from each fragment and close the lists.
        // NOTE(review): if every property is null this produces invalid SQL;
        // callers appear to always set at least one field — confirm.
        cabecalho = cabecalho.Substring(0, cabecalho.Length - 2) + ")";
        valores = valores.Substring(0, valores.Length - 2) + ")";
        StringBuilder lsql = new StringBuilder();
        lsql.Append(cabecalho);
        lsql.Append(valores);
        int retorno = ConexaoSQLServer.Conexao.ExecutarComando(lsql.ToString(), parametros.ToArray());
        return retorno;
    }
    catch (Exception)
    {
        // FIX: was `throw ex;`, which resets the stack trace; `throw;`
        // rethrows preserving the original trace.
        throw;
    }
}
public Multiplicador ObterMultiplicadorPorSupervisor(Supervisao pSupervisao)
{
try
{
StringBuilder lsql = new StringBuilder();
lsql.Append("SELECT * FROM Multiplicador WHERE IDSupervisor=@id_supervisor");
SqlParameter prm = new SqlParameter("@id_supervisor", pSupervisao.ID);
SqlParameter[] parametros = new SqlParameter[] { prm };
DataTable dt = ConexaoSQLServer.Conexao.ExecutarConsulta(lsql.ToString(), parametros);
Multiplicador m = null;
if (dt.Rows.Count > 0)
{
m = new Multiplicador();
m.ID = Convert.ToInt32(dt.Rows[0]["ID"].ToString());
m.Nome = dt.Rows[0]["Nome"].ToString();
m.NomeCelula = dt.Rows[0]["NomeCelula"].ToString();
m.Foto = dt.Rows[0]["Foto"].ToString();
}
return m;
}
catch (Exception ex)
{
throw ex;
}
}
public int AtualizarMultiplicador(Multiplicador pMultiplicador)
{
try
{
StringBuilder lsql = new StringBuilder();
lsql.Append(" UPDATE Multiplicador SET ");
List<SqlParameter> parametros = new List<SqlParameter>();
if (pMultiplicador.Nome != null)
{
lsql.Append(" Nome=@nome, ");
parametros.Add(new SqlParameter("@nome", pMultiplicador.Nome));
}
if (pMultiplicador.NomeCelula != null)
{
lsql.Append("NomeCelula=@nome_celula, ");
parametros.Add(new SqlParameter("@nome_celula", pMultiplicador.NomeCelula));
}
if (pMultiplicador.Foto != null)
{
lsql.Append("Foto=@foto, ");
parametros.Add(new SqlParameter("@foto", pMultiplicador.Foto));
}
lsql.Remove(lsql.Length - 2, 2);
lsql.Append(" WHERE ID=@id");
parametros.Add(new SqlParameter("@id", pMultiplicador.ID));
int retorno = ConexaoSQLServer.Conexao.ExecutarComando(lsql.ToString(), parametros.ToArray());
return retorno;
}
catch (Exception ex)
{
throw ex;
}
}
public int ExcluirMultiplicador(Multiplicador pMultiplicador)
{
try
{
string sql = "DELETE FROM Multiplicador WHERE ID = @id";
SqlParameter prm = new SqlParameter("@id", pMultiplicador.ID);
SqlParameter[] parametros = new SqlParameter[] { prm };
int retorno = ConexaoSQLServer.Conexao.ExecutarComando(sql, parametros);
return retorno;
}
catch (Exception ex)
{
throw ex;
}
}
public List<Multiplicador> ListarCelulasTabelaMultiplicador()
{
try
{
string sql = " SELECT * FROM Multiplicador ";
DataTable dt = ConexaoSQLServer.Conexao.ExecutarConsulta(sql, null);
List<Multiplicador> lista = new List<Multiplicador>();
for (int i = 0; i < dt.Rows.Count; i++)
{
Multiplicador celula = new Multiplicador();
celula.ID = Convert.ToInt32(dt.Rows[i]["ID"].ToString());
celula.Nome = dt.Rows[i]["Nome"].ToString();
celula.NomeCelula = dt.Rows[i]["NomeCelula"].ToString();
celula.Supervisor = new Supervisao() { ID = Convert.ToInt32(dt.Rows[i]["IDSupervisor"].ToString()) };
celula.Foto = dt.Rows[i]["Foto"].ToString();
lista.Add(celula);
}
return lista;
}
catch (Exception ex)
{
throw ex;
}
}
public List<Celula> ObterCelulaTabelaMultiplicador(string idCelula)
{
try
{
SqlParameter prm = new SqlParameter("@id", idCelula);
List<SqlParameter> parametros = new List<SqlParameter>();
parametros.Add(prm);
string sql = "SELECT m.id, m.NomeCelula, m.Nome, s.id as idSupervisor, a.Descricao " +
" FROM supervisor s INNER JOIN area a ON s.idarea = a.id " +
" INNER JOIN multiplicador m ON s.id = m.idsupervisor " +
" WHERE S.id = @id ";
DataTable dt = ConexaoSQLServer.Conexao.ExecutarConsulta(sql, parametros.ToArray());
List<Celula> listCelula = new List<Celula>();
if (dt != null && dt.Rows.Count > 0)
{
for (int i = 0; i < dt.Rows.Count; i++)
{
Celula celula = new Celula();
celula.ID = Convert.ToInt32(dt.Rows[i]["ID"].ToString());
celula.MultiplicadorNomeCelula = dt.Rows[i]["NomeCelula"].ToString();
celula.MultiplicadorNome = dt.Rows[i]["Nome"].ToString();
celula.SupervisorId = dt.Rows[i]["idSupervisor"].ToString();
celula.AreaDescricao = dt.Rows[i]["descricao"].ToString();
listCelula.Add(celula);
}
}
return listCelula;
}
catch (Exception ex)
{
throw ex;
}
}
public List<DetalhesCelula> ObterDetalhesCelulaTabelaMultiplicador(string idSupervisor)
{
StringBuilder sql = new StringBuilder();
sql.Append("SELECT StatusMes.Descricao as StatusMes, AvaliacaoCelula.IDSupervisor, AvaliacaoCelula.Valor as Nota, Avaliacao.ID AS IDAvaliacao, Indicador.Meta AS Meta, Indicador.Descricao AS Indicador, ");
sql.Append(" Avaliacao.Valor AS ResultadoAvaliacao, AvaliacaoCelula.IDMes ");
sql.Append(" FROM AvaliacaoCelula INNER JOIN ");
sql.Append(" Mes ON AvaliacaoCelula.IDMes = Mes.ID INNER JOIN ");
sql.Append(" Avaliacao ON AvaliacaoCelula.ID = Avaliacao.IDAvaliacaoCelula INNER JOIN ");
sql.Append(" Indicador ON Avaliacao.IDIndicador = Indicador.ID INNER JOIN ");
sql.Append(" StatusMes ON dbo.FUN_IDSTATUSMES(Mes.ID) = StatusMes.ID ");
sql.Append(" WHERE (StatusMes.Descricao = 'Avaliado') AND (AvaliacaoCelula.IDSupervisor = @IDSupervisor) ");
SqlParameter prm = new SqlParameter("@IDSupervisor", idSupervisor);
SqlParameter[] parametros = new SqlParameter[] { prm };
DataTable dt = ConexaoSQLServer.Conexao.ExecutarConsulta(sql.ToString(), parametros);
List<DetalhesCelula> listDetalhesCelula = new List<DetalhesCelula>();
var idMatricula = new SupervisaoDA().ConsultarUmaSupervisao(new Supervisao { ID = Convert.ToInt32(idSupervisor) }).Funcionario.IdMatricula;
if (dt != null && dt.Rows.Count > 0)
{
for (int i = 0; i < dt.Rows.Count; i++)
{
DetalhesCelula detCelula = new DetalhesCelula();
detCelula.StatusMes = dt.Rows[i]["StatusMes"].ToString();
detCelula.IdSupervisor = Convert.ToInt32(dt.Rows[i]["IDSupervisor"].ToString());
detCelula.Nota = Convert.ToDecimal(dt.Rows[i]["Nota"].ToString());
detCelula.IdAvaliacao = Convert.ToInt32(dt.Rows[i]["IDAvaliacao"].ToString());
detCelula.Indicador = dt.Rows[i]["Indicador"].ToString();
detCelula.Meta = dt.Rows[i]["Meta"].ToString();
detCelula.ResultadoAvaliacao1 = Convert.ToDecimal(dt.Rows[i]["ResultadoAvaliacao"].ToString());
detCelula.IdMes = Convert.ToInt32(dt.Rows[i]["IDMes"].ToString());
detCelula.IdMatricula = idMatricula;
listDetalhesCelula.Add(detCelula);
}
}
return listDetalhesCelula;
}
public List<Multiplicador> ConsultarMultiplicador(Multiplicador multiplicador)
{
try
{
StringBuilder sql = new StringBuilder( " SELECT ID, IDSupervisor, Nome, Foto, NomeCelula ");
sql.Append(" FROM Multiplicador ");
List<SqlParameter> parametros = new List<SqlParameter>();
if (multiplicador.ID != 0)
{
sql.Append(" WHERE (ID = @ID) ");
parametros.Add(new SqlParameter("@ID", multiplicador.ID.ToString()));
}
else if (multiplicador.Supervisor != null)
{
sql.Append(" WHERE (IDSupervisor = @IDSupervisor) ");
parametros.Add(new SqlParameter("@IDSupervisor", multiplicador.Supervisor.ID.ToString()));
}
DataTable dt = ConexaoSQLServer.Conexao.ExecutarConsulta(sql.ToString(), parametros.ToArray());
List<Multiplicador> lista = new List<Multiplicador>();
for (int i = 0; i < dt.Rows.Count; i++)
{
Multiplicador celula = new Multiplicador();
celula.ID = Convert.ToInt32(dt.Rows[i]["ID"].ToString());
celula.Nome = dt.Rows[i]["Nome"].ToString();
celula.NomeCelula = dt.Rows[i]["NomeCelula"].ToString();
if (dt.Rows[i]["IDSupervisor"].ToString() != "")
{
Supervisao supervisor = new Supervisao();
supervisor.ID = Convert.ToInt32(dt.Rows[i]["IDSupervisor"].ToString());
supervisor = new SupervisaoDA().ConsultarUmaSupervisao(supervisor);
if (supervisor != null)
celula.Supervisor = supervisor;
}
celula.Foto = dt.Rows[i]["Foto"].ToString();
lista.Add(celula);
}
return lista;
}
catch (Exception ex)
{
throw ex;
}
}
public Multiplicador ConsultarUmMultiplicador(Multiplicador multiplicador)
{
try
{
Multiplicador m = null;
List<Multiplicador> lista = ConsultarMultiplicador(multiplicador);
if (lista.Count == 1)
{
m = lista[0];
}
return m;
}
catch (Exception ex)
{
throw ex;
}
}
public int CalcularNotas(Multiplicador multiplicador, int idMes)
{
//multiplicador = ConsultarUmMultiplicador(multiplicador);
#region AvaliacaoCelula
StringBuilder sql = new StringBuilder("SELECT * FROM AvaliacaoCelula WHERE IDSupervisor = @supervisorID and IDMes = @mesID;");
SqlParameter prm = new SqlParameter("@supervisorID", multiplicador.Supervisor.ID);
SqlParameter prm2 = new SqlParameter("@mesID", idMes);
SqlParameter[] parametros = new SqlParameter[] { prm, prm2 };
DataTable dt = ConexaoSQLServer.Conexao.ExecutarConsulta(sql.ToString(), parametros);
AvaliacaoCelula avaliacaoCelula = null;
avaliacaoCelula = new AvaliacaoCelula();
avaliacaoCelula.ID = Convert.ToInt32(dt.Rows[0]["ID"].ToString());
avaliacaoCelula.IdMes = Convert.ToInt32(dt.Rows[0]["IDMes"]);
avaliacaoCelula.IdSupervisor = Convert.ToInt32(dt.Rows[0]["IdSupervisor"]);
avaliacaoCelula.Valor = Convert.ToDecimal(dt.Rows[0]["Valor"]);
#endregion
int retorno = Calculo(avaliacaoCelula);
return retorno;
}
private static int Calculo(AvaliacaoCelula avaliacaoCelula)
{
#region Avaliacao
StringBuilder sqlAvaliacao = new StringBuilder("SELECT * FROM Avaliacao WHERE IDAvaliacaoCelula = @IDAvaliacaoCelula;");
SqlParameter prmAvaliacao = new SqlParameter("@IDAvaliacaoCelula", avaliacaoCelula.ID);
SqlParameter[] parametrosAvaliacao = new SqlParameter[] { prmAvaliacao };
DataTable dtAvaliacao = ConexaoSQLServer.Conexao.ExecutarConsulta(sqlAvaliacao.ToString(), parametrosAvaliacao);
List<Avaliacao> avaliacoes = new List<Avaliacao>();
if (dtAvaliacao.Rows.Count > 0)
{
for (int i = 0; i < dtAvaliacao.Rows.Count; i++)
{
Avaliacao avaliacao = new Avaliacao();
avaliacao = new Avaliacao();
avaliacao.ID = Convert.ToInt32(dtAvaliacao.Rows[i]["ID"].ToString());
avaliacao.Valor = Convert.ToDecimal(dtAvaliacao.Rows[i]["Valor"]);
avaliacao.IdIndicador = Convert.ToInt32(dtAvaliacao.Rows[i]["IDIndicador"]);
avaliacao.IdAvaliacaoCelula = Convert.ToInt32(dtAvaliacao.Rows[i]["IDAvaliacaoCelula"]);
avaliacao.Indicador = new IndicadorDA().ConsultarUmIndicador(new Indicador { ID = avaliacao.IdIndicador });
avaliacoes.Add(avaliacao);
}
}
#endregion
decimal nota = 0;
foreach (var aval in avaliacoes)
{
var meta = aval.Indicador.ValorMeta;
var valorAval = aval.Valor;
if (aval.Indicador.Comparativo.Equals(">"))
{
if (valorAval > meta)
nota = nota + aval.Indicador.Peso;
}
else if (aval.Indicador.Comparativo.Equals("<"))
{
if (valorAval < meta)
nota = nota + aval.Indicador.Peso;
}
else if (aval.Indicador.Comparativo.Equals("="))
{
if (valorAval >= meta)
nota = nota + aval.Indicador.Peso;
}
}
avaliacaoCelula.Valor = nota;
string sqlUpdate = "UPDATE AvaliacaoCelula SET Valor = @valor WHERE ID = @idAvaliacaoCelula";
SqlParameter prmUpdate = new SqlParameter("@idAvaliacaoCelula", avaliacaoCelula.ID);
SqlParameter prmUpdate2 = new SqlParameter("@valor", avaliacaoCelula.Valor);
SqlParameter[] parametrosUpdate = new SqlParameter[] { prmUpdate, prmUpdate2 };
int retorno = ConexaoSQLServer.Conexao.ExecutarComando(sqlUpdate, parametrosUpdate);
return retorno;
}
public int CalcularTodasNotas(int idMes)
{
StringBuilder sqlAvaliacaoCelula = new StringBuilder("SELECT * FROM AvaliacaoCelula WHERE IDMes = @mesID;");
SqlParameter prmAvaliacaoCelula = new SqlParameter("@mesID", idMes);
SqlParameter[] parametrosAvaliacaoCelula = new SqlParameter[] { prmAvaliacaoCelula };
DataTable dtAvaliacao = ConexaoSQLServer.Conexao.ExecutarConsulta(sqlAvaliacaoCelula.ToString(), parametrosAvaliacaoCelula);
List<AvaliacaoCelula> avaliacoesCelula = new List<AvaliacaoCelula>();
if (dtAvaliacao.Rows.Count > 0)
{
for (int i = 0; i < dtAvaliacao.Rows.Count; i++)
{
AvaliacaoCelula avaliacaoCelula = new AvaliacaoCelula();
avaliacaoCelula.ID = Convert.ToInt32(dtAvaliacao.Rows[i]["ID"].ToString());
avaliacaoCelula.Valor = Convert.ToDecimal(dtAvaliacao.Rows[i]["Valor"]);
avaliacaoCelula.IdSupervisor = Convert.ToInt32(dtAvaliacao.Rows[i]["IDSupervisor"]);
avaliacaoCelula.IdMes = Convert.ToInt32(dtAvaliacao.Rows[i]["IDMes"]);
avaliacoesCelula.Add(avaliacaoCelula);
}
}
int retorno = 0;
foreach (var avaliacaoCelula in avaliacoesCelula)
{
retorno = Calculo(avaliacaoCelula);
if (retorno != 1) break;
}
return retorno;
}
}
}
| |
#if UNITY_EDITOR
using UnityEngine;
using UnityEditor;
using System.Collections;
using System.Collections.Generic;
/* Uni2DEditorGUIUtils
*
* Various common GUI drawing methods used by Uni2D inspectors.
*
* Editor use only.
*/
// Editor-only helpers shared by Uni2D inspectors: animation frame/clip GUIs,
// atlas/clip popup menus backed by GenericMenu, and serialized-setting widgets.
// IMGUI code is order-dependent (Begin/End pairing, Event.current, control IDs),
// so the control flow below must not be reordered.
public static class Uni2DEditorGUIUtils
{
	// Describes the GUI action performed by the user when displaying the animation frame GUI
	public enum AnimationGUIAction
	{
		None,
		MoveUp,
		MoveDown,
		AddUp,
		AddDown,
		Close
	}

	// Bridges GenericMenu's deferred callbacks back into immediate-mode GUI:
	// a menu callback stores the picked value here (keyed by control ID) and
	// the next Repaint pass hands it back via GetSelectedValueForControl.
	private class PopupCallbackInfo<T> where T : UnityEngine.Object
	{
		// Single pending selection at a time; null when nothing is pending.
		public static PopupCallbackInfo<T> instance = null;

		private int m_iControlID = 0;
		private T m_rValue = default( T );
		private bool m_bHasBeenSetted = false;

		public PopupCallbackInfo( int a_iControlID, T a_rValue )
		{
			m_iControlID = a_iControlID;
			m_rValue = a_rValue;
		}

		// Returns the selected value
		public static T GetSelectedValueForControl( int a_iControlID, T a_rValue )
		{
			// Only consume the pending selection during Repaint, so both the
			// Layout and Repaint passes of a frame see consistent GUI state.
			if( Event.current.type == EventType.Repaint )
			{
				// Check if any instance exists and if it concerns the right control ID
				if( instance != null && instance.m_iControlID == a_iControlID )
				{
					// Yes, if it has been setted, return the value and clear the instance ref.
					if( instance.m_bHasBeenSetted )
					{
						GUI.changed = true;
						a_rValue = instance.m_rValue;
					}
					instance = null;
				}
			}
			// if not, return a_rValue
			return a_rValue;
		}

		// Sets value selected in the popup menu
		public void SetValue( T a_rUserData )
		{
			m_rValue = a_rUserData;
			m_bHasBeenSetted = true;
		}
	}

	///// Custom popup callbacks /////

	// GenericMenu callback for the atlas popup. a_rValue is a string:
	// "" selects no atlas, "NEW" prompts for and creates a new atlas prefab,
	// anything else is treated as a Unity asset GUID to load.
	private static void AtlasPopupCallback( object a_rValue )
	{
		if( a_rValue != null && a_rValue is string )
		{
			PopupCallbackInfo<Uni2DTextureAtlas> rPopupCallbackInfoInstance = PopupCallbackInfo<Uni2DTextureAtlas>.instance;
			string rGUID = (string) a_rValue;
			if( string.IsNullOrEmpty( rGUID ) )
			{
				rPopupCallbackInfoInstance.SetValue( null );
			}
			else if( rGUID == "NEW" )
			{
				string oTextureAtlasPath = EditorUtility.SaveFilePanelInProject( "Create new Uni2D texture atlas",
					"TextureAtlas_New",
					"prefab",
					"Create a new Uni2D texture atlas:" );
				// Empty path means the user cancelled the save panel.
				if( string.IsNullOrEmpty( oTextureAtlasPath ) == false )
				{
					// TODO: refactor with Sprite Builder Window
					// Create model
					GameObject oPrefabModel = new GameObject( );
					oPrefabModel.AddComponent<Uni2DTextureAtlas>( );
					// Save it as a prefab
					GameObject rTextureAtlasGameObject = PrefabUtility.CreatePrefab( oTextureAtlasPath, oPrefabModel );
					// Destroy model
					GameObject.DestroyImmediate( oPrefabModel );
					rPopupCallbackInfoInstance.SetValue( rTextureAtlasGameObject.GetComponent<Uni2DTextureAtlas>( ) );
				}
			}
			else
			{
				rPopupCallbackInfoInstance.SetValue( Uni2DEditorUtils.GetAssetFromUnityGUID<Uni2DTextureAtlas>( rGUID ) );
			}
		}
	}

	// GenericMenu callback for the animation clip popup. Same protocol as
	// AtlasPopupCallback: "" -> none, "NEW" -> create prefab, GUID -> load.
	private static void ClipPopupCallback( object a_rValue )
	{
		if( a_rValue != null && a_rValue is string )
		{
			PopupCallbackInfo<Uni2DAnimationClip> rPopupCallbackInfoInstance = PopupCallbackInfo<Uni2DAnimationClip>.instance;
			string rGUID = (string) a_rValue;
			if( string.IsNullOrEmpty( rGUID ) )
			{
				rPopupCallbackInfoInstance.SetValue( null );
			}
			else if( rGUID == "NEW" )
			{
				string oAnimationClipPath = EditorUtility.SaveFilePanelInProject( "Create new Uni2D animation clip",
					"AnimationClip_New",
					"prefab",
					"Create a new Uni2D animation clip:" );
				// Empty path means the user cancelled the save panel.
				if( string.IsNullOrEmpty( oAnimationClipPath ) == false )
				{
					// TODO: refactor with Sprite Builder Window
					// Create model
					GameObject oPrefabModel = new GameObject( );
					oPrefabModel.AddComponent<Uni2DAnimationClip>( );
					// Save it as a prefab
					GameObject rAnimationClipGameObject = PrefabUtility.CreatePrefab( oAnimationClipPath, oPrefabModel );
					// Destroy model
					GameObject.DestroyImmediate( oPrefabModel );
					rPopupCallbackInfoInstance.SetValue( rAnimationClipGameObject.GetComponent<Uni2DAnimationClip>( ) );
				}
			}
			else
			{
				rPopupCallbackInfoInstance.SetValue( Uni2DEditorUtils.GetAssetFromUnityGUID<Uni2DAnimationClip>( rGUID ) );
			}
		}
	}

	// Set Dirty Safe
	// Null-tolerant wrapper around EditorUtility.SetDirty.
	public static void SetDirtySafe(Object a_rObject)
	{
		if(a_rObject != null)
		{
			EditorUtility.SetDirty(a_rObject);
		}
	}

	// Displays a preview of an animation clip at given position rect
	// The animation is played when the mouse is hovering the preview
	public static void DrawAnimationClipPreview( Rect a_rPositionRect, Uni2DAnimationClip a_rAnimationClip, Uni2DAnimationPlayer a_rAnimationPlayer )
	{
		Uni2DAnimationFrame rFrame;
		Texture2D rFrameTexture;
		if( a_rPositionRect.Contains( Event.current.mousePosition ) ) // If mouse hovering preview rect...
		{
			if( a_rAnimationPlayer.Enabled == false || a_rAnimationPlayer.Clip != a_rAnimationClip ) // ... and player not enabled or not set with current clip...
			{
				a_rAnimationPlayer.Play( a_rAnimationClip ); // ... play clip
			}
			rFrame = a_rAnimationPlayer.Frame;
		}
		else if( Event.current.type != EventType.Layout && a_rAnimationPlayer.Enabled && a_rAnimationPlayer.Clip == a_rAnimationClip ) // Stop player if playing current clip
		{
			a_rAnimationPlayer.Stop( false );
			a_rAnimationPlayer.FrameIndex = 0;
			rFrame = a_rAnimationPlayer.Frame;
		}
		else // Use first clip frame otherwise (if any)
		{
			rFrame = a_rAnimationClip != null && a_rAnimationClip.frames != null && a_rAnimationClip.frames.Count > 0 ? a_rAnimationClip.frames[ 0 ] : null;
		}
		// Fall back to a plain white texture when the frame has no usable texture.
		if( rFrame == null || rFrame.textureContainer == null || rFrame.textureContainer.Texture == null )
		{
			rFrameTexture = EditorGUIUtility.whiteTexture;
		}
		else
		{
			// NOTE(review): relies on an implicit Texture2DContainer -> Texture2D
			// conversion — confirm the operator exists on Texture2DContainer.
			rFrameTexture = rFrame.textureContainer;
		}
		EditorGUI.DrawPreviewTexture( a_rPositionRect, rFrameTexture );
		return;
	}

	// Displays the animation frame GUI
	// Returns the performed user action
	public static AnimationGUIAction DisplayAnimationFrame( Uni2DAnimationFrame a_rAnimationFrame, Uni2DTextureAtlas a_rGlobalAtlas, ref bool a_bEventFoldout )
	{
		AnimationGUIAction eAction = AnimationGUIAction.None;
		// Box
		EditorGUILayout.BeginVertical( EditorStyles.textField );
		{
			///// Top toolbar /////
			EditorGUILayout.BeginHorizontal( EditorStyles.toolbar, GUILayout.ExpandWidth( true ) );
			{
				// ^
				if( GUILayout.Button( "\u25B2" /*"\u2191"*/, EditorStyles.toolbarButton, GUILayout.ExpandWidth( false ) ) )
				{
					eAction = AnimationGUIAction.MoveUp;
				}
				// v
				if( GUILayout.Button( "\u25BC" /*"\u2193"*/, EditorStyles.toolbarButton, GUILayout.ExpandWidth( false ) ) )
				{
					eAction = AnimationGUIAction.MoveDown;
				}
				// + ^
				if( GUILayout.Button( "+ \u25B2" /*"+ \u2191"*/, EditorStyles.toolbarButton, GUILayout.ExpandWidth( true ) ) )
				{
					eAction = AnimationGUIAction.AddUp;
				}
				// X
				if( GUILayout.Button( "X", EditorStyles.toolbarButton, GUILayout.ExpandWidth( false ) ) )
				{
					eAction = AnimationGUIAction.Close;
				}
			}
			EditorGUILayout.EndHorizontal( );
			///////////////
			EditorGUILayout.Space( );
			///// Animation Frame box /////
			EditorGUILayout.BeginHorizontal( );
			{
				// NOTE(review): implicit Texture2DContainer -> Texture2D conversion assumed here.
				Texture2D rFrameTexture = a_rAnimationFrame.textureContainer;
				string rFrameTextureGUID = a_rAnimationFrame.textureContainer.GUID;
				bool bHasFrameTextureChanged;
				// Display frame texture on the left
				Rect oClipTextureRect = GUILayoutUtility.GetRect( 64.0f, 64.0f, 64.0f, 64.0f, GUILayout.ExpandWidth( false ) );
				EditorGUI.BeginChangeCheck( );
				{
					rFrameTexture = (Texture2D) EditorGUI.ObjectField( oClipTextureRect, GUIContent.none, rFrameTexture, typeof( Texture2D ), false );
				}
				bHasFrameTextureChanged = EditorGUI.EndChangeCheck( );
				EditorGUILayout.BeginVertical( GUILayout.ExpandWidth( true ) );
				{
					// Frame texture name
					GUILayout.Label( rFrameTexture != null ? rFrameTexture.name : "(No Texture)", EditorStyles.boldLabel, GUILayout.ExpandWidth( false ) );
					// Frame Name
					a_rAnimationFrame.name = EditorGUILayout.TextField( "Frame Name", a_rAnimationFrame.name);
					// Frame atlas
					EditorGUILayout.BeginHorizontal( );
					{
						// Disable popup menu if global atlas is set
						EditorGUI.BeginDisabledGroup( a_rGlobalAtlas != null );
						{
							// Atlas popup
							string[ ] oTextureGUID = ( rFrameTexture != null ) ? new string[ 1 ]{ rFrameTextureGUID } : new string[ 0 ];
							EditorGUILayout.PrefixLabel( "Use Atlas" );
							a_rAnimationFrame.atlas = Uni2DEditorGUIUtils.AtlasPopup( a_rAnimationFrame.atlas, oTextureGUID );
						}
						EditorGUI.EndDisabledGroup( );
						// Atlas select button
						EditorGUI.BeginDisabledGroup( a_rAnimationFrame.atlas == null);
						{
							if( GUILayout.Button( "Select", GUILayout.Width( 80.0f ) ) )
							{
								EditorGUIUtility.PingObject( a_rAnimationFrame.atlas.gameObject );
							}
						}
						EditorGUI.EndDisabledGroup( );
					}
					EditorGUILayout.EndHorizontal( );
					// Trigger?
					a_rAnimationFrame.triggerEvent = EditorGUILayout.Toggle( "Trigger Event", a_rAnimationFrame.triggerEvent );
					// Event param
					a_bEventFoldout = EditorGUILayout.Foldout( a_bEventFoldout, "Frame Infos" );
					if( a_bEventFoldout )
					{
						Uni2DAnimationFrameInfos rFrameInfos = a_rAnimationFrame.frameInfos;
						++EditorGUI.indentLevel;
						{
							rFrameInfos.stringInfo = EditorGUILayout.TextField( "String Info", rFrameInfos.stringInfo );
							rFrameInfos.intInfo = EditorGUILayout.IntField( "Int Info", rFrameInfos.intInfo );
							rFrameInfos.floatInfo = EditorGUILayout.FloatField( "Float Info", rFrameInfos.floatInfo );
							rFrameInfos.objectInfo = EditorGUILayout.ObjectField( "Object Info", rFrameInfos.objectInfo, typeof( Object ), true );
						}
						--EditorGUI.indentLevel;
					}
					EditorGUILayout.Space( );
				}
				EditorGUILayout.EndVertical( );
				if( bHasFrameTextureChanged )
				{
					// Save texture in texture container, keep reference to the asset if not using an atlas
					a_rAnimationFrame.textureContainer = new Texture2DContainer( rFrameTexture, a_rGlobalAtlas == null );
				}
			}
			EditorGUILayout.EndHorizontal( );
			///////////////
			EditorGUILayout.Space( );
			///// Bottom toolbar /////
			EditorGUILayout.BeginHorizontal( EditorStyles.toolbar, GUILayout.ExpandWidth( true ) );
			{
				// + v
				if( GUILayout.Button( "+ \u25BC" /*"+ \u2193"*/, EditorStyles.toolbarButton, GUILayout.ExpandWidth( true ) ) )
				{
					eAction = AnimationGUIAction.AddDown;
				}
			}
			EditorGUILayout.EndHorizontal( );
		}
		EditorGUILayout.EndVertical( );
		return eAction;
	}

	// Displays an animation clip header
	// Compact variant: shows the move/close toolbar and returns the user action.
	public static AnimationGUIAction DisplayCompactAnimationClipHeader( Uni2DAnimationClip a_rAnimationClip, Uni2DAnimationPlayer a_rAnimationPreviewPlayer, int a_iClipIndex = -1 )
	{
		AnimationGUIAction eAction;
		Uni2DEditorGUIUtils.DoDisplayAnimationClipHeader( a_rAnimationClip, a_rAnimationPreviewPlayer, out eAction, true, false, a_iClipIndex );
		return eAction;
	}

	// Non-compact variant: shows a foldout instead of a toolbar and returns its state.
	public static bool DisplayAnimationClipHeader( Uni2DAnimationClip a_rAnimationClip, Uni2DAnimationPlayer a_rAnimationPreviewPlayer, bool a_bFolded )
	{
		AnimationGUIAction eAction;
		return Uni2DEditorGUIUtils.DoDisplayAnimationClipHeader( a_rAnimationClip, a_rAnimationPreviewPlayer, out eAction, false, a_bFolded );
	}

	// Shared implementation of the two clip-header displays above: preview,
	// frame-rate/wrap-mode fields, clip length summary, optional toolbar or foldout.
	private static bool DoDisplayAnimationClipHeader( Uni2DAnimationClip a_rAnimationClip, Uni2DAnimationPlayer a_rAnimationPreviewPlayer, out AnimationGUIAction a_eAction, bool a_bCompactMode, bool a_bFolded, int iClipIndex = -1 )
	{
		a_eAction = AnimationGUIAction.None;
		///// Clip header /////
		EditorGUILayout.BeginVertical( EditorStyles.textField );
		{
			if( a_bCompactMode )
			{
				///// Top toolbar /////
				EditorGUILayout.BeginHorizontal( EditorStyles.toolbar, GUILayout.ExpandWidth( true ) );
				{
					// ^
					if( GUILayout.Button( "\u25B2" /*"\u2191"*/, EditorStyles.toolbarButton, GUILayout.ExpandWidth( false ) ) )
					{
						a_eAction = AnimationGUIAction.MoveUp;
					}
					// v
					if( GUILayout.Button( "\u25BC" /*"\u2193"*/, EditorStyles.toolbarButton, GUILayout.ExpandWidth( false ) ) )
					{
						a_eAction = AnimationGUIAction.MoveDown;
					}
					if( iClipIndex > -1 )
					{
						if( GUILayout.Button( "Clip #" + iClipIndex, EditorStyles.toolbarButton, GUILayout.ExpandWidth( true ) ) )
						{
							EditorGUIUtility.PingObject( a_rAnimationClip.gameObject );
						}
					}
					else
					{
						GUILayout.FlexibleSpace( );
					}
					// X
					if( GUILayout.Button( "X", EditorStyles.toolbarButton, GUILayout.ExpandWidth( false ) ) )
					{
						a_eAction = AnimationGUIAction.Close;
					}
				}
				EditorGUILayout.EndHorizontal( );
			}
			///////////////
			EditorGUILayout.Space( );
			EditorGUILayout.BeginHorizontal( );
			{
				Rect oClipTextureRect = GUILayoutUtility.GetRect( 64.0f, 64.0f, 64.0f, 64.0f, GUILayout.ExpandWidth( false ) );
				// Animation clip preview
				Uni2DEditorGUIUtils.DrawAnimationClipPreview( oClipTextureRect, a_rAnimationClip, a_rAnimationPreviewPlayer );
				// Clip settings
				EditorGUILayout.BeginVertical( );
				{
					//string rName;
					float fFrameRate;
					Uni2DAnimationClip.WrapMode eWrapMode;
					// Name
					//rName = EditorGUILayout.TextField( "Clip Name", a_rAnimationClip.name );
					EditorGUILayout.LabelField( a_rAnimationClip.name, EditorStyles.boldLabel );
					// Nested change checks: the inner one syncs the preview player
					// immediately; the outer one persists to the clip asset.
					EditorGUI.BeginChangeCheck( );
					{
						EditorGUI.BeginChangeCheck( );
						{
							// Frame rate
							fFrameRate = EditorGUILayout.FloatField( "Frame Rate", a_rAnimationClip.frameRate );
							// Wrap mode
							eWrapMode = (Uni2DAnimationClip.WrapMode) EditorGUILayout.EnumPopup( "Wrap Mode", a_rAnimationClip.wrapMode );
						}
						if( EditorGUI.EndChangeCheck( ) )
						{
							// Update animation players settings
							a_rAnimationPreviewPlayer.FrameRate = fFrameRate;
							a_rAnimationPreviewPlayer.WrapMode = eWrapMode;
						}
					}
					if( EditorGUI.EndChangeCheck( ) )
					{
						//a_rAnimationClip.name = rName;
						a_rAnimationClip.frameRate = fFrameRate;
						a_rAnimationClip.wrapMode = eWrapMode;
						if( a_bCompactMode )
						{
							a_rAnimationClip.ApplySettings( Uni2DAnimationClip.AnimationClipRegeneration.RegenerateNothing );
						}
						EditorUtility.SetDirty( a_rAnimationClip );
					}
					// Clip length infos
					// TODO: refactor with AnimationPlayer
					int iClipFrameCount = a_rAnimationClip.FrameCount;
					int iWrappedFrameCount = iClipFrameCount;
					// PingPong plays forward then backward, skipping the two end
					// frames on the return leg.
					if( a_rAnimationClip.wrapMode == Uni2DAnimationClip.WrapMode.PingPong && iWrappedFrameCount > 2 )
					{
						iWrappedFrameCount = ( iWrappedFrameCount * 2 - 2 );
					}
					float fClipLength = Mathf.Abs( iWrappedFrameCount / a_rAnimationClip.frameRate );
					EditorGUILayout.BeginHorizontal( );
					{
						EditorGUILayout.LabelField( iClipFrameCount + " frame(s) = "
							+ ( iClipFrameCount != iWrappedFrameCount
								? ( iWrappedFrameCount + " wrapped frame(s) = " )
								: null
							)
							+ fClipLength + " sec. @ "
							+ a_rAnimationClip.frameRate + " FPS",
							EditorStyles.miniLabel, GUILayout.ExpandWidth( false ) );
						if( a_bCompactMode && GUILayout.Button( "Edit", EditorStyles.miniButton, GUILayout.ExpandWidth( true ) ) )
						{
							Selection.activeObject = a_rAnimationClip;
						}
					}
					EditorGUILayout.EndHorizontal( );
				}
				EditorGUILayout.EndVertical( );
			}
			EditorGUILayout.EndHorizontal( );
			// Frame foldout
			if( !a_bCompactMode )
			{
				EditorGUILayout.BeginHorizontal( );
				{
					a_bFolded = EditorGUILayout.Foldout( a_bFolded, GUIContent.none );
				}
				EditorGUILayout.EndHorizontal( );
			}
			else
			{
				EditorGUILayout.Space( );
			}
		}
		EditorGUILayout.EndVertical( );
		return a_bFolded;
	}

	// Popup button listing candidate atlases for the given texture GUIDs;
	// selection is routed through AtlasPopupCallback + PopupCallbackInfo and
	// picked up on a later Repaint. Returns the (possibly updated) atlas.
	public static Uni2DTextureAtlas AtlasPopup( Uni2DTextureAtlas a_rTextureAtlas, IEnumerable<string> a_rTextureGUIDs, params GUILayoutOption[ ] a_rGUILayoutOptions )
	{
		// Get button control ID
		int iControlID = GUIUtility.GetControlID( FocusType.Native );
		// Get selected value for our control
		// If no PopupCallbackInfo instance exists, the returned value is a_rTextureAtlas
		a_rTextureAtlas = PopupCallbackInfo<Uni2DTextureAtlas>.GetSelectedValueForControl( iControlID, a_rTextureAtlas );
		// Create a new generic menu
		// Each item menu will use AtlasPopupCallback as callback
		// AtlasPopupCallback will perform the logic and save the selected atlas to
		// the PopupCallbackInfo instance.
		string oPopupSelected = EditorGUI.showMixedValue ? "-" : ( a_rTextureAtlas != null ? a_rTextureAtlas.name : "(None)" );
		if( GUILayout.Button( oPopupSelected, EditorStyles.popup, a_rGUILayoutOptions ) )
		{
			string rAtlasGUID = Uni2DEditorUtils.GetUnityAssetGUID( a_rTextureAtlas );
			// Create a new popup callback info (control ID) and save it as current instance
			PopupCallbackInfo<Uni2DTextureAtlas>.instance = new PopupCallbackInfo<Uni2DTextureAtlas>( iControlID, a_rTextureAtlas );
			// Create our generic menu
			GenericMenu oPopupMenu = new GenericMenu( );
			if( a_rTextureAtlas != null )
			{
				oPopupMenu.AddItem( new GUIContent( a_rTextureAtlas.name ), true, AtlasPopupCallback, rAtlasGUID );
				oPopupMenu.AddSeparator( "" );
			}
			// "None" special item menu
			oPopupMenu.AddItem( new GUIContent( "(None)", "No atlasing" ), a_rTextureAtlas == null, AtlasPopupCallback, "" );
			oPopupMenu.AddSeparator( "" );
			// "Create" special item menu
			oPopupMenu.AddItem( new GUIContent( "Create a new atlas...", "Creates a new Uni2D atlas and add the texture(s) right away" ), false, AtlasPopupCallback, "NEW" );
			Uni2DEditorAssetTable rAssetTable = Uni2DEditorAssetTable.Instance;
			// List atlases containing the texture(s)
			Dictionary<string, string> oAtlasesReadyToUse = rAssetTable.GetAtlasNamesUsingTheseTextures( a_rTextureGUIDs );
			// The current atlas already has its own checked entry above.
			if( !string.IsNullOrEmpty( rAtlasGUID ) )
			{
				oAtlasesReadyToUse.Remove( rAtlasGUID );
			}
			if( oAtlasesReadyToUse.Count > 0 )
			{
				oPopupMenu.AddSeparator( "" );
				// Add an item menu for each ready to use atlas
				foreach( KeyValuePair<string, string> rAtlasNameGUIDPair in oAtlasesReadyToUse )
				{
					oPopupMenu.AddItem( new GUIContent( rAtlasNameGUIDPair.Value ), rAtlasNameGUIDPair.Key == rAtlasGUID, AtlasPopupCallback, rAtlasNameGUIDPair.Key );
				}
			}
			// List all available atlases
			Dictionary<string, string> oAvailableAtlases = rAssetTable.GetAllAtlasNames( );
			if( oAvailableAtlases.Count > 0 )
			{
				oPopupMenu.AddSeparator( "" );
				// Add an item menu for each available atlas, in a submenu
				foreach( KeyValuePair<string, string> rAtlasNameGUIDPair in oAvailableAtlases )
				{
					oPopupMenu.AddItem( new GUIContent( "All atlases/" + rAtlasNameGUIDPair.Value ), rAtlasNameGUIDPair.Key == rAtlasGUID, AtlasPopupCallback, rAtlasNameGUIDPair.Key );
				}
			}
			// Finally show up the menu
			oPopupMenu.ShowAsContext( );
		}
		return a_rTextureAtlas;
	}

	// Button-shaped popup to pick or create an animation clip. Outputs the
	// selected clip (null until a deferred selection arrives) and returns
	// whether a selection was consumed this frame.
	public static bool AddClipPopup( string a_rLabel, out Uni2DAnimationClip a_rAnimationClip, params GUILayoutOption[ ] a_rGUILayoutOptions )
	{
		bool bHasChanged;
		// Get button control ID
		int iControlID = GUIUtility.GetControlID( FocusType.Native );
		EditorGUI.BeginChangeCheck( );
		{
			// Get selected value for our control
			// If no PopupCallbackInfo instance exists, the returned value is a_rClip
			a_rAnimationClip = PopupCallbackInfo<Uni2DAnimationClip>.GetSelectedValueForControl( iControlID, null );
		}
		bHasChanged = EditorGUI.EndChangeCheck( );
		// Create a new generic menu
		// Each item menu will use AtlasPopupCallback as callback
		// AtlasPopupCallback will perform the logic and save the selected atlas to
		// the PopupCallbackInfo instance.
		if( GUILayout.Button( a_rLabel, a_rGUILayoutOptions ) )
		{
			// Create a new popup callback info (control ID) and save it as current instance
			PopupCallbackInfo<Uni2DAnimationClip>.instance = new PopupCallbackInfo<Uni2DAnimationClip>( iControlID, null );
			// Create our generic menu
			GenericMenu oPopupMenu = new GenericMenu( );
			// "Create" special item menu
			oPopupMenu.AddItem( new GUIContent( "Create a new animation clip...", "Creates a new Uni2D animation clip" ), false, ClipPopupCallback, "NEW" );
			Uni2DEditorAssetTable rAssetTable = Uni2DEditorAssetTable.Instance;
			// List all available atlases
			Dictionary<string, string> oAvailableAnimationClips = rAssetTable.GetAllClipNames( );
			if( oAvailableAnimationClips.Count != 0 )
			{
				oPopupMenu.AddSeparator( "" );
				// Add an item menu for each ready to use atlas
				foreach( KeyValuePair<string, string> rAnimationClipNameGUIDPair in oAvailableAnimationClips )
				{
					oPopupMenu.AddItem( new GUIContent( rAnimationClipNameGUIDPair.Value ), false, ClipPopupCallback, rAnimationClipNameGUIDPair.Key );
				}
			}
			// Finally show up the menu
			oPopupMenu.ShowAsContext( );
		}
		return bHasChanged;
	}

	// Templated enum popup control for serialized object
	// Shows the mixed-value dash for multi-object editing and writes back only on change.
	public static T SerializedEnumPopup<T>( SerializedSetting<T> a_rSerializedSetting, string a_rLabel = "" ) where T : struct
	{
		System.Enum eNewValue;
		bool bSavedShowMixedValue = EditorGUI.showMixedValue;
		EditorGUI.BeginChangeCheck( );
		{
			EditorGUI.showMixedValue = a_rSerializedSetting.HasMultipleDifferentValues;
			eNewValue = EditorGUILayout.EnumPopup( a_rLabel, a_rSerializedSetting.Value as System.Enum );
		}
		// Round-trip through the enum name to convert System.Enum back to T.
		T eResult = (T) System.Enum.Parse( typeof( T ), eNewValue.ToString( ) );
		if( EditorGUI.EndChangeCheck( ) )
		{
			a_rSerializedSetting.Value = eResult;
		}
		EditorGUI.showMixedValue = bSavedShowMixedValue;
		return eResult;
	}

	// Displays a popup menu filled with all potential atlases for the given input textures
	// Displays all available atlases if a_rTexturesToContain is null
	// Manages the serialized settings update
	public static Uni2DTextureAtlas SerializedAtlasPopup( SerializedSetting<Uni2DTextureAtlas> a_rSerializedTextureAtlas, IEnumerable<string> a_rTextureGUIDsToContain = null )
	{
		Uni2DTextureAtlas rCurrentAtlas = a_rSerializedTextureAtlas.HasMultipleDifferentValues ? null : a_rSerializedTextureAtlas.Value;
		bool bSavedShowMixedValue = EditorGUI.showMixedValue;
		EditorGUI.showMixedValue = a_rSerializedTextureAtlas.HasMultipleDifferentValues;
		{
			EditorGUI.BeginChangeCheck( );
			{
				rCurrentAtlas = Uni2DEditorGUIUtils.AtlasPopup( rCurrentAtlas, a_rTextureGUIDsToContain );
			}
			if( EditorGUI.EndChangeCheck( ) )
			{
				a_rSerializedTextureAtlas.Value = rCurrentAtlas;
			}
		}
		EditorGUI.showMixedValue = bSavedShowMixedValue;
		return rCurrentAtlas;
	}
}
#endif
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using Microsoft.Scripting.Runtime;
using IronPython.Runtime.Operations;
namespace IronPython.Runtime.Types {
[PythonType("mappingproxy")]
public sealed class MappingProxy : IDictionary<object, object>, IDictionary {
    // Read-only dictionary view. Wraps either an explicit PythonDictionary or,
    // when constructed from a PythonType, that type's member dictionary.
    // All mutating operations throw, matching Python's read-only mappingproxy.

    // Resolve the proxied dictionary: the explicit one if present, otherwise
    // the member dictionary of the wrapped type.
    internal PythonDictionary GetDictionary(CodeContext context) => dictionary ?? type.GetMemberDictionary(context, false);

    // Exactly one of these is non-null, depending on which constructor ran.
    private readonly PythonDictionary dictionary;
    private readonly PythonType type;

    // Proxy over a type's member dictionary. The context parameter is not used
    // in the body; it is kept for call-site compatibility.
    internal MappingProxy(CodeContext context, PythonType/*!*/ dt) {
        Debug.Assert(dt != null);
        type = dt;
    }

    // Proxy over an explicit dictionary.
    public MappingProxy([NotNull]PythonDictionary dict) {
        dictionary = dict;
    }

    #region Python Public API Surface

    public int __len__(CodeContext context) => GetDictionary(context).Count;

    // Membership test: "value" is used as a key lookup on the proxied dict.
    public bool __contains__(CodeContext/*!*/ context, object value) => GetDictionary(context).TryGetValue(value, out _);

    public string/*!*/ __str__(CodeContext/*!*/ context) => DictionaryOps.__repr__(context, this);

    // dict.get semantics: returns d (default null) when the key is absent.
    public object get(CodeContext/*!*/ context, [NotNull]object k, object d=null) {
        object res;
        if (!GetDictionary(context).TryGetValue(k, out res)) {
            res = d;
        }
        return res;
    }

    // keys/values/items: fast path for a plain PythonDictionary; for a dict
    // subclass the correspondingly named method is invoked dynamically so
    // overrides are honored.
    public object keys(CodeContext context) {
        var dict = GetDictionary(context);
        if (dict.GetType() == typeof(PythonDictionary)) return dict.keys();
        PythonTypeOps.TryInvokeUnaryOperator(context, dict, nameof(dict.keys), out object keys);
        return keys;
    }

    public object values(CodeContext context) {
        var dict = GetDictionary(context);
        if (dict.GetType() == typeof(PythonDictionary)) return dict.values();
        PythonTypeOps.TryInvokeUnaryOperator(context, dict, nameof(dict.values), out object values);
        return values;
    }

    public object items(CodeContext context) {
        var dict = GetDictionary(context);
        if (dict.GetType() == typeof(PythonDictionary)) return dict.items();
        PythonTypeOps.TryInvokeUnaryOperator(context, dict, nameof(dict.items), out object items);
        return items;
    }

    // Returns a plain, mutable copy of the proxied dictionary.
    public PythonDictionary copy(CodeContext/*!*/ context) => new PythonDictionary(context, this);

    // __hash__ is null (None) — the convention used for unhashable types.
    public const object __hash__ = null;

    // Equality: two type-backed proxies compare by wrapped type; otherwise the
    // underlying dictionaries are compared structurally.
    public object __eq__(CodeContext/*!*/ context, object other) {
        if (other is MappingProxy proxy) {
            if (type == null) {
                // Dictionary-backed proxy: fall through to dictionary comparison.
                return __eq__(context, proxy.GetDictionary(context));
            }
            return type == proxy.type;
        }
        if (other is PythonDictionary) {
            return ((IStructuralEquatable)GetDictionary(context)).Equals(other, DefaultContext.DefaultPythonContext.EqualityComparerNonGeneric);
        }
        return false;
    }

    #endregion

    #region IDictionary Members

    // Indexer: reads delegate to the proxied dict; writes always throw.
    public object this[object key] {
        get => GetDictionary(DefaultContext.Default)[key];
        [PythonHidden]
        set => throw PythonOps.TypeError("'mappingproxy' object does not support item assignment");
    }

    bool IDictionary.Contains(object key) => __contains__(DefaultContext.Default, key);

    #endregion

    #region IEnumerable Members

    // Enumerates keys, like iterating a Python mapping.
    IEnumerator IEnumerable.GetEnumerator() => GetDictionary(DefaultContext.Default).keys().GetEnumerator();

    #endregion

    #region IDictionary Members

    // Add funnels through the indexer setter and therefore always throws.
    [PythonHidden]
    public void Add(object key, object value) {
        this[key] = value;
    }

    [PythonHidden]
    public void Clear() => throw new InvalidOperationException("mappingproxy is read-only");

    IDictionaryEnumerator IDictionary.GetEnumerator() => ((IDictionary)GetDictionary(DefaultContext.Default)).GetEnumerator();

    bool IDictionary.IsFixedSize => true;

    bool IDictionary.IsReadOnly => true;

    ICollection IDictionary.Keys {
        get {
            // Return the key collection directly when it already implements the
            // non-generic ICollection; otherwise materialize it into a list.
            ICollection<object> res = GetDictionary(DefaultContext.Default).Keys;
            if (res is ICollection coll) {
                return coll;
            }
            return new List<object>(res);
        }
    }

    void IDictionary.Remove(object key) => throw new InvalidOperationException("mappingproxy is read-only");

    ICollection IDictionary.Values {
        get {
            // Snapshot the values into a list.
            var res = new List<object>();
            foreach (KeyValuePair<object, object> kvp in GetDictionary(DefaultContext.Default)) {
                res.Add(kvp.Value);
            }
            return res;
        }
    }

    #endregion

    #region ICollection Members

    // Copies DictionaryEntry items into the destination array.
    void ICollection.CopyTo(Array array, int index) {
        foreach (DictionaryEntry de in (IDictionary)this) {
            array.SetValue(de, index++);
        }
    }

    int ICollection.Count => __len__(DefaultContext.Default);

    bool ICollection.IsSynchronized => false;

    object ICollection.SyncRoot => this;

    #endregion

    #region IDictionary<object,object> Members

    bool IDictionary<object, object>.ContainsKey(object key) => __contains__(DefaultContext.Default, key);

    ICollection<object> IDictionary<object, object>.Keys => GetDictionary(DefaultContext.Default).Keys;

    bool IDictionary<object, object>.Remove(object key) => throw new InvalidOperationException("mappingproxy is read-only");

    bool IDictionary<object, object>.TryGetValue(object key, out object value) => GetDictionary(DefaultContext.Default).TryGetValue(key, out value);

    ICollection<object> IDictionary<object, object>.Values => GetDictionary(DefaultContext.Default).Values;

    #endregion

    #region ICollection<KeyValuePair<object,object>> Members

    // Add funnels through the indexer setter and therefore always throws.
    void ICollection<KeyValuePair<object, object>>.Add(KeyValuePair<object, object> item) {
        this[item.Key] = item.Value;
    }

    // Membership is decided by key only; the pair's value is not compared.
    bool ICollection<KeyValuePair<object, object>>.Contains(KeyValuePair<object, object> item) => __contains__(DefaultContext.Default, item.Key);

    void ICollection<KeyValuePair<object, object>>.CopyTo(KeyValuePair<object, object>[] array, int arrayIndex) {
        foreach (KeyValuePair<object, object> de in (IEnumerable<KeyValuePair<object, object>>)this) {
            array.SetValue(de, arrayIndex++);
        }
    }

    int ICollection<KeyValuePair<object, object>>.Count => __len__(DefaultContext.Default);

    bool ICollection<KeyValuePair<object, object>>.IsReadOnly => true;

    // Delegates to the generic Remove, which always throws (read-only).
    bool ICollection<KeyValuePair<object, object>>.Remove(KeyValuePair<object, object> item) => ((IDictionary<object, object>)this).Remove(item.Key);

    #endregion

    #region IEnumerable<KeyValuePair<object,object>> Members

    IEnumerator<KeyValuePair<object, object>> IEnumerable<KeyValuePair<object, object>>.GetEnumerator() => GetDictionary(DefaultContext.Default).GetEnumerator();

    #endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Data;
using System.Text;
using System.Windows.Forms;
using System.Drawing.Drawing2D;
using System.Drawing.Text;
using System.IO;
namespace ProGauges
{
public partial class LinearGauge : UserControl, ISupportInitialize
{
// Holds the embedded Eurostile font loaded from assembly resources (LoadFont()).
private PrivateFontCollection m_pfc = new PrivateFontCollection();
private Color m_BackGroundColor = Color.FromArgb(24,24,24);
// Set between ISupportInitialize.BeginInit()/EndInit().
private bool m_bIsInitializing;
// Centre rectangle hosting the colored bar; recomputed on resize.
private Rectangle rcentre;
private int m_NumberOfDecimals = 0;
// Number of decimals used when formatting tick labels and the value text.
// NOTE(review): unlike the other properties, setting this does not Invalidate
// the control — confirm whether an immediate repaint is expected.
public int NumberOfDecimals
{
    get { return m_NumberOfDecimals; }
    set { m_NumberOfDecimals = value; }
}
// Highest value seen since the last peak-hold fade-out (float.MinValue = none).
private float m_MaxPeakHoldValue = float.MinValue;
private bool m_ShowRanges = true;
// Drives the peak-hold marker fade (see peakholdtimer_Elapsed).
private System.Timers.Timer peakholdtimer = new System.Timers.Timer();
// Toggles drawing of the recommended/threshold range strips.
[Browsable(true), Category("LinearGauge"), Description("Toggles ranges display on/off"), DefaultValue(typeof(bool), "true")]
public bool ShowRanges
{
    get { return m_ShowRanges; }
    set
    {
        if (m_ShowRanges != value)
        {
            m_ShowRanges = value;
            base.Invalidate();
        }
    }
}
private bool m_ShowValue = true;
// Toggles display of the numeric value in the text line.
[Browsable(true), Category("LinearGauge"), Description("Toggles value display on/off"), DefaultValue(typeof(bool), "true")]
public bool ShowValue
{
    get { return m_ShowValue; }
    set
    {
        if (m_ShowValue != value)
        {
            m_ShowValue = value;
            base.Invalidate();
        }
    }
}
private bool m_ShowValueInPercentage = false;
// When true, the value is shown as a percentage of the scale instead of units.
[Browsable(true), Category("LinearGauge"), Description("Toggles value display from actual values to percentage"), DefaultValue(typeof(bool), "false")]
public bool ShowValueInPercentage
{
    get { return m_ShowValueInPercentage; }
    set
    {
        if (m_ShowValueInPercentage != value)
        {
            m_ShowValueInPercentage = value;
            base.Invalidate();
        }
    }
}
private string m_gaugeText = "Linear gauge";
// Caption drawn below the gauge (see DrawText).
[Browsable(true), Category("LinearGauge"), Description("Sets the text for the gauge"), DefaultValue(typeof(bool), "Linear gauge")]
public string GaugeText
{
    get { return m_gaugeText; }
    set
    {
        if (m_gaugeText != value)
        {
            m_gaugeText = value;
            base.Invalidate();
        }
    }
}
private string m_gaugeUnits = "units";
// Unit suffix appended after the numeric value (see DrawText).
[Browsable(true), Category("LinearGauge"), Description("Sets the units for the gauge"), DefaultValue(typeof(bool), "units")]
public string GaugeUnits
{
    get { return m_gaugeUnits; }
    set
    {
        if (m_gaugeUnits != value)
        {
            m_gaugeUnits = value;
            base.Invalidate();
        }
    }
}
// Background fill color (also used as a gradient endpoint in DrawRanges).
[Browsable(true), Category("LinearGauge"), Description("Set the gauge background color"), DefaultValue(typeof(Color), "System.Drawing.Color.Black")]
public Color BackGroundColor
{
    get { return m_BackGroundColor; }
    set
    {
        m_BackGroundColor = value;
        base.Invalidate();
    }
}
private Color m_BevelLineColor = Color.Gray;
// Color of the outer bevel frame and of the range outlines.
[Browsable(true), Category("LinearGauge"), Description("Set the gauge bevel color"), DefaultValue(typeof(Color), "System.Drawing.Color.Gray")]
public Color BevelLineColor
{
    get { return m_BevelLineColor; }
    set
    {
        m_BevelLineColor = value;
        base.Invalidate();
    }
}
// Alpha of the peak-hold marker; 255 when a new peak is set, decremented by
// the timer until it reaches 0.
private int m_peakholdOpaque = 0;
private float m_value = 0;
// Current gauge value. Setting a new maximum also (re)arms the peak-hold
// marker at full opacity.
[Browsable(true), Category("LinearGauge"), Description("Set the gauge value"), DefaultValue(typeof(float), "0")]
public float Value
{
    get { return m_value; }
    set
    {
        if (m_value != value)
        {
            m_value = value;
            base.Invalidate();
        }
        if (m_value > m_MaxPeakHoldValue)
        {
            m_MaxPeakHoldValue = m_value;
            m_peakholdOpaque = 255;
        }
    }
}
private float m_recommendedValue = 25;
// Centre of the recommended band drawn below the scale (see DrawRanges).
[Browsable(true), Category("LinearGauge"), Description("Set the gauge recommended value"), DefaultValue(typeof(float), "25")]
public float RecommendedValue
{
    get { return m_recommendedValue; }
    set
    {
        if (m_recommendedValue != value)
        {
            m_recommendedValue = value;
            base.Invalidate();
        }
    }
}
private int m_recommendedPercentage = 10;
// Width of the recommended band, as a percentage of the whole scale.
[Browsable(true), Category("LinearGauge"), Description("Set the gauge recommended percentage"), DefaultValue(typeof(int), "10")]
public int RecommendedPercentage
{
    get { return m_recommendedPercentage; }
    set
    {
        if (m_recommendedPercentage != value)
        {
            m_recommendedPercentage = value;
            base.Invalidate();
        }
    }
}
private float m_thresholdValue = 90;
// Start of the threshold band, which extends to the end of the scale.
[Browsable(true), Category("LinearGauge"), Description("Set the gauge threshold value"), DefaultValue(typeof(float), "90")]
public float ThresholdValue
{
    get { return m_thresholdValue; }
    set
    {
        if (m_thresholdValue != value)
        {
            m_thresholdValue = value;
            base.Invalidate();
        }
    }
}
private float m_maxValue = 100;
// Upper bound of the scale.
[Browsable(true), Category("LinearGauge"), Description("Set the gauge maximum scale value"), DefaultValue(typeof(float), "100")]
public float MaxValue
{
    get { return m_maxValue; }
    set
    {
        if (m_maxValue != value)
        {
            m_maxValue = value;
            base.Invalidate();
        }
    }
}
private float m_minValue = 0;
// Lower bound of the scale.
[Browsable(true), Category("LinearGauge"), Description("Set the gauge minimum scale value"), DefaultValue(typeof(float), "0")]
public float MinValue
{
    get { return m_minValue; }
    set
    {
        if (m_minValue != value)
        {
            m_minValue = value;
            base.Invalidate();
        }
    }
}
/// <summary>
/// Loads the embedded "ProGauges.Eurostile.ttf" font resource into the
/// private font collection used for all gauge text.
/// </summary>
/// <exception cref="Exception">Thrown when the resource is missing or truncated.</exception>
private void LoadFont()
{
    // Fixed: the stream is now disposed on all paths (it was only Close()d on
    // the success path before).
    using (Stream fontStream = this.GetType().Assembly.GetManifestResourceStream("ProGauges.Eurostile.ttf"))
    {
        if (fontStream == null)
        {
            throw new Exception("Font could not be found");
        }
        byte[] fontdata = new byte[fontStream.Length];
        // Fixed: Stream.Read may return fewer bytes than requested; the
        // original single Read could silently truncate the font data. Loop
        // until the whole resource has been read.
        int offset = 0;
        while (offset < fontdata.Length)
        {
            int read = fontStream.Read(fontdata, offset, fontdata.Length - offset);
            if (read <= 0)
            {
                throw new Exception("Font could not be found");
            }
            offset += read;
        }
        unsafe
        {
            fixed (byte* pFontData = fontdata)
            {
                // NOTE(review): pinning only for the duration of this call
                // matches the original code's assumption about AddMemoryFont.
                m_pfc.AddMemoryFont((System.IntPtr)pFontData, fontdata.Length);
            }
        }
    }
}
// Sets up double buffering, the peak-hold fade timer and the embedded font.
public LinearGauge()
{
    InitializeComponent();
    // Double-buffer to avoid flicker during frequent repaints.
    base.SetStyle(ControlStyles.OptimizedDoubleBuffer, true);
    base.SetStyle(ControlStyles.SupportsTransparentBackColor, true);
    // Peak-hold fade timer ticks every 50 ms (see peakholdtimer_Elapsed).
    peakholdtimer.Interval = 50;
    peakholdtimer.Elapsed += new System.Timers.ElapsedEventHandler(peakholdtimer_Elapsed);
    peakholdtimer.Start();
    GetCenterRectangle();
    try
    {
        // Load the embedded font; on failure the control keeps its default font.
        LoadFont();
        System.Drawing.Font fn;
        foreach (FontFamily ff in m_pfc.Families)
        {
            // If several families were loaded, the last one found wins.
            this.Font = fn = new Font(ff, 12, FontStyle.Bold);
        }
    }
    catch (Exception E)
    {
        // Best-effort: font loading failures are logged to the console only.
        Console.WriteLine(E.Message);
    }
}
// Timer callback: gradually decrements the peak-hold marker's alpha; once it
// reaches 0, the stored peak value is reset so a new peak can be captured.
// NOTE(review): this runs on a timer thread and issues no Invalidate(), so the
// fade only becomes visible when something else repaints — confirm intended.
void peakholdtimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e)
{
    if (m_peakholdOpaque > 0) m_peakholdOpaque--;
    else
    {
        m_MaxPeakHoldValue = float.MinValue;
    }
}
// ISupportInitialize: designers bracket batch property assignment with these.
void ISupportInitialize.BeginInit()
{
    this.m_bIsInitializing = true;
}
// Ends batch initialization and forces a repaint with the final settings.
void ISupportInitialize.EndInit()
{
    this.m_bIsInitializing = false;
    base.Invalidate();
}
// Guard against painting after disposal.
protected override void OnPaintBackground(PaintEventArgs pevent)
{
    if (!this.IsDisposed)
    {
        base.OnPaintBackground(pevent);
    }
}
// Guard against layout work after disposal.
protected override void OnSizeChanged(EventArgs e)
{
    if (!this.IsDisposed)
    {
        base.OnSizeChanged(e);
    }
}
// Paints the whole gauge. The Graphics object is owned by the framework and
// is intentionally not disposed here.
protected override void OnPaint(PaintEventArgs e)
{
    if (!this.IsDisposed)
    {
        Graphics g = e.Graphics;
        //g.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.HighQuality;
        g.InterpolationMode = InterpolationMode.HighQualityBicubic;
        g.SmoothingMode = SmoothingMode.AntiAlias;
        // Draw order matters: background first, then labels, bar, ranges and
        // finally the glossy highlight on top.
        this.DrawBackground(g);
        // Tick numbers and caption are skipped on controls smaller than 85x85 px.
        if ((base.ClientRectangle.Height >= 85) && (base.ClientRectangle.Width >= 85))
        {
            this.DrawNumbers(g);
            this.DrawText(g);
        }
        this.DrawScale(g);
        this.DrawRanges(g);
        this.DrawHighlight(g);
    } //g.Dispose();
}
private bool m_bShowHighlight = true;
// Toggles the glossy highlight overlay drawn by DrawHighlight.
[Browsable(true), Category("LinearGauge"), Description("Switches highlighting of the gauge on and off"), DefaultValue(typeof(bool), "true")]
public bool ShowHighlight
{
    get { return m_bShowHighlight; }
    set
    {
        if (m_bShowHighlight != value)
        {
            m_bShowHighlight = value;
            base.Invalidate();
        }
    }
}
private byte m_nHighlightOpaqueEnd = 30;
/// <summary>
/// Opacity (alpha) at the bottom edge of the highlight gradient. Accepted
/// range is 0-100; larger values throw.
/// </summary>
[DefaultValue(50), Browsable(true), Category("LinearGauge"), Description("Set the opaque value of the highlight")]
public byte HighlightOpaqueEnd
{
    get
    {
        return this.m_nHighlightOpaqueEnd;
    }
    set
    {
        if (value > 100)
        {
            // Fixed: the message previously claimed a 0-50 range although the
            // guard above enforces 0-100.
            throw new ArgumentException("This value should be between 0 and 100");
        }
        if (this.m_nHighlightOpaqueEnd != value)
        {
            this.m_nHighlightOpaqueEnd = value;
            base.Invalidate();
        }
    }
}
private byte m_nHighlightOpaqueStart = 100;
/// <summary>
/// Opacity (alpha) at the top edge of the highlight gradient. The whole byte
/// range (0-255) is valid.
/// </summary>
[DefaultValue(100), Browsable(true), Category("LinearGauge"), Description("Set the opaque start value of the highlight")]
public byte HighlightOpaqueStart
{
    get
    {
        return this.m_nHighlightOpaqueStart;
    }
    set
    {
        // Fixed: the previous guard "if (value > 255)" could never be true for
        // a byte (whose range is 0-255), and its exception message referred to
        // a 0-50 range. The dead check has been removed; behavior is unchanged.
        if (this.m_nHighlightOpaqueStart != value)
        {
            this.m_nHighlightOpaqueStart = value;
            base.Invalidate();
        }
    }
}
// Draws a glossy white gradient over the top half of the control, fading from
// HighlightOpaqueStart at the top to HighlightOpaqueEnd at the middle.
private void DrawHighlight(Graphics g)
{
    if (this.m_bShowHighlight)
    {
        Rectangle clientRectangle = base.ClientRectangle;
        // Restrict to the top half, inset by 2 px on every side.
        clientRectangle.Height = clientRectangle.Height >> 1;
        clientRectangle.Inflate(-2, -2);
        Color color = Color.FromArgb(this.m_nHighlightOpaqueStart, 0xff, 0xff, 0xff);
        Color color2 = Color.FromArgb(this.m_nHighlightOpaqueEnd, 0xff, 0xff, 0xff);
        // Corner radius is hard-coded to 1; the border color/width are empty/0.
        this.DrawRoundRect(g, clientRectangle, /*((this.m_nCornerRadius - 1) > 1) ? ((float)(this.m_nCornerRadius - 1)) :*/ ((float)1), color, color2, Color.Empty, 0, true, false);
    }
    else
    {
        /*Rectangle clientRectangle = base.ClientRectangle;
        clientRectangle.Height = clientRectangle.Height >> 1;
        clientRectangle.Inflate(-2, -2);
        Color color = Color.FromArgb(100, 0xff, 0xff, 0xff);
        Color color2 = Color.FromArgb(this.m_nHighlightOpaque, 0xff, 0xff, 0xff);
        Brush backGroundBrush = new SolidBrush(Color.FromArgb(120, Color.Silver));
        g.FillEllipse(backGroundBrush, clientRectangle);*/
    }
}
// Fills (and outlines) a rounded rectangle built from four corner arcs.
// col1/col2 form a vertical gradient when bGradient is true; otherwise col1 is
// used as a solid fill.
// NOTE(review): the border is always drawn because the condition below is
// hard-coded to true — the bDrawBorder parameter is currently ignored.
private void DrawRoundRect(Graphics g, Rectangle rect, float radius, Color col1, Color col2, Color colBorder, int nBorderWidth, bool bGradient, bool bDrawBorder)
{
    GraphicsPath path = new GraphicsPath();
    // Each corner arc fits in a (2*radius) square positioned at the corners.
    float width = radius + radius;
    RectangleF ef = new RectangleF(0f, 0f, width, width);
    Brush brush = null;
    ef.X = rect.Left;
    ef.Y = rect.Top;
    path.AddArc(ef, 180f, 90f);
    ef.X = (rect.Right - 1) - width;
    path.AddArc(ef, 270f, 90f);
    ef.Y = (rect.Bottom - 1) - width;
    path.AddArc(ef, 0f, 90f);
    ef.X = rect.Left;
    path.AddArc(ef, 90f, 90f);
    path.CloseFigure();
    if (bGradient)
    {
        // Vertical (90 degree) gradient from col1 to col2.
        brush = new LinearGradientBrush(rect, col1, col2, 90f, false);
    }
    else
    {
        brush = new SolidBrush(col1);
    }
    //g.SmoothingMode = SmoothingMode.AntiAlias;
    g.FillPath(brush, path);
    if (/*bDrawBorder*/ true)
    {
        Pen pen = new Pen(colBorder);
        pen.Width = nBorderWidth;
        g.DrawPath(pen, path);
        pen.Dispose();
    }
    // Smoothing is reset for subsequent drawing.
    g.SmoothingMode = SmoothingMode.None;
    brush.Dispose();
    path.Dispose();
}
private Color m_recommendedRangeColor = Color.LawnGreen;
// Color of the recommended band drawn below the scale.
[Browsable(true), Category("LinearGauge"), Description("Set the gauge recommended range color"), DefaultValue(typeof(Color), "System.Drawing.Color.LawnGreen")]
public Color RecommendedRangeColor
{
    get { return m_recommendedRangeColor; }
    set
    {
        if (m_recommendedRangeColor != value)
        {
            m_recommendedRangeColor = value;
            base.Invalidate();
        }
    }
}
private Color m_thresholdColor = Color.Firebrick;
// Color of the threshold band drawn below the scale.
[Browsable(true), Category("LinearGauge"), Description("Set the gauge recommended range color"), DefaultValue(typeof(Color), "System.Drawing.Color.Firebrick")]
public Color ThresholdColor
{
    get { return m_thresholdColor; }
    set
    {
        if (m_thresholdColor != value)
        {
            m_thresholdColor = value;
            base.Invalidate();
        }
    }
}
private Color m_startColor = Color.GreenYellow;
// Left endpoint of the value bar's horizontal gradient (see DrawScale).
[Browsable(true), Category("LinearGauge"), Description("Set the gauge start color"), DefaultValue(typeof(Color), "System.Drawing.Color.GreenYellow")]
public Color StartColor
{
    get { return m_startColor; }
    set
    {
        if (m_startColor != value)
        {
            m_startColor = value;
            base.Invalidate();
        }
    }
}
private Color m_endColor = Color.OrangeRed;
// Right endpoint of the value bar's horizontal gradient (see DrawScale).
[Browsable(true), Category("LinearGauge"), Description("Set the gauge end color"), DefaultValue(typeof(Color), "System.Drawing.Color.OrangeRed")]
public Color EndColor
{
    get { return m_endColor; }
    set
    {
        if (m_endColor != value)
        {
            m_endColor = value;
            base.Invalidate();
        }
    }
}
// Stored as a raw 0-255 alpha; the property exposes it as a 0-100 percentage.
private int m_alphaForGaugeColors = 180;
// NOTE(review): the percent<->alpha integer conversions are lossy, so a value
// read back after a set may differ by one — confirm this is acceptable.
[Browsable(true), Category("LinearGauge"), Description("Set the gauge alpha value for the gauge colored bar"), DefaultValue(typeof(int), "180")]
public int AlphaForGaugeColors
{
    get { return (m_alphaForGaugeColors*100)/255; }
    set
    {
        int realvalue = value * 255 / 100;
        if (m_alphaForGaugeColors != realvalue)
        {
            m_alphaForGaugeColors = realvalue;
            base.Invalidate();
        }
    }
}
/// <summary>
/// Draws the recommended and threshold ranges in a thin strip directly below
/// the centre rectangle. The recommended band is centred on RecommendedValue
/// and spans RecommendedPercentage percent of the scale; the threshold band
/// runs from ThresholdValue to the end of the scale.
/// </summary>
/// <param name="g">Target drawing surface.</param>
private void DrawRanges(Graphics g)
{
    if (m_ShowRanges)
    {
        // Outline pen shared by both bands; alpha matches the gauge bar.
        Pen p = new Pen(Color.FromArgb(m_alphaForGaugeColors, m_BevelLineColor));
        float range = m_maxValue - m_minValue;
        Rectangle scalerect = new Rectangle(rcentre.X, rcentre.Y + rcentre.Height + 1, rcentre.Width, 6);
        //scalerect.Inflate(-1, -1);
        if (m_recommendedValue >= m_minValue && m_recommendedValue < m_maxValue)
        {
            // Fixed: removed an unused vertical LinearGradientBrush ("gb")
            // that was created and disposed here without ever being drawn.
            // Band centre and edges as fractions of the whole scale; half the
            // requested percentage lies on each side of the centre.
            float centerpercentage = (m_recommendedValue - m_minValue) / range;
            float recommendedstartpercentage = centerpercentage;
            recommendedstartpercentage -= (float)m_recommendedPercentage / 200;
            float recommendedendpercentage = centerpercentage;
            recommendedendpercentage += (float)m_recommendedPercentage / 200;
            float startx = scalerect.Width * recommendedstartpercentage;
            float endx = scalerect.Width * recommendedendpercentage;
            float centerx = scalerect.Width * centerpercentage;
            // Left half: fades from transparent up to the recommended color.
            Rectangle startfillrect = new Rectangle(scalerect.X + (int)startx, scalerect.Y, (int)centerx - (int)startx, scalerect.Height);
            Rectangle startcolorrect = startfillrect;
            startcolorrect.Inflate(1, 0);
            System.Drawing.Drawing2D.LinearGradientBrush gb1 = new System.Drawing.Drawing2D.LinearGradientBrush(startcolorrect, Color.Transparent, m_recommendedRangeColor, System.Drawing.Drawing2D.LinearGradientMode.Horizontal);
            g.FillRectangle(gb1, startfillrect);
            // Right half: fades from the recommended color back to transparent.
            Rectangle endfillrect = new Rectangle(scalerect.X + (int)centerx, scalerect.Y, (int)endx - (int)centerx, scalerect.Height);
            Rectangle endcolorrect = endfillrect;
            endcolorrect.Inflate(1, 0);
            System.Drawing.Drawing2D.LinearGradientBrush gb2 = new System.Drawing.Drawing2D.LinearGradientBrush(endcolorrect, m_recommendedRangeColor, Color.Transparent, System.Drawing.Drawing2D.LinearGradientMode.Horizontal);
            g.FillRectangle(gb2, endfillrect);
            g.DrawRectangle(p, startfillrect.X, startfillrect.Y, startfillrect.Width + endfillrect.Width, startfillrect.Height);
            gb1.Dispose();
            gb2.Dispose();
        }
        // Threshold band: from the threshold position to the end of the strip.
        if (m_thresholdValue >= m_minValue && m_thresholdValue < m_maxValue)
        {
            float percentage = (m_thresholdValue - m_minValue) / range;
            if (percentage > 1) percentage = 1;
            if (percentage < 0) percentage = 0;
            float startx = scalerect.Width * percentage;
            Rectangle fillrect = new Rectangle(scalerect.X + (int)startx, scalerect.Y, scalerect.Width-(int)startx, scalerect.Height);
            Rectangle fillcolorrect = fillrect;
            fillcolorrect.Inflate(1, 0);
            System.Drawing.Drawing2D.LinearGradientBrush gb = new System.Drawing.Drawing2D.LinearGradientBrush(fillcolorrect, Color.Transparent, m_thresholdColor, System.Drawing.Drawing2D.LinearGradientMode.Horizontal);
            g.FillRectangle(gb, fillrect);
            g.DrawRectangle(p, fillrect);
            gb.Dispose();
        }
        p.Dispose();
    }
}
/// <summary>
/// Draws the colored value bar inside the centre rectangle (a horizontal
/// gradient from StartColor to EndColor, clipped at the current value) and,
/// while active, the fading peak-hold marker line.
/// </summary>
/// <param name="g">Target drawing surface.</param>
private void DrawScale(Graphics g)
{
    Rectangle scalerect = rcentre;
    scalerect.Inflate(-2, -2);
    Color realstart = Color.FromArgb(m_alphaForGaugeColors, m_startColor);
    Color realend = Color.FromArgb(m_alphaForGaugeColors, m_endColor);
    scalerect = new Rectangle(scalerect.X + 1, scalerect.Y + 1, scalerect.Width, scalerect.Height);
    // The gradient spans the whole centre rectangle; only a fraction is filled.
    System.Drawing.Drawing2D.LinearGradientBrush gb = new System.Drawing.Drawing2D.LinearGradientBrush(rcentre, realstart, realend, System.Drawing.Drawing2D.LinearGradientMode.Horizontal);
    // Fraction of the scale covered by the current value, clamped at 100 %.
    float range = m_maxValue - m_minValue;
    float percentage = (m_value - m_minValue) / range;
    //float percentage = (m_value) / (m_maxValue - m_minValue);
    if (percentage > 1) percentage = 1;
    float width = scalerect.Width * percentage;
    Rectangle fillrect = new Rectangle(scalerect.X-1, scalerect.Y-1,(int)width , scalerect.Height+1);
    g.FillRectangle(gb, fillrect);
    // Peak-hold marker: a red vertical line at the highest value seen, fading
    // as the timer decrements m_peakholdOpaque.
    if (m_MaxPeakHoldValue > float.MinValue && m_peakholdOpaque > 0)
    {
        Color peakholdcolor = Color.FromArgb(m_peakholdOpaque, Color.Red);
        percentage = (m_MaxPeakHoldValue - m_minValue) / range;
        if (percentage > 1) percentage = 1;
        width = scalerect.Width * percentage;
        // Fixed: the peak-hold pen was created inline and never disposed,
        // leaking a GDI handle on every repaint.
        using (Pen peakPen = new Pen(peakholdcolor, 3))
        {
            g.DrawLine(peakPen, new Point(scalerect.X - 1 + (int)width, scalerect.Y - 1), new Point(scalerect.X - 1 + (int)width, scalerect.Y + scalerect.Height));
        }
    }
    gb.Dispose();
}
private int m_numberOfDivisions = 5;
// Number of major divisions on the scale (labels are drawn at each boundary).
[Browsable(true), Category("LinearGauge"), Description("Sets number of divisions that should be drawn"), DefaultValue(typeof(int), "5")]
public int NumberOfDivisions
{
    get { return m_numberOfDivisions; }
    set
    {
        if (m_numberOfDivisions != value)
        {
            m_numberOfDivisions = value;
            base.Invalidate();
        }
    }
}
private Color m_TickColor = Color.Gray;
// Color of the major tick marks and sub-tick dots.
[Browsable(true), Category("LinearGauge"), Description("Set the gauge tick color"), DefaultValue(typeof(Color), "System.Drawing.Color.Gray")]
public Color TickColor
{
    get { return m_TickColor; }
    set
    {
        if (m_TickColor != value)
        {
            m_TickColor = value;
            base.Invalidate();
        }
    }
}
private int m_subTickCount = 4;
// Number of sub-tick dots drawn between consecutive major ticks.
[Browsable(true), Category("LinearGauge"), Description("Sets number of sub divisions that should be drawn"), DefaultValue(typeof(int), "4")]
public int SubTickCount
{
    get { return m_subTickCount; }
    set
    {
        if (m_subTickCount != value)
        {
            m_subTickCount = value;
            base.Invalidate();
        }
    }
}
private Color m_TextColor = Color.Silver;
// Color of the tick labels and the caption text.
[Browsable(true), Category("LinearGauge"), Description("Set the gauge text color"), DefaultValue(typeof(Color), "System.Drawing.Color.Silver")]
public Color TextColor
{
    get { return m_TextColor; }
    set
    {
        if (m_TextColor != value)
        {
            m_TextColor = value;
            base.Invalidate();
        }
    }
}
/// <summary>
/// Draws the major tick marks with their numeric labels above the centre
/// rectangle, plus the small sub-tick dots between major ticks.
/// </summary>
/// <param name="g">Target drawing surface.</param>
private void DrawNumbers(Graphics g)
{
    int y_offset = rcentre.Y - 20;
    int x_offset = rcentre.X;
    // Fixed: the pen and brushes below are loop-invariant (their colors never
    // change between iterations) but were previously allocated and disposed
    // once per division; they are now created once and disposed via using.
    int tickWidth = rcentre.Width / (m_numberOfDivisions);
    using (Pen p = new Pen(Color.FromArgb(80, m_TickColor)))
    using (SolidBrush sb = new SolidBrush(Color.FromArgb(80, m_TickColor)))
    using (SolidBrush sbtxt = new SolidBrush(m_TextColor))
    {
        for (int t = 0; t < m_numberOfDivisions + 1; t++)
        {
            int xPos = x_offset + t * tickWidth;
            // Value represented by this major tick.
            float fval = m_minValue + (t * ((m_maxValue - m_minValue) / m_numberOfDivisions));
            string outstr = fval.ToString("F" + m_NumberOfDecimals.ToString());
            // Small magnitudes get extra decimals so they remain readable.
            if (fval < 10 && fval > -10 && fval != 0) outstr = fval.ToString("F1");
            if (fval < 1 && fval > -1 && fval != 0) outstr = fval.ToString("F2");
            SizeF textSize = g.MeasureString(outstr, this.Font);
            g.DrawRectangle(p, new Rectangle(xPos, rcentre.Y + 1, 3, rcentre.Height - 2));
            // Sub-ticks: evenly spaced dots; none are drawn after the last tick.
            if (t < m_numberOfDivisions)
            {
                int subTickWidth = tickWidth / (m_subTickCount + 1);
                for (int subt = 0; subt < m_subTickCount; subt++)
                {
                    int xPosSub = xPos + (subt + 1) * subTickWidth;
                    g.FillEllipse(sb, xPosSub, rcentre.Y + (rcentre.Height / 2), 3, 3);
                }
            }
            // Centre the label horizontally on the tick.
            xPos -= (int)textSize.Width / 2;
            g.DrawString(outstr, this.Font, sbtxt, new PointF((float)xPos, (float)y_offset));
        }
    }
}
/// <summary>
/// Draws the caption line (gauge text, optionally followed by the current
/// value as a percentage or as a number with units) centred under the gauge.
/// </summary>
/// <param name="g">Target drawing surface.</param>
private void DrawText(Graphics g)
{
    string text2display = m_gaugeText ;
    if (m_ShowValue)
    {
        if (m_ShowValueInPercentage)
        {
            // add percentage to text
            float range = m_maxValue - m_minValue;
            float percentage = (m_value - m_minValue) / range;
            percentage *= 100;
            // and percentage sign
            text2display += " " + percentage.ToString("F0") + " %";
        }
        else
        {
            // add value to text
            //string strval = m_value.ToString("F0");
            string strval = m_value.ToString("F" + m_NumberOfDecimals.ToString());
            // Small magnitudes get extra decimals, mirroring DrawNumbers.
            if (m_value > -10 && m_value < 10 && m_value != 0) strval = m_value.ToString("F1");
            if (m_value > -1 && m_value < 1 && m_value != 0) strval = m_value.ToString("F2");
            text2display += " " + strval;
            // and units
            text2display += " " + m_gaugeUnits;
        }
    }
    SizeF textsize = g.MeasureString(text2display, this.Font);
    SolidBrush sbtxt = new SolidBrush(m_TextColor);
    // Centre horizontally; draw 10 px below the centre rectangle.
    int xPos = this.ClientRectangle.X + (this.ClientRectangle.Width /2) - ((int)textsize.Width/2);
    g.DrawString(text2display, this.Font, sbtxt, new PointF((float)xPos, rcentre.Y + rcentre.Height + 10));
    sbtxt.Dispose();
}
/// <summary>
/// Fills the control with the background color and draws the outer bevel
/// frame plus the outline of the centre rectangle.
/// </summary>
/// <param name="g">Target drawing surface.</param>
private void DrawBackground(Graphics g)
{
    SolidBrush b = new SolidBrush(m_BackGroundColor);
    g.FillRectangle(b, this.ClientRectangle);
    RectangleF r = this.ClientRectangle;
    r.Inflate(-3, -3);
    Pen p = new Pen(m_BevelLineColor, 2);
    g.DrawRectangle(p, new Rectangle((int)r.X, (int)r.Y, (int)r.Width, (int)r.Height));
    g.DrawRectangle(Pens.DimGray, rcentre);
    b.Dispose();
    p.Dispose();
}
// Recomputes the centre rectangle (the bar area) from the client size:
// 1/8 margin left, width 6/8 of the client, positioned 3/8 down with a
// height of 2/8 of the client.
private void GetCenterRectangle()
{
    rcentre = new Rectangle(this.ClientRectangle.X + this.ClientRectangle.Width / 8, this.ClientRectangle.Y + (this.ClientRectangle.Height * 3) / 8, (this.ClientRectangle.Width * 6) / 8, (this.ClientRectangle.Height * 2) / 8);
}
// Resize handler (wired by the designer): recompute geometry and repaint.
private void LinearGauge_Resize(object sender, EventArgs e)
{
    GetCenterRectangle();
    base.Invalidate();
}
}
}
| |
//Contributor : MVCContrib
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Web;
using System.Web.Mvc;
using System.Web.Routing;
using Nop.Core;
using Nop.Core.Infrastructure;
using Nop.Services.Localization;
namespace Nop.Web.Framework.UI.Paging
{
/// <summary>
/// Renders a pager component from an IPageableModel datasource.
/// </summary>
public partial class Pager : IHtmlString
{
// Data source and MVC rendering context.
protected readonly IPageableModel model;
protected readonly ViewContext viewContext;
// Name of the query-string parameter that carries the page number.
protected string pageQueryName = "page";
// Feature toggles for the individual pager sections (see ToHtmlString).
protected bool showTotalSummary;
protected bool showPagerItems = true;
protected bool showFirst = true;
protected bool showPrevious = true;
protected bool showNext = true;
protected bool showLast = true;
protected bool showIndividualPages = true;
// Used when building URLs (see CreateDefaultUrl); presumably controls whether
// empty-valued query parameters are emitted — verify against that method.
protected bool renderEmptyParameters = true;
// Size of the sliding window of individual page links.
protected int individualPagesDisplayedCount = 5;
// Maps a 1-based page number to the URL for that page.
protected Func<int, string> urlBuilder;
protected IList<string> booleanParameterNames;
// Creates a pager for the given model, rendering within the given view context.
public Pager(IPageableModel model, ViewContext context)
{
    this.model = model;
    this.viewContext = context;
    this.urlBuilder = CreateDefaultUrl;
    this.booleanParameterNames = new List<string>();
}
protected ViewContext ViewContext
{
    get { return viewContext; }
}
// Fluent configuration methods: each sets one option and returns this pager
// so calls can be chained.
public Pager QueryParam(string value)
{
    this.pageQueryName = value;
    return this;
}
public Pager ShowTotalSummary(bool value)
{
    this.showTotalSummary = value;
    return this;
}
public Pager ShowPagerItems(bool value)
{
    this.showPagerItems = value;
    return this;
}
public Pager ShowFirst(bool value)
{
    this.showFirst = value;
    return this;
}
public Pager ShowPrevious(bool value)
{
    this.showPrevious = value;
    return this;
}
public Pager ShowNext(bool value)
{
    this.showNext = value;
    return this;
}
public Pager ShowLast(bool value)
{
    this.showLast = value;
    return this;
}
public Pager ShowIndividualPages(bool value)
{
    this.showIndividualPages = value;
    return this;
}
public Pager RenderEmptyParameters(bool value)
{
    this.renderEmptyParameters = value;
    return this;
}
public Pager IndividualPagesDisplayedCount(int value)
{
    this.individualPagesDisplayedCount = value;
    return this;
}
// Replaces the default page-number-to-URL mapping.
public Pager Link(Func<int, string> value)
{
    this.urlBuilder = value;
    return this;
}
//little hack here due to ugly MVC implementation
//find more info here: http://www.mindstorminteractive.com/topics/jquery-fix-asp-net-mvc-checkbox-truefalse-value/
public Pager BooleanParameterName(string paramName)
{
    booleanParameterNames.Add(paramName);
    return this;
}
public override string ToString()
{
    return ToHtmlString();
}
// Renders the pager as an HTML unordered list. Returns null when the model
// has no items, and an empty string when no sections produced output.
public virtual string ToHtmlString()
{
    if (model.TotalItems == 0)
        return null;
    var localizationService = EngineContext.Current.Resolve<ILocalizationService>();
    var links = new StringBuilder();
    // Optional "page X of Y (Z items)" summary.
    if (showTotalSummary && (model.TotalPages > 0))
    {
        links.Append("<li class=\"total-summary\">");
        links.Append(string.Format(localizationService.GetResource("Pager.CurrentPage"), model.PageIndex + 1, model.TotalPages, model.TotalItems));
        links.Append("</li>");
    }
    // Navigation links are only rendered when there is more than one page.
    // NOTE: page numbers passed to CreatePageLink are 1-based, while
    // model.PageIndex is 0-based.
    if (showPagerItems && (model.TotalPages > 1))
    {
        if (showFirst)
        {
            //first page
            // Only shown when far enough from the start for it to be useful.
            if ((model.PageIndex >= 3) && (model.TotalPages > individualPagesDisplayedCount))
            {
                links.Append(CreatePageLink(1, localizationService.GetResource("Pager.First"), "first-page"));
            }
        }
        if (showPrevious)
        {
            //previous page
            if (model.PageIndex > 0)
            {
                links.Append(CreatePageLink(model.PageIndex, localizationService.GetResource("Pager.Previous"), "previous-page"));
            }
        }
        if (showIndividualPages)
        {
            //individual pages
            // Sliding window of page links around the current page.
            int firstIndividualPageIndex = GetFirstIndividualPageIndex();
            int lastIndividualPageIndex = GetLastIndividualPageIndex();
            for (int i = firstIndividualPageIndex; i <= lastIndividualPageIndex; i++)
            {
                if (model.PageIndex == i)
                {
                    // Current page is rendered as plain text, not a link.
                    links.AppendFormat("<li class=\"current-page\"><span>{0}</span></li>", (i + 1));
                }
                else
                {
                    links.Append(CreatePageLink(i + 1, (i + 1).ToString(), "individual-page"));
                }
            }
        }
        if (showNext)
        {
            //next page
            if ((model.PageIndex + 1) < model.TotalPages)
            {
                links.Append(CreatePageLink(model.PageIndex + 2, localizationService.GetResource("Pager.Next"), "next-page"));
            }
        }
        if (showLast)
        {
            //last page
            // Only shown when far enough from the end for it to be useful.
            if (((model.PageIndex + 3) < model.TotalPages) && (model.TotalPages > individualPagesDisplayedCount))
            {
                links.Append(CreatePageLink(model.TotalPages, localizationService.GetResource("Pager.Last"), "last-page"));
            }
        }
    }
    var result = links.ToString();
    if (!String.IsNullOrEmpty(result))
    {
        result = "<ul>" + result + "</ul>";
    }
    return result;
}
/// <summary>
/// True when rendering the pager produces no markup at all.
/// </summary>
public virtual bool IsEmpty()
{
    return string.IsNullOrEmpty(ToString());
}
/// <summary>
/// Zero-based index of the first individual page link, keeping the current page
/// roughly centered in the displayed window and clamping at both ends.
/// </summary>
protected virtual int GetFirstIndividualPageIndex()
{
    var half = individualPagesDisplayedCount / 2;
    // Window would start before page 0, or all pages fit: start at the beginning.
    if (model.TotalPages < individualPagesDisplayedCount || model.PageIndex - half < 0)
        return 0;
    // Window would run past the last page: pin it to the end.
    if (model.PageIndex + half >= model.TotalPages)
        return model.TotalPages - individualPagesDisplayedCount;
    return model.PageIndex - half;
}
/// <summary>
/// Zero-based index of the last individual page link; counterpart of
/// GetFirstIndividualPageIndex with the same clamping behavior.
/// </summary>
protected virtual int GetLastIndividualPageIndex()
{
    var half = individualPagesDisplayedCount / 2;
    // For an even window size one fewer page trails the current page.
    var trailing = (individualPagesDisplayedCount % 2 == 0) ? half - 1 : half;
    if (model.TotalPages < individualPagesDisplayedCount ||
        model.PageIndex + trailing >= model.TotalPages)
        return model.TotalPages - 1;
    if (model.PageIndex - half < 0)
        return individualPagesDisplayedCount - 1;
    return model.PageIndex + trailing;
}
/// <summary>
/// Renders one pager entry as &lt;li class="..."&gt;&lt;a href="..."&gt;text&lt;/a&gt;&lt;/li&gt;.
/// </summary>
/// <param name="pageNumber">One-based page number passed to the URL builder.</param>
/// <param name="text">Link text (page number or localized caption).</param>
/// <param name="cssClass">CSS class for the list item; skipped when blank.</param>
protected virtual string CreatePageLink(int pageNumber, string text, string cssClass)
{
    var anchor = new TagBuilder("a");
    anchor.SetInnerText(text);
    anchor.MergeAttribute("href", urlBuilder(pageNumber));

    var listItem = new TagBuilder("li");
    if (!String.IsNullOrWhiteSpace(cssClass))
        listItem.AddCssClass(cssClass);
    listItem.InnerHtml += anchor;
    return listItem.ToString(TagRenderMode.Normal);
}
/// <summary>
/// Builds the URL for the given one-based page number, carrying over the current
/// request's query string. The page parameter is omitted for page 1 (SEO).
/// </summary>
protected virtual string CreateDefaultUrl(int pageNumber)
{
    var routeValues = new RouteValueDictionary();
    var parametersWithEmptyValues = new List<string>();
    foreach (var key in viewContext.RequestContext.HttpContext.Request.QueryString.AllKeys.Where(key => key != null))
    {
        var value = viewContext.RequestContext.HttpContext.Request.QueryString[key];
        if (renderEmptyParameters && String.IsNullOrEmpty(value))
        {
            //we store query string parameters with empty values separately
            //we need to do it because they are not properly processed in the UrlHelper.GenerateUrl method (dropped for some reasons)
            parametersWithEmptyValues.Add(key);
        }
        else
        {
            if (booleanParameterNames.Contains(key, StringComparer.InvariantCultureIgnoreCase))
            {
                //little hack here due to ugly MVC implementation
                //find more info here: http://www.mindstorminteractive.com/topics/jquery-fix-asp-net-mvc-checkbox-truefalse-value/
                // MVC checkboxes post "true,false"; collapse that to "true".
                if (!String.IsNullOrEmpty(value) && value.Equals("true,false", StringComparison.InvariantCultureIgnoreCase))
                {
                    value = "true";
                }
            }
            routeValues[key] = value;
        }
    }
    if (pageNumber > 1)
    {
        routeValues[pageQueryName] = pageNumber;
    }
    else
    {
        //SEO. we do not render pageindex query string parameter for the first page
        if (routeValues.ContainsKey(pageQueryName))
        {
            routeValues.Remove(pageQueryName);
        }
    }
    var url = UrlHelper.GenerateUrl(null, null, null, routeValues, RouteTable.Routes, viewContext.RequestContext, true);
    if (renderEmptyParameters && parametersWithEmptyValues.Any())
    {
        //we add such parameters manually because UrlHelper.GenerateUrl() ignores them
        var webHelper = EngineContext.Current.Resolve<IWebHelper>();
        foreach (var key in parametersWithEmptyValues)
        {
            // Re-append "key=" with no value so the empty parameter survives.
            url = webHelper.ModifyQueryString(url, key + "=", null);
        }
    }
    return url;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
using org.apache.juddi.v3.client.annotations;
using org.apache.juddi.v3.client.log;
using org.uddi.apiv3;
using System;
using System.Collections.Generic;
using System.Text;
using System.Web.Services;
namespace org.apache.juddi.v3.client.config
{
/// <summary>
/// Reads UDDIService / UDDIServiceBinding attributes from .NET types and converts
/// them into UDDI businessService / bindingTemplate structures. Token placeholders
/// in annotation values are resolved via TokenResolver against the supplied
/// Properties.
/// </summary>
class AnnotationProcessor
{
    static Log log = LogFactory.getLog(typeof(AnnotationProcessor));

    // Markers used inside the categoryBag annotation string, e.g.
    // "keyedReference=keyName=foo;keyValue=bar;tModelKey=uddi:...".
    private static readonly String KEYED_REFERENCE = "keyedReference=";
    private static readonly String KEY_NAME = "keyName=";
    private static readonly String KEY_VALUE = "keyValue=";
    private static readonly String TMODEL_KEY = "tModelKey=";

    /// <summary>
    /// Reads UDDI annotations from each named type. Types that cannot be loaded
    /// or lack the UDDIService annotation are skipped (errors are logged).
    /// </summary>
    public List<businessService> readServiceAnnotations(string[] classes, Properties properties)
    {
        List<businessService> items = new List<businessService>();
        if (classes != null)
        {
            foreach (string s in classes)
            {
                businessService b = readServiceAnnotations(s, properties);
                if (b != null)
                    items.Add(b);
            }
        }
        return items;
    }

    /// <summary>
    /// Builds a businessService from the UDDIService annotation on the named type,
    /// or returns null (with an error logged) when the type cannot be loaded or
    /// is missing the annotation.
    /// </summary>
    public businessService readServiceAnnotations(String classWithAnnotations, Properties properties)
    {
        Type t = Type.GetType(classWithAnnotations, false, true);
        if (t == null)
        {
            // BUGFIX: this message used to be logged even when the type loaded
            // fine but merely lacked the annotation; it now fires only on an
            // actual load failure.
            log.error("Unable to load type " + classWithAnnotations);
            return null;
        }
        object[] attrib = t.GetCustomAttributes(typeof(UDDIService), true);
        object[] ws = t.GetCustomAttributes(typeof(System.Web.Services.WebServiceBindingAttribute), true);
        WebServiceBindingAttribute webServiceAnnotation = null;
        if (ws != null && ws.Length > 0)
        {
            webServiceAnnotation = ((WebServiceBindingAttribute[])ws)[0];
        }
        if (attrib == null || attrib.Length == 0)
        {
            log.error("Missing UDDIService annotation in class " + classWithAnnotations);
            return null;
        }
        businessService service = new businessService();
        UDDIService[] bits = attrib as UDDIService[];
        UDDIService uddiService = bits[0];
        name n = new name();
        n.lang = uddiService.lang;
        service.businessKey = (TokenResolver.replaceTokens(uddiService.businessKey, properties));
        service.serviceKey = (TokenResolver.replaceTokens(uddiService.serviceKey, properties));
        // Service name precedence: annotation value, then the WebServiceBinding
        // name, then the raw type name.
        if (!"".Equals(uddiService.serviceName, StringComparison.CurrentCultureIgnoreCase))
        {
            n.Value = (TokenResolver.replaceTokens(uddiService.serviceName, properties));
        }
        else if (webServiceAnnotation != null && !"".Equals(webServiceAnnotation.Name))
        {
            n.Value = (webServiceAnnotation.Name);
        }
        else
        {
            n.Value = (classWithAnnotations);
        }
        service.name = new name[] { n };
        description d = new description();
        d.lang = (uddiService.lang);
        d.Value = (TokenResolver.replaceTokens(uddiService.description, properties));
        service.description = new description[] { d };
        //categoryBag on the service
        if (!"".Equals(uddiService.categoryBag))
        {
            categoryBag categoryBag = parseCategoryBag(uddiService.categoryBag);
            service.categoryBag = (categoryBag);
        }
        //bindingTemplate on service
        bindingTemplate bindingTemplate = parseServiceBinding(classWithAnnotations, uddiService.lang, webServiceAnnotation, properties);
        if (bindingTemplate != null)
        {
            bindingTemplate.serviceKey = (service.serviceKey);
            if (service.bindingTemplates == null)
            {
                service.bindingTemplates = new bindingTemplate[] { bindingTemplate };
            }
            else
            {
                List<bindingTemplate> l = new List<bindingTemplate>();
                l.AddRange(service.bindingTemplates);
                l.Add(bindingTemplate);
                service.bindingTemplates = l.ToArray();
            }
        }
        return service;
    }

    /// <summary>
    /// Builds a bindingTemplate from the UDDIServiceBinding annotation on the
    /// named type, or null when the type or annotation is absent.
    /// </summary>
    private bindingTemplate parseServiceBinding(string classWithAnnotations, string lang, WebServiceBindingAttribute webServiceAnnotation, Properties properties)
    {
        bindingTemplate bindingTemplate = null;
        Type t = Type.GetType(classWithAnnotations, false, false);
        if (t == null)
        {
            // BUGFIX: guard against a NullReferenceException. This lookup is
            // case-sensitive and can fail even when the case-insensitive lookup
            // in readServiceAnnotations succeeded.
            log.error("Unable to load type " + classWithAnnotations);
            return null;
        }
        UDDIServiceBinding uddiServiceBinding = null;
        object[] attrib = t.GetCustomAttributes(typeof(UDDIServiceBinding), true);
        if (attrib != null && attrib.Length > 0)
            uddiServiceBinding = attrib[0] as UDDIServiceBinding;
        //binding
        if (uddiServiceBinding != null)
        {
            bindingTemplate = new bindingTemplate();
            bindingTemplate.bindingKey = (TokenResolver.replaceTokens(uddiServiceBinding.bindingKey, properties));
            // The binding inherits the service language unless it declares its own.
            String bindingLang = (lang);
            if (uddiServiceBinding.lang != null)
            {
                bindingLang = TokenResolver.replaceTokens(uddiServiceBinding.lang, properties);
            }
            description bindingDescription = new description();
            bindingDescription.lang = (bindingLang);
            bindingDescription.Value = (TokenResolver.replaceTokens(uddiServiceBinding.description, properties));
            bindingTemplate.description = new description[] { (bindingDescription) };
            accessPoint accessPoint = new accessPoint();
            // Default use type unless the annotation overrides it.
            accessPoint.useType = (AccessPointType.wsdlDeployment.ToString());
            if (!"".Equals(uddiServiceBinding.accessPointType))
            {
                accessPoint.useType = (uddiServiceBinding.accessPointType);
            }
            if (!"".Equals(uddiServiceBinding.accessPoint))
            {
                String endPoint = uddiServiceBinding.accessPoint;
                endPoint = TokenResolver.replaceTokens(endPoint, properties);
                log.debug("AccessPoint EndPoint=" + endPoint);
                accessPoint.Value = (endPoint);
            }
            else if (webServiceAnnotation != null && webServiceAnnotation.Location != null)
            {
                accessPoint.Value = (webServiceAnnotation.Location);
            }
            bindingTemplate.Item = (accessPoint);
            //tModelKeys on the binding
            if (!"".Equals(uddiServiceBinding.tModelKeys))
            {
                String[] tModelKeys = uddiServiceBinding.tModelKeys.Split(',');
                foreach (String tModelKey in tModelKeys)
                {
                    tModelInstanceInfo instanceInfo = new tModelInstanceInfo();
                    instanceInfo.tModelKey = (tModelKey);
                    if (bindingTemplate.tModelInstanceDetails == null)
                    {
                        bindingTemplate.tModelInstanceDetails = (new tModelInstanceInfo[] { instanceInfo });
                    }
                    else
                    {
                        // BUGFIX: previously this append also ran right after the
                        // array was first created above, duplicating the first key.
                        List<tModelInstanceInfo> l = new List<tModelInstanceInfo>();
                        l.AddRange(bindingTemplate.tModelInstanceDetails);
                        l.Add(instanceInfo);
                        bindingTemplate.tModelInstanceDetails = l.ToArray();
                    }
                }
            }
            //categoryBag on the binding
            if (!"".Equals(uddiServiceBinding.categoryBag))
            {
                categoryBag categoryBag = parseCategoryBag(uddiServiceBinding.categoryBag);
                bindingTemplate.categoryBag = (categoryBag);
            }
        }
        else
        {
            log.error("Missing UDDIServiceBinding annotation in class " + classWithAnnotations);
        }
        return bindingTemplate;
    }

    /// <summary>
    /// Parses a comma-separated categoryBag annotation string into keyedReferences.
    /// Each section looks like "keyedReference=keyName=...;keyValue=...;tModelKey=...".
    /// </summary>
    private categoryBag parseCategoryBag(string categoryBagStr)
    {
        categoryBag cb = new categoryBag();
        log.debug("CategoryBag Annotation=" + cb);
        if (!"".Equals(categoryBagStr))
        {
            List<keyedReference> cbs = new List<keyedReference>();
            String[] sections = categoryBagStr.Split(',');
            foreach (String section in sections)
            {
                if (section.StartsWith(KEYED_REFERENCE))
                {
                    // BUGFIX: Substring(start, length) was being called with the
                    // Java substring(begin, end) convention (start + full length),
                    // which always throws ArgumentOutOfRangeException; take the
                    // remainder of the string instead.
                    String keyedReferenceStr = section.Substring(KEYED_REFERENCE.Length);
                    log.debug("Found KeyedReference=" + keyedReferenceStr);
                    String[] keyedReferences = keyedReferenceStr.Split(';');
                    keyedReference keyedReference = new keyedReference();
                    foreach (String key in keyedReferences)
                    {
                        // Same Java-to-C# Substring fix as above on all three markers.
                        if (key.StartsWith(KEY_NAME)) keyedReference.keyName = (key.Substring(KEY_NAME.Length));
                        if (key.StartsWith(KEY_VALUE)) keyedReference.keyValue = (key.Substring(KEY_VALUE.Length));
                        if (key.StartsWith(TMODEL_KEY)) keyedReference.tModelKey = (key.Substring(TMODEL_KEY.Length));
                    }
                    log.debug("KeyedReference = " + KEY_NAME + keyedReference.keyName + " "
                        + KEY_VALUE + keyedReference.keyValue + " "
                        + TMODEL_KEY + keyedReference.tModelKey);
                    cbs.Add(keyedReference);
                }
                else
                {
                    log.warn("Ignoring " + section);
                    //TODO add support for KeyedReferenceGroups?
                }
            }
            cb.Items = cbs.ToArray();
        }
        return cb;
    }
}
}
| |
#region Copyright notice and license
// Copyright 2015, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion
using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Google.Apis.Auth.OAuth2;
using Google.ProtocolBuffers;
using grpc.testing;
using Grpc.Auth;
using Grpc.Core;
using Grpc.Core.Utils;
using NUnit.Framework;
namespace Grpc.IntegrationTesting
{
/// <summary>
/// Command-line gRPC interop test client: connects to an interop test server and
/// runs the test case named by --test_case, asserting the expected responses.
/// </summary>
public class InteropClient
{
    private const string ServiceAccountUser = "155450119199-3psnrh1sdr3d8cpj1v46naggf81mhdnk@developer.gserviceaccount.com";
    private const string ComputeEngineUser = "155450119199-r5aaqa2vqoa9g5mv2m6s3m1l293rlmel@developer.gserviceaccount.com";
    private const string AuthScope = "https://www.googleapis.com/auth/xapi.zoo";
    private const string AuthScopeResponse = "xapi.zoo";

    /// <summary>Parsed command-line options; defaults match the interop spec.</summary>
    private class ClientOptions
    {
        public bool help;
        public string serverHost = "127.0.0.1";
        public string serverHostOverride = TestCredentials.DefaultHostOverride;
        public int? serverPort;
        public string testCase = "large_unary";
        public bool useTls;
        public bool useTestCa;
    }

    ClientOptions options;

    private InteropClient(ClientOptions options)
    {
        this.options = options;
    }

    /// <summary>
    /// Entry point: parses arguments, prints usage and exits on error, then runs
    /// the selected test case to completion.
    /// </summary>
    public static void Run(string[] args)
    {
        Console.WriteLine("gRPC C# interop testing client");
        ClientOptions options = ParseArguments(args);
        if (options.serverHost == null || !options.serverPort.HasValue || options.testCase == null)
        {
            Console.WriteLine("Missing required argument.");
            Console.WriteLine();
            options.help = true;
        }
        if (options.help)
        {
            Console.WriteLine("Usage:");
            Console.WriteLine("  --server_host=HOSTNAME");
            Console.WriteLine("  --server_host_override=HOSTNAME");
            Console.WriteLine("  --server_port=PORT");
            Console.WriteLine("  --test_case=TESTCASE");
            Console.WriteLine("  --use_tls=BOOLEAN");
            Console.WriteLine("  --use_test_ca=BOOLEAN");
            Console.WriteLine();
            Environment.Exit(1);
        }
        var interopClient = new InteropClient(options);
        interopClient.Run().Wait();
    }

    /// <summary>
    /// Creates the channel (with optional TLS and host override), runs the test
    /// case, and shuts the channel down.
    /// </summary>
    private async Task Run()
    {
        Credentials credentials = null;
        if (options.useTls)
        {
            credentials = TestCredentials.CreateTestClientCredentials(options.useTestCa);
        }
        List<ChannelOption> channelOptions = null;
        if (!string.IsNullOrEmpty(options.serverHostOverride))
        {
            // Needed when the TLS certificate's hostname differs from the target.
            channelOptions = new List<ChannelOption>
            {
                new ChannelOption(ChannelOptions.SslTargetNameOverride, options.serverHostOverride)
            };
        }
        var channel = new Channel(options.serverHost, options.serverPort.Value, credentials, channelOptions);
        TestService.TestServiceClient client = new TestService.TestServiceClient(channel);
        await RunTestCaseAsync(options.testCase, client);
        // BUGFIX: was ShutdownAsync().Wait() — blocking on a task inside an
        // async method; await it instead.
        await channel.ShutdownAsync();
    }

    /// <summary>Dispatches a test-case name to its implementation.</summary>
    private async Task RunTestCaseAsync(string testCase, TestService.TestServiceClient client)
    {
        switch (testCase)
        {
            case "empty_unary":
                RunEmptyUnary(client);
                break;
            case "large_unary":
                RunLargeUnary(client);
                break;
            case "client_streaming":
                await RunClientStreamingAsync(client);
                break;
            case "server_streaming":
                await RunServerStreamingAsync(client);
                break;
            case "ping_pong":
                await RunPingPongAsync(client);
                break;
            case "empty_stream":
                await RunEmptyStreamAsync(client);
                break;
            case "service_account_creds":
                await RunServiceAccountCredsAsync(client);
                break;
            case "compute_engine_creds":
                await RunComputeEngineCredsAsync(client);
                break;
            case "jwt_token_creds":
                await RunJwtTokenCredsAsync(client);
                break;
            case "oauth2_auth_token":
                await RunOAuth2AuthTokenAsync(client);
                break;
            case "per_rpc_creds":
                await RunPerRpcCredsAsync(client);
                break;
            case "cancel_after_begin":
                await RunCancelAfterBeginAsync(client);
                break;
            case "cancel_after_first_response":
                await RunCancelAfterFirstResponseAsync(client);
                break;
            case "timeout_on_sleeping_server":
                await RunTimeoutOnSleepingServerAsync(client);
                break;
            case "benchmark_empty_unary":
                RunBenchmarkEmptyUnary(client);
                break;
            default:
                throw new ArgumentException("Unknown test case " + testCase);
        }
    }

    /// <summary>Unary call with empty request/response.</summary>
    public static void RunEmptyUnary(TestService.ITestServiceClient client)
    {
        Console.WriteLine("running empty_unary");
        var response = client.EmptyCall(Empty.DefaultInstance);
        Assert.IsNotNull(response);
        Console.WriteLine("Passed!");
    }

    /// <summary>Unary call with a large (271828-byte) payload and 314159-byte response.</summary>
    public static void RunLargeUnary(TestService.ITestServiceClient client)
    {
        Console.WriteLine("running large_unary");
        var request = SimpleRequest.CreateBuilder()
            .SetResponseType(PayloadType.COMPRESSABLE)
            .SetResponseSize(314159)
            .SetPayload(CreateZerosPayload(271828))
            .Build();
        var response = client.UnaryCall(request);
        Assert.AreEqual(PayloadType.COMPRESSABLE, response.Payload.Type);
        Assert.AreEqual(314159, response.Payload.Body.Length);
        Console.WriteLine("Passed!");
    }

    /// <summary>Streams four payloads to the server and checks the aggregated size.</summary>
    public static async Task RunClientStreamingAsync(TestService.ITestServiceClient client)
    {
        Console.WriteLine("running client_streaming");
        var bodySizes = new List<int> { 27182, 8, 1828, 45904 }.ConvertAll((size) => StreamingInputCallRequest.CreateBuilder().SetPayload(CreateZerosPayload(size)).Build());
        using (var call = client.StreamingInputCall())
        {
            await call.RequestStream.WriteAllAsync(bodySizes);
            var response = await call.ResponseAsync;
            // 74922 = 27182 + 8 + 1828 + 45904.
            Assert.AreEqual(74922, response.AggregatedPayloadSize);
        }
        Console.WriteLine("Passed!");
    }

    /// <summary>Requests four server-streamed responses and checks their sizes.</summary>
    public static async Task RunServerStreamingAsync(TestService.ITestServiceClient client)
    {
        Console.WriteLine("running server_streaming");
        var bodySizes = new List<int> { 31415, 9, 2653, 58979 };
        var request = StreamingOutputCallRequest.CreateBuilder()
            .SetResponseType(PayloadType.COMPRESSABLE)
            .AddRangeResponseParameters(bodySizes.ConvertAll(
                (size) => ResponseParameters.CreateBuilder().SetSize(size).Build()))
            .Build();
        using (var call = client.StreamingOutputCall(request))
        {
            var responseList = await call.ResponseStream.ToListAsync();
            foreach (var res in responseList)
            {
                Assert.AreEqual(PayloadType.COMPRESSABLE, res.Payload.Type);
            }
            CollectionAssert.AreEqual(bodySizes, responseList.ConvertAll((item) => item.Payload.Body.Length));
        }
        Console.WriteLine("Passed!");
    }

    /// <summary>Full-duplex call alternating four request/response pairs.</summary>
    public static async Task RunPingPongAsync(TestService.ITestServiceClient client)
    {
        Console.WriteLine("running ping_pong");
        using (var call = client.FullDuplexCall())
        {
            await call.RequestStream.WriteAsync(StreamingOutputCallRequest.CreateBuilder()
                .SetResponseType(PayloadType.COMPRESSABLE)
                .AddResponseParameters(ResponseParameters.CreateBuilder().SetSize(31415))
                .SetPayload(CreateZerosPayload(27182)).Build());
            Assert.IsTrue(await call.ResponseStream.MoveNext());
            Assert.AreEqual(PayloadType.COMPRESSABLE, call.ResponseStream.Current.Payload.Type);
            Assert.AreEqual(31415, call.ResponseStream.Current.Payload.Body.Length);

            await call.RequestStream.WriteAsync(StreamingOutputCallRequest.CreateBuilder()
                .SetResponseType(PayloadType.COMPRESSABLE)
                .AddResponseParameters(ResponseParameters.CreateBuilder().SetSize(9))
                .SetPayload(CreateZerosPayload(8)).Build());
            Assert.IsTrue(await call.ResponseStream.MoveNext());
            Assert.AreEqual(PayloadType.COMPRESSABLE, call.ResponseStream.Current.Payload.Type);
            Assert.AreEqual(9, call.ResponseStream.Current.Payload.Body.Length);

            await call.RequestStream.WriteAsync(StreamingOutputCallRequest.CreateBuilder()
                .SetResponseType(PayloadType.COMPRESSABLE)
                .AddResponseParameters(ResponseParameters.CreateBuilder().SetSize(2653))
                .SetPayload(CreateZerosPayload(1828)).Build());
            Assert.IsTrue(await call.ResponseStream.MoveNext());
            Assert.AreEqual(PayloadType.COMPRESSABLE, call.ResponseStream.Current.Payload.Type);
            Assert.AreEqual(2653, call.ResponseStream.Current.Payload.Body.Length);

            await call.RequestStream.WriteAsync(StreamingOutputCallRequest.CreateBuilder()
                .SetResponseType(PayloadType.COMPRESSABLE)
                .AddResponseParameters(ResponseParameters.CreateBuilder().SetSize(58979))
                .SetPayload(CreateZerosPayload(45904)).Build());
            Assert.IsTrue(await call.ResponseStream.MoveNext());
            Assert.AreEqual(PayloadType.COMPRESSABLE, call.ResponseStream.Current.Payload.Type);
            Assert.AreEqual(58979, call.ResponseStream.Current.Payload.Body.Length);

            await call.RequestStream.CompleteAsync();
            Assert.IsFalse(await call.ResponseStream.MoveNext());
        }
        Console.WriteLine("Passed!");
    }

    /// <summary>Full-duplex call closed immediately; expects zero responses.</summary>
    public static async Task RunEmptyStreamAsync(TestService.ITestServiceClient client)
    {
        Console.WriteLine("running empty_stream");
        using (var call = client.FullDuplexCall())
        {
            await call.RequestStream.CompleteAsync();
            var responseList = await call.ResponseStream.ToListAsync();
            Assert.AreEqual(0, responseList.Count);
        }
        Console.WriteLine("Passed!");
    }

    /// <summary>Unary call authenticated with scoped service-account credentials.</summary>
    public static async Task RunServiceAccountCredsAsync(TestService.TestServiceClient client)
    {
        Console.WriteLine("running service_account_creds");
        var credential = await GoogleCredential.GetApplicationDefaultAsync();
        credential = credential.CreateScoped(new[] { AuthScope });
        client.HeaderInterceptor = AuthInterceptors.FromCredential(credential);
        var request = SimpleRequest.CreateBuilder()
            .SetResponseType(PayloadType.COMPRESSABLE)
            .SetResponseSize(314159)
            .SetPayload(CreateZerosPayload(271828))
            .SetFillUsername(true)
            .SetFillOauthScope(true)
            .Build();
        var response = client.UnaryCall(request);
        Assert.AreEqual(PayloadType.COMPRESSABLE, response.Payload.Type);
        Assert.AreEqual(314159, response.Payload.Body.Length);
        Assert.AreEqual(AuthScopeResponse, response.OauthScope);
        Assert.AreEqual(ServiceAccountUser, response.Username);
        Console.WriteLine("Passed!");
    }

    /// <summary>Unary call authenticated with Compute Engine default credentials.</summary>
    public static async Task RunComputeEngineCredsAsync(TestService.TestServiceClient client)
    {
        Console.WriteLine("running compute_engine_creds");
        var credential = await GoogleCredential.GetApplicationDefaultAsync();
        // Compute credentials must not require explicit scoping.
        Assert.IsFalse(credential.IsCreateScopedRequired);
        client.HeaderInterceptor = AuthInterceptors.FromCredential(credential);
        var request = SimpleRequest.CreateBuilder()
            .SetResponseType(PayloadType.COMPRESSABLE)
            .SetResponseSize(314159)
            .SetPayload(CreateZerosPayload(271828))
            .SetFillUsername(true)
            .SetFillOauthScope(true)
            .Build();
        var response = client.UnaryCall(request);
        Assert.AreEqual(PayloadType.COMPRESSABLE, response.Payload.Type);
        Assert.AreEqual(314159, response.Payload.Body.Length);
        Assert.AreEqual(AuthScopeResponse, response.OauthScope);
        Assert.AreEqual(ComputeEngineUser, response.Username);
        Console.WriteLine("Passed!");
    }

    /// <summary>Unary call authenticated with an unscoped JWT credential.</summary>
    public static async Task RunJwtTokenCredsAsync(TestService.TestServiceClient client)
    {
        Console.WriteLine("running jwt_token_creds");
        var credential = await GoogleCredential.GetApplicationDefaultAsync();
        // check this a credential with scope support, but don't add the scope.
        Assert.IsTrue(credential.IsCreateScopedRequired);
        client.HeaderInterceptor = AuthInterceptors.FromCredential(credential);
        var request = SimpleRequest.CreateBuilder()
            .SetResponseType(PayloadType.COMPRESSABLE)
            .SetResponseSize(314159)
            .SetPayload(CreateZerosPayload(271828))
            .SetFillUsername(true)
            .SetFillOauthScope(true)
            .Build();
        var response = client.UnaryCall(request);
        Assert.AreEqual(PayloadType.COMPRESSABLE, response.Payload.Type);
        Assert.AreEqual(314159, response.Payload.Body.Length);
        Assert.AreEqual(ServiceAccountUser, response.Username);
        Console.WriteLine("Passed!");
    }

    /// <summary>Unary call carrying a pre-fetched OAuth2 access token.</summary>
    public static async Task RunOAuth2AuthTokenAsync(TestService.TestServiceClient client)
    {
        Console.WriteLine("running oauth2_auth_token");
        ITokenAccess credential = (await GoogleCredential.GetApplicationDefaultAsync()).CreateScoped(new[] { AuthScope });
        string oauth2Token = await credential.GetAccessTokenForRequestAsync();
        client.HeaderInterceptor = AuthInterceptors.FromAccessToken(oauth2Token);
        var request = SimpleRequest.CreateBuilder()
            .SetFillUsername(true)
            .SetFillOauthScope(true)
            .Build();
        var response = client.UnaryCall(request);
        Assert.AreEqual(AuthScopeResponse, response.OauthScope);
        Assert.AreEqual(ServiceAccountUser, response.Username);
        Console.WriteLine("Passed!");
    }

    /// <summary>Unary call with credentials attached per-call via metadata.</summary>
    public static async Task RunPerRpcCredsAsync(TestService.TestServiceClient client)
    {
        Console.WriteLine("running per_rpc_creds");
        ITokenAccess credential = (await GoogleCredential.GetApplicationDefaultAsync()).CreateScoped(new[] { AuthScope });
        string oauth2Token = await credential.GetAccessTokenForRequestAsync();
        var headerInterceptor = AuthInterceptors.FromAccessToken(oauth2Token);
        var request = SimpleRequest.CreateBuilder()
            .SetFillUsername(true)
            .SetFillOauthScope(true)
            .Build();
        var headers = new Metadata();
        headerInterceptor(null, "", headers);
        var response = client.UnaryCall(request, headers: headers);
        Assert.AreEqual(AuthScopeResponse, response.OauthScope);
        Assert.AreEqual(ServiceAccountUser, response.Username);
        Console.WriteLine("Passed!");
    }

    /// <summary>Cancels a client-streaming call before any request is sent.</summary>
    public static async Task RunCancelAfterBeginAsync(TestService.ITestServiceClient client)
    {
        Console.WriteLine("running cancel_after_begin");
        var cts = new CancellationTokenSource();
        using (var call = client.StreamingInputCall(cancellationToken: cts.Token))
        {
            // TODO(jtattermusch): we need this to ensure call has been initiated once we cancel it.
            await Task.Delay(1000);
            cts.Cancel();
            // BUGFIX: Assert.Throws with an async lambda compiles to async void,
            // so the RpcException escaped the assertion; await and catch instead.
            try
            {
                await call.ResponseAsync;
                Assert.Fail("Expected RpcException was not thrown.");
            }
            catch (RpcException ex)
            {
                Assert.AreEqual(StatusCode.Cancelled, ex.Status.StatusCode);
            }
        }
        Console.WriteLine("Passed!");
    }

    /// <summary>Cancels a full-duplex call after receiving the first response.</summary>
    public static async Task RunCancelAfterFirstResponseAsync(TestService.ITestServiceClient client)
    {
        Console.WriteLine("running cancel_after_first_response");
        var cts = new CancellationTokenSource();
        using (var call = client.FullDuplexCall(cancellationToken: cts.Token))
        {
            await call.RequestStream.WriteAsync(StreamingOutputCallRequest.CreateBuilder()
                .SetResponseType(PayloadType.COMPRESSABLE)
                .AddResponseParameters(ResponseParameters.CreateBuilder().SetSize(31415))
                .SetPayload(CreateZerosPayload(27182)).Build());
            Assert.IsTrue(await call.ResponseStream.MoveNext());
            Assert.AreEqual(PayloadType.COMPRESSABLE, call.ResponseStream.Current.Payload.Type);
            Assert.AreEqual(31415, call.ResponseStream.Current.Payload.Body.Length);
            cts.Cancel();
            // BUGFIX: same async-void Assert.Throws problem as above.
            try
            {
                await call.ResponseStream.MoveNext();
                Assert.Fail("Expected RpcException was not thrown.");
            }
            catch (RpcException ex)
            {
                Assert.AreEqual(StatusCode.Cancelled, ex.Status.StatusCode);
            }
        }
        Console.WriteLine("Passed!");
    }

    /// <summary>Sets a 1 ms deadline and expects DeadlineExceeded from the server.</summary>
    public static async Task RunTimeoutOnSleepingServerAsync(TestService.ITestServiceClient client)
    {
        Console.WriteLine("running timeout_on_sleeping_server");
        var deadline = DateTime.UtcNow.AddMilliseconds(1);
        using (var call = client.FullDuplexCall(deadline: deadline))
        {
            try
            {
                await call.RequestStream.WriteAsync(StreamingOutputCallRequest.CreateBuilder()
                    .SetPayload(CreateZerosPayload(27182)).Build());
            }
            catch (InvalidOperationException)
            {
                // Deadline was reached before write has started. Eat the exception and continue.
            }
            // BUGFIX: same async-void Assert.Throws problem as above.
            try
            {
                await call.ResponseStream.MoveNext();
                Assert.Fail("Expected RpcException was not thrown.");
            }
            catch (RpcException ex)
            {
                Assert.AreEqual(StatusCode.DeadlineExceeded, ex.Status.StatusCode);
            }
        }
        Console.WriteLine("Passed!");
    }

    // This is not an official interop test, but it's useful.
    public static void RunBenchmarkEmptyUnary(TestService.ITestServiceClient client)
    {
        BenchmarkUtil.RunBenchmark(10000, 10000,
            () => { client.EmptyCall(Empty.DefaultInstance); });
    }

    /// <summary>Builds a payload of <paramref name="size"/> zero bytes.</summary>
    private static Payload CreateZerosPayload(int size)
    {
        return Payload.CreateBuilder().SetBody(ByteString.CopyFrom(new byte[size])).Build();
    }

    /// <summary>Parses all arguments; stops early once --help state is reached.</summary>
    private static ClientOptions ParseArguments(string[] args)
    {
        var options = new ClientOptions();
        foreach (string arg in args)
        {
            ParseArgument(arg, options);
            if (options.help)
            {
                break;
            }
        }
        return options;
    }

    /// <summary>Parses one "--name=value" argument; unknown arguments trigger help.</summary>
    private static void ParseArgument(string arg, ClientOptions options)
    {
        Match match;
        match = Regex.Match(arg, "--server_host=(.*)");
        if (match.Success)
        {
            options.serverHost = match.Groups[1].Value.Trim();
            return;
        }
        match = Regex.Match(arg, "--server_host_override=(.*)");
        if (match.Success)
        {
            options.serverHostOverride = match.Groups[1].Value.Trim();
            return;
        }
        match = Regex.Match(arg, "--server_port=(.*)");
        if (match.Success)
        {
            options.serverPort = int.Parse(match.Groups[1].Value.Trim());
            return;
        }
        match = Regex.Match(arg, "--test_case=(.*)");
        if (match.Success)
        {
            options.testCase = match.Groups[1].Value.Trim();
            return;
        }
        match = Regex.Match(arg, "--use_tls=(.*)");
        if (match.Success)
        {
            options.useTls = bool.Parse(match.Groups[1].Value.Trim());
            return;
        }
        match = Regex.Match(arg, "--use_test_ca=(.*)");
        if (match.Success)
        {
            options.useTestCa = bool.Parse(match.Groups[1].Value.Trim());
            return;
        }
        Console.WriteLine(string.Format("Unrecognized argument \"{0}\"", arg));
        options.help = true;
    }
}
}
| |
using EIDSS.Reports.Document.Lim.ContainerContent;
using EIDSS.Reports.Document.Lim.SampleDestruction.SampleDestructionDataSetTableAdapters;
namespace EIDSS.Reports.Document.Lim.SampleDestruction
{
partial class SampleDestructionReport
{
#region Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(SampleDestructionReport));
this.DetailReport = new DevExpress.XtraReports.UI.DetailReportBand();
this.SampleDestructionDetail = new DevExpress.XtraReports.UI.DetailBand();
this.xrTable1 = new DevExpress.XtraReports.UI.XRTable();
this.xrTableRow5 = new DevExpress.XtraReports.UI.XRTableRow();
this.xrTableCell13 = new DevExpress.XtraReports.UI.XRTableCell();
this.SampleIdTable = new DevExpress.XtraReports.UI.XRTable();
this.xrTableRow1 = new DevExpress.XtraReports.UI.XRTableRow();
this.xrTableCell3 = new DevExpress.XtraReports.UI.XRTableCell();
this.xrTableRow2 = new DevExpress.XtraReports.UI.XRTableRow();
this.xrTableCell4 = new DevExpress.XtraReports.UI.XRTableCell();
this.xrTableCell14 = new DevExpress.XtraReports.UI.XRTableCell();
this.xrTableCell5 = new DevExpress.XtraReports.UI.XRTableCell();
this.xrTableCell2 = new DevExpress.XtraReports.UI.XRTableCell();
this.SampleDestructionReportHeader = new DevExpress.XtraReports.UI.ReportHeaderBand();
this.tableFreezer = new DevExpress.XtraReports.UI.XRTable();
this.xrTableRow10 = new DevExpress.XtraReports.UI.XRTableRow();
this.xrTableCell1 = new DevExpress.XtraReports.UI.XRTableCell();
this.xrTableCell10 = new DevExpress.XtraReports.UI.XRTableCell();
this.FreezerNumberCell = new DevExpress.XtraReports.UI.XRTableCell();
this.FreezerBarcodeCell = new DevExpress.XtraReports.UI.XRTableCell();
this.SampleDestructionAdapter = new EIDSS.Reports.Document.Lim.SampleDestruction.SampleDestructionDataSetTableAdapters.SampleDestructionAdapter();
this.SampleDestructionDataSet = new EIDSS.Reports.Document.Lim.SampleDestruction.SampleDestructionDataSet();
this.SampleDestructionReportFooter = new DevExpress.XtraReports.UI.ReportFooterBand();
this.xrTable2 = new DevExpress.XtraReports.UI.XRTable();
this.xrTableRow3 = new DevExpress.XtraReports.UI.XRTableRow();
this.xrTableCell6 = new DevExpress.XtraReports.UI.XRTableCell();
this.xrTableCell8 = new DevExpress.XtraReports.UI.XRTableCell();
this.xrTableRow4 = new DevExpress.XtraReports.UI.XRTableRow();
this.xrTableCell7 = new DevExpress.XtraReports.UI.XRTableCell();
this.xrTableCell9 = new DevExpress.XtraReports.UI.XRTableCell();
((System.ComponentModel.ISupportInitialize)(this.m_BaseDataSet)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.tableBaseHeader)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.xrTable1)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.SampleIdTable)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.tableFreezer)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.SampleDestructionDataSet)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.xrTable2)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this)).BeginInit();
//
// cellLanguage
//
this.cellLanguage.StylePriority.UseTextAlignment = false;
//
// lblReportName
//
this.lblReportName.StylePriority.UseBorders = false;
this.lblReportName.StylePriority.UseBorderWidth = false;
this.lblReportName.StylePriority.UseFont = false;
this.lblReportName.StylePriority.UseTextAlignment = false;
resources.ApplyResources(this.lblReportName, "lblReportName");
//
// Detail
//
this.Detail.StylePriority.UseFont = false;
this.Detail.StylePriority.UsePadding = false;
//
// PageHeader
//
this.PageHeader.StylePriority.UseFont = false;
this.PageHeader.StylePriority.UsePadding = false;
//
// PageFooter
//
this.PageFooter.StylePriority.UseBorders = false;
//
// ReportHeader
//
resources.ApplyResources(this.ReportHeader, "ReportHeader");
//
// xrPageInfo1
//
this.xrPageInfo1.StylePriority.UseBorders = false;
//
// cellReportHeader
//
this.cellReportHeader.StylePriority.UseBorders = false;
this.cellReportHeader.StylePriority.UseFont = false;
this.cellReportHeader.StylePriority.UseTextAlignment = false;
//
// cellBaseSite
//
this.cellBaseSite.StylePriority.UseBorders = false;
this.cellBaseSite.StylePriority.UseFont = false;
this.cellBaseSite.StylePriority.UseTextAlignment = false;
//
// tableBaseHeader
//
this.tableBaseHeader.StylePriority.UseBorders = false;
this.tableBaseHeader.StylePriority.UseBorderWidth = false;
this.tableBaseHeader.StylePriority.UseFont = false;
this.tableBaseHeader.StylePriority.UsePadding = false;
this.tableBaseHeader.StylePriority.UseTextAlignment = false;
//
// DetailReport
//
this.DetailReport.Bands.AddRange(new DevExpress.XtraReports.UI.Band[] {
this.SampleDestructionDetail,
this.SampleDestructionReportHeader});
this.DetailReport.DataAdapter = this.SampleDestructionAdapter;
this.DetailReport.DataMember = "SampleDestruction";
this.DetailReport.DataSource = this.SampleDestructionDataSet;
this.DetailReport.Level = 0;
this.DetailReport.Name = "DetailReport";
this.DetailReport.Padding = new DevExpress.XtraPrinting.PaddingInfo(2, 2, 0, 0, 100F);
resources.ApplyResources(this.DetailReport, "DetailReport");
//
// SampleDestructionDetail
//
this.SampleDestructionDetail.Controls.AddRange(new DevExpress.XtraReports.UI.XRControl[] {
this.xrTable1});
resources.ApplyResources(this.SampleDestructionDetail, "SampleDestructionDetail");
this.SampleDestructionDetail.KeepTogether = true;
this.SampleDestructionDetail.Name = "SampleDestructionDetail";
this.SampleDestructionDetail.Padding = new DevExpress.XtraPrinting.PaddingInfo(2, 2, 2, 2, 100F);
this.SampleDestructionDetail.StylePriority.UseFont = false;
this.SampleDestructionDetail.StylePriority.UsePadding = false;
this.SampleDestructionDetail.StylePriority.UseTextAlignment = false;
//
// xrTable1
//
this.xrTable1.Borders = ((DevExpress.XtraPrinting.BorderSide)(((DevExpress.XtraPrinting.BorderSide.Left | DevExpress.XtraPrinting.BorderSide.Right)
| DevExpress.XtraPrinting.BorderSide.Bottom)));
resources.ApplyResources(this.xrTable1, "xrTable1");
this.xrTable1.Name = "xrTable1";
this.xrTable1.Rows.AddRange(new DevExpress.XtraReports.UI.XRTableRow[] {
this.xrTableRow5});
this.xrTable1.StylePriority.UseBorders = false;
this.xrTable1.StylePriority.UseTextAlignment = false;
//
// xrTableRow5
//
this.xrTableRow5.Cells.AddRange(new DevExpress.XtraReports.UI.XRTableCell[] {
this.xrTableCell13,
this.xrTableCell14,
this.xrTableCell5,
this.xrTableCell2});
this.xrTableRow5.Name = "xrTableRow5";
resources.ApplyResources(this.xrTableRow5, "xrTableRow5");
//
// xrTableCell13
//
this.xrTableCell13.Controls.AddRange(new DevExpress.XtraReports.UI.XRControl[] {
this.SampleIdTable});
this.xrTableCell13.Name = "xrTableCell13";
this.xrTableCell13.StylePriority.UseTextAlignment = false;
resources.ApplyResources(this.xrTableCell13, "xrTableCell13");
//
// SampleIdTable
//
this.SampleIdTable.Borders = DevExpress.XtraPrinting.BorderSide.None;
resources.ApplyResources(this.SampleIdTable, "SampleIdTable");
this.SampleIdTable.Name = "SampleIdTable";
this.SampleIdTable.Rows.AddRange(new DevExpress.XtraReports.UI.XRTableRow[] {
this.xrTableRow1,
this.xrTableRow2});
this.SampleIdTable.StylePriority.UseBorders = false;
this.SampleIdTable.StylePriority.UseTextAlignment = false;
//
// xrTableRow1
//
this.xrTableRow1.Cells.AddRange(new DevExpress.XtraReports.UI.XRTableCell[] {
this.xrTableCell3});
this.xrTableRow1.Name = "xrTableRow1";
resources.ApplyResources(this.xrTableRow1, "xrTableRow1");
//
// xrTableCell3
//
this.xrTableCell3.DataBindings.AddRange(new DevExpress.XtraReports.UI.XRBinding[] {
new DevExpress.XtraReports.UI.XRBinding("Text", null, "SampleDestruction.strLabSampleID", "*{0}*")});
resources.ApplyResources(this.xrTableCell3, "xrTableCell3");
this.xrTableCell3.Name = "xrTableCell3";
this.xrTableCell3.StylePriority.UseFont = false;
//
// xrTableRow2
//
this.xrTableRow2.Cells.AddRange(new DevExpress.XtraReports.UI.XRTableCell[] {
this.xrTableCell4});
this.xrTableRow2.Name = "xrTableRow2";
resources.ApplyResources(this.xrTableRow2, "xrTableRow2");
//
// xrTableCell4
//
this.xrTableCell4.DataBindings.AddRange(new DevExpress.XtraReports.UI.XRBinding[] {
new DevExpress.XtraReports.UI.XRBinding("Text", null, "SampleDestruction.strLabSampleID")});
this.xrTableCell4.Name = "xrTableCell4";
resources.ApplyResources(this.xrTableCell4, "xrTableCell4");
//
// xrTableCell14
//
this.xrTableCell14.DataBindings.AddRange(new DevExpress.XtraReports.UI.XRBinding[] {
new DevExpress.XtraReports.UI.XRBinding("Text", null, "SampleDestruction.strSampleType")});
this.xrTableCell14.Name = "xrTableCell14";
this.xrTableCell14.StylePriority.UseTextAlignment = false;
resources.ApplyResources(this.xrTableCell14, "xrTableCell14");
//
// xrTableCell5
//
this.xrTableCell5.DataBindings.AddRange(new DevExpress.XtraReports.UI.XRBinding[] {
new DevExpress.XtraReports.UI.XRBinding("Text", null, "SampleDestruction.strCondition")});
this.xrTableCell5.Name = "xrTableCell5";
resources.ApplyResources(this.xrTableCell5, "xrTableCell5");
//
// xrTableCell2
//
this.xrTableCell2.DataBindings.AddRange(new DevExpress.XtraReports.UI.XRBinding[] {
new DevExpress.XtraReports.UI.XRBinding("Text", null, "SampleDestruction.strDestructionMethod")});
this.xrTableCell2.Name = "xrTableCell2";
resources.ApplyResources(this.xrTableCell2, "xrTableCell2");
//
// SampleDestructionReportHeader
//
this.SampleDestructionReportHeader.Controls.AddRange(new DevExpress.XtraReports.UI.XRControl[] {
this.tableFreezer});
resources.ApplyResources(this.SampleDestructionReportHeader, "SampleDestructionReportHeader");
this.SampleDestructionReportHeader.KeepTogether = true;
this.SampleDestructionReportHeader.Name = "SampleDestructionReportHeader";
this.SampleDestructionReportHeader.Padding = new DevExpress.XtraPrinting.PaddingInfo(2, 2, 2, 2, 100F);
this.SampleDestructionReportHeader.StylePriority.UseFont = false;
this.SampleDestructionReportHeader.StylePriority.UsePadding = false;
this.SampleDestructionReportHeader.StylePriority.UseTextAlignment = false;
//
// tableFreezer
//
this.tableFreezer.Borders = ((DevExpress.XtraPrinting.BorderSide)((((DevExpress.XtraPrinting.BorderSide.Left | DevExpress.XtraPrinting.BorderSide.Top)
| DevExpress.XtraPrinting.BorderSide.Right)
| DevExpress.XtraPrinting.BorderSide.Bottom)));
resources.ApplyResources(this.tableFreezer, "tableFreezer");
this.tableFreezer.Name = "tableFreezer";
this.tableFreezer.Rows.AddRange(new DevExpress.XtraReports.UI.XRTableRow[] {
this.xrTableRow10});
this.tableFreezer.StylePriority.UseBorders = false;
this.tableFreezer.StylePriority.UseFont = false;
//
// xrTableRow10
//
this.xrTableRow10.Cells.AddRange(new DevExpress.XtraReports.UI.XRTableCell[] {
this.xrTableCell1,
this.xrTableCell10,
this.FreezerNumberCell,
this.FreezerBarcodeCell});
this.xrTableRow10.Name = "xrTableRow10";
resources.ApplyResources(this.xrTableRow10, "xrTableRow10");
//
// xrTableCell1
//
this.xrTableCell1.Name = "xrTableCell1";
resources.ApplyResources(this.xrTableCell1, "xrTableCell1");
//
// xrTableCell10
//
this.xrTableCell10.Name = "xrTableCell10";
this.xrTableCell10.StylePriority.UseTextAlignment = false;
resources.ApplyResources(this.xrTableCell10, "xrTableCell10");
//
// FreezerNumberCell
//
this.FreezerNumberCell.Name = "FreezerNumberCell";
resources.ApplyResources(this.FreezerNumberCell, "FreezerNumberCell");
//
// FreezerBarcodeCell
//
this.FreezerBarcodeCell.Name = "FreezerBarcodeCell";
resources.ApplyResources(this.FreezerBarcodeCell, "FreezerBarcodeCell");
//
// SampleDestructionAdapter
//
this.SampleDestructionAdapter.ClearBeforeFill = true;
//
// SampleDestructionDataSet
//
this.SampleDestructionDataSet.DataSetName = "ContainerContentDataSet";
this.SampleDestructionDataSet.SchemaSerializationMode = System.Data.SchemaSerializationMode.IncludeSchema;
//
// SampleDestructionReportFooter
//
this.SampleDestructionReportFooter.Controls.AddRange(new DevExpress.XtraReports.UI.XRControl[] {
this.xrTable2});
resources.ApplyResources(this.SampleDestructionReportFooter, "SampleDestructionReportFooter");
this.SampleDestructionReportFooter.Name = "SampleDestructionReportFooter";
this.SampleDestructionReportFooter.StylePriority.UseTextAlignment = false;
//
// xrTable2
//
resources.ApplyResources(this.xrTable2, "xrTable2");
this.xrTable2.Name = "xrTable2";
this.xrTable2.Rows.AddRange(new DevExpress.XtraReports.UI.XRTableRow[] {
this.xrTableRow3,
this.xrTableRow4});
//
// xrTableRow3
//
this.xrTableRow3.Cells.AddRange(new DevExpress.XtraReports.UI.XRTableCell[] {
this.xrTableCell6,
this.xrTableCell8});
this.xrTableRow3.Name = "xrTableRow3";
resources.ApplyResources(this.xrTableRow3, "xrTableRow3");
//
// xrTableCell6
//
this.xrTableCell6.Name = "xrTableCell6";
resources.ApplyResources(this.xrTableCell6, "xrTableCell6");
//
// xrTableCell8
//
this.xrTableCell8.Borders = DevExpress.XtraPrinting.BorderSide.Bottom;
this.xrTableCell8.DataBindings.AddRange(new DevExpress.XtraReports.UI.XRBinding[] {
new DevExpress.XtraReports.UI.XRBinding("Text", this.SampleDestructionDataSet, "SampleDestruction.strSentForDestructionBy")});
this.xrTableCell8.Name = "xrTableCell8";
this.xrTableCell8.StylePriority.UseBorders = false;
resources.ApplyResources(this.xrTableCell8, "xrTableCell8");
//
// xrTableRow4
//
this.xrTableRow4.Cells.AddRange(new DevExpress.XtraReports.UI.XRTableCell[] {
this.xrTableCell7,
this.xrTableCell9});
this.xrTableRow4.Name = "xrTableRow4";
resources.ApplyResources(this.xrTableRow4, "xrTableRow4");
//
// xrTableCell7
//
this.xrTableCell7.Name = "xrTableCell7";
resources.ApplyResources(this.xrTableCell7, "xrTableCell7");
//
// xrTableCell9
//
this.xrTableCell9.Borders = DevExpress.XtraPrinting.BorderSide.Bottom;
this.xrTableCell9.DataBindings.AddRange(new DevExpress.XtraReports.UI.XRBinding[] {
new DevExpress.XtraReports.UI.XRBinding("Text", this.SampleDestructionDataSet, "SampleDestruction.strDestructionApprovedBy")});
this.xrTableCell9.Name = "xrTableCell9";
this.xrTableCell9.StylePriority.UseBorders = false;
resources.ApplyResources(this.xrTableCell9, "xrTableCell9");
//
// SampleDestructionReport
//
this.Bands.AddRange(new DevExpress.XtraReports.UI.Band[] {
this.Detail,
this.PageHeader,
this.PageFooter,
this.ReportHeader,
this.DetailReport,
this.SampleDestructionReportFooter});
resources.ApplyResources(this, "$this");
this.Version = "13.1";
this.Controls.SetChildIndex(this.SampleDestructionReportFooter, 0);
this.Controls.SetChildIndex(this.DetailReport, 0);
this.Controls.SetChildIndex(this.ReportHeader, 0);
this.Controls.SetChildIndex(this.PageFooter, 0);
this.Controls.SetChildIndex(this.PageHeader, 0);
this.Controls.SetChildIndex(this.Detail, 0);
((System.ComponentModel.ISupportInitialize)(this.m_BaseDataSet)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.tableBaseHeader)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.xrTable1)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.SampleIdTable)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.tableFreezer)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.SampleDestructionDataSet)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.xrTable2)).EndInit();
((System.ComponentModel.ISupportInitialize)(this)).EndInit();
}
#endregion
        // NOTE(review): designer-generated fields — comments here may be discarded if the
        // report is re-saved in the DevExpress designer.

        // Nested detail report that renders one row per destroyed sample.
        private DevExpress.XtraReports.UI.DetailReportBand DetailReport;
        private DevExpress.XtraReports.UI.DetailBand SampleDestructionDetail;
        // Freezer header table shown once per detail report.
        private DevExpress.XtraReports.UI.XRTable tableFreezer;
        private DevExpress.XtraReports.UI.ReportHeaderBand SampleDestructionReportHeader;
        private DevExpress.XtraReports.UI.XRTableRow xrTableRow10;
        private DevExpress.XtraReports.UI.XRTableCell xrTableCell10;
        // Typed dataset/adapter pair bound to the "SampleDestruction" data member.
        private SampleDestructionAdapter SampleDestructionAdapter;
        private SampleDestructionDataSet SampleDestructionDataSet;
        private DevExpress.XtraReports.UI.XRTableCell FreezerBarcodeCell;
        private DevExpress.XtraReports.UI.XRTableCell FreezerNumberCell;
        // Detail row table; SampleIdTable nests the lab sample id (barcode + text rows).
        private DevExpress.XtraReports.UI.XRTable xrTable1;
        private DevExpress.XtraReports.UI.XRTableRow xrTableRow5;
        private DevExpress.XtraReports.UI.XRTableCell xrTableCell13;
        private DevExpress.XtraReports.UI.XRTableCell xrTableCell14;
        private DevExpress.XtraReports.UI.XRTable SampleIdTable;
        private DevExpress.XtraReports.UI.XRTableRow xrTableRow1;
        private DevExpress.XtraReports.UI.XRTableCell xrTableCell3;
        private DevExpress.XtraReports.UI.XRTableRow xrTableRow2;
        private DevExpress.XtraReports.UI.XRTableCell xrTableCell4;
        private DevExpress.XtraReports.UI.XRTableCell xrTableCell5;
        private DevExpress.XtraReports.UI.XRTableCell xrTableCell2;
        private DevExpress.XtraReports.UI.XRTableCell xrTableCell1;
        // Footer table with "sent for destruction by" / "approved by" signature cells.
        private DevExpress.XtraReports.UI.ReportFooterBand SampleDestructionReportFooter;
        private DevExpress.XtraReports.UI.XRTable xrTable2;
        private DevExpress.XtraReports.UI.XRTableRow xrTableRow3;
        private DevExpress.XtraReports.UI.XRTableCell xrTableCell6;
        private DevExpress.XtraReports.UI.XRTableCell xrTableCell8;
        private DevExpress.XtraReports.UI.XRTableRow xrTableRow4;
        private DevExpress.XtraReports.UI.XRTableCell xrTableCell7;
        private DevExpress.XtraReports.UI.XRTableCell xrTableCell9;
}
}
| |
using Microsoft.TeamFoundation.Build.WebApi;
using Microsoft.TeamFoundation.DistributedTask.WebApi;
using Microsoft.VisualStudio.Services.Agent.Util;
using Microsoft.VisualStudio.Services.Agent.Worker;
using Microsoft.VisualStudio.Services.Agent.Worker.Build;
using Moq;
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
namespace Microsoft.VisualStudio.Services.Agent.Tests.Worker.Build
{
public sealed class GitSourceProviderL0
{
private Mock<IGitCommandManager> GetDefaultGitCommandMock()
{
Mock<IGitCommandManager> _gitCommandManager = new Mock<IGitCommandManager>();
_gitCommandManager
.Setup(x => x.EnsureGitVersion(It.IsAny<Version>(), It.IsAny<bool>()))
.Returns(true);
_gitCommandManager
.Setup(x => x.LoadGitExecutionInfo(It.IsAny<IExecutionContext>(), It.IsAny<bool>()))
.Returns(Task.CompletedTask);
_gitCommandManager
.Setup(x => x.GitInit(It.IsAny<IExecutionContext>(), It.IsAny<string>()))
.Returns(Task.FromResult<int>(0));
_gitCommandManager
.Setup(x => x.GitRemoteAdd(It.IsAny<IExecutionContext>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>()))
.Returns(Task.FromResult<int>(0));
_gitCommandManager
.Setup(x => x.GitFetch(It.IsAny<IExecutionContext>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<int>(), It.IsAny<List<string>>(), It.IsAny<string>(), It.IsAny<CancellationToken>()))
.Returns(Task.FromResult<int>(0));
_gitCommandManager
.Setup(x => x.GitLFSFetch(It.IsAny<IExecutionContext>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()))
.Returns(Task.FromResult<int>(0));
_gitCommandManager
.Setup(x => x.GitCheckout(It.IsAny<IExecutionContext>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()))
.Returns(Task.FromResult<int>(0));
_gitCommandManager
.Setup(x => x.GitClean(It.IsAny<IExecutionContext>(), It.IsAny<string>()))
.Returns(Task.FromResult<int>(0));
_gitCommandManager
.Setup(x => x.GitReset(It.IsAny<IExecutionContext>(), It.IsAny<string>()))
.Returns(Task.FromResult<int>(0));
_gitCommandManager
.Setup(x => x.GitRemoteSetUrl(It.IsAny<IExecutionContext>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>()))
.Returns(Task.FromResult<int>(0));
_gitCommandManager
.Setup(x => x.GitRemoteSetPushUrl(It.IsAny<IExecutionContext>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>()))
.Returns(Task.FromResult<int>(0));
_gitCommandManager
.Setup(x => x.GitSubmoduleUpdate(It.IsAny<IExecutionContext>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()))
.Returns(Task.FromResult<int>(0));
_gitCommandManager
.Setup(x => x.GitSubmoduleSync(It.IsAny<IExecutionContext>(), It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()))
.Returns(Task.FromResult<int>(0));
_gitCommandManager
.Setup(x => x.GitGetFetchUrl(It.IsAny<IExecutionContext>(), It.IsAny<string>()))
.Returns(Task.FromResult<Uri>(new Uri("https://github.com/Microsoft/vsts-agent")));
_gitCommandManager
.Setup(x => x.GitDisableAutoGC(It.IsAny<IExecutionContext>(), It.IsAny<string>()))
.Returns(Task.FromResult<int>(0));
_gitCommandManager
.Setup(x => x.GitVersion(It.IsAny<IExecutionContext>()))
.Returns(Task.FromResult<Version>(new Version(2, 7)));
return _gitCommandManager;
}
private Mock<IExecutionContext> GetTestExecutionContext(TestHostContext tc, string sourceFolder, string sourceBranch, string sourceVersion, bool enableAuth)
{
var trace = tc.GetTrace();
var executionContext = new Mock<IExecutionContext>();
List<string> warnings;
executionContext
.Setup(x => x.Variables)
.Returns(new Variables(tc, copy: new Dictionary<string, string>(), maskHints: new List<MaskHint>(), warnings: out warnings));
executionContext
.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>()))
.Callback((string tag, string message) =>
{
trace.Info($"{tag}{message}");
});
executionContext
.Setup(x => x.WriteDebug)
.Returns(true);
executionContext.Object.Variables.Set(Constants.Variables.Build.SourcesDirectory, sourceFolder);
executionContext.Object.Variables.Set(Constants.Variables.Build.SourceBranch, sourceBranch);
executionContext.Object.Variables.Set(Constants.Variables.Build.SourceVersion, sourceVersion);
executionContext.Object.Variables.Set(Constants.Variables.System.EnableAccessToken, enableAuth.ToString());
return executionContext;
}
private ServiceEndpoint GetTestSourceEndpoint(
string url,
bool clean = false,
bool checkoutSubmodules = false,
bool gitLfsSupport = false,
int fetchDepth = 0)
{
var endpoint = new ServiceEndpoint();
endpoint.Data[WellKnownEndpointData.Clean] = clean.ToString();
endpoint.Data[WellKnownEndpointData.CheckoutSubmodules] = checkoutSubmodules.ToString();
endpoint.Url = new Uri(url);
endpoint.Authorization = new EndpointAuthorization()
{
Scheme = EndpointAuthorizationSchemes.UsernamePassword
};
endpoint.Authorization.Parameters[EndpointAuthorizationParameters.Username] = "someuser";
endpoint.Authorization.Parameters[EndpointAuthorizationParameters.Password] = "SomePassword!";
endpoint.Data["FetchDepth"] = fetchDepth.ToString();
endpoint.Data["GitLfsSupport"] = gitLfsSupport.ToString();
return endpoint;
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void GetSourceGitClone()
{
using (TestHostContext tc = new TestHostContext(this))
{
// Arrange.
string dumySourceFolder = Path.Combine(IOUtil.GetBinPath(), "SourceProviderL0");
var executionContext = GetTestExecutionContext(tc, dumySourceFolder, "master", "a596e13f5db8869f44574be0392fb8fe1e790ce4", false);
var endpoint = GetTestSourceEndpoint("https://github.com/Microsoft/vsts-agent", false, false);
var _gitCommandManager = GetDefaultGitCommandMock();
tc.SetSingleton<IGitCommandManager>(_gitCommandManager.Object);
tc.SetSingleton<IWhichUtil>(new WhichUtil());
tc.SetSingleton<IVstsAgentWebProxy>(new VstsAgentWebProxy());
GitSourceProvider gitSourceProvider = new ExternalGitSourceProvider();
gitSourceProvider.Initialize(tc);
gitSourceProvider.SetVariablesInEndpoint(executionContext.Object, endpoint);
// Act.
gitSourceProvider.GetSourceAsync(executionContext.Object, endpoint, default(CancellationToken)).GetAwaiter().GetResult();
// Assert.
_gitCommandManager.Verify(x => x.GitInit(executionContext.Object, dumySourceFolder));
_gitCommandManager.Verify(x => x.GitRemoteAdd(executionContext.Object, dumySourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
_gitCommandManager.Verify(x => x.GitRemoteSetUrl(executionContext.Object, dumySourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
_gitCommandManager.Verify(x => x.GitRemoteSetPushUrl(executionContext.Object, dumySourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
_gitCommandManager.Verify(x => x.GitCheckout(executionContext.Object, dumySourceFolder, "a596e13f5db8869f44574be0392fb8fe1e790ce4", It.IsAny<CancellationToken>()));
}
}
        /// <summary>
        /// When a .git folder with a config file already exists in the sources directory,
        /// GetSourceAsync takes the fetch path instead of cloning: auto-GC is disabled, the
        /// remote URL is temporarily set to the credential-embedded URL for the fetch, then
        /// reset to the plain URL, and the requested commit is checked out.
        /// </summary>
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Worker")]
        public void GetSourceGitFetch()
        {
            using (TestHostContext tc = new TestHostContext(this))
            {
                var trace = tc.GetTrace();
                // Arrange.
                string dumySourceFolder = Path.Combine(IOUtil.GetBinPath(), "SourceProviderL0");
                try
                {
                    // Fake an existing repository: .git folder containing a config file.
                    Directory.CreateDirectory(dumySourceFolder);
                    string dumyGitFolder = Path.Combine(dumySourceFolder, ".git");
                    Directory.CreateDirectory(dumyGitFolder);
                    string dumyGitConfig = Path.Combine(dumyGitFolder, "config");
                    File.WriteAllText(dumyGitConfig, "test git confg file");
                    var executionContext = GetTestExecutionContext(tc, dumySourceFolder, "master", "a596e13f5db8869f44574be0392fb8fe1e790ce4", false);
                    var endpoint = GetTestSourceEndpoint("https://github.com/Microsoft/vsts-agent", false, false);
                    var _gitCommandManager = GetDefaultGitCommandMock();
                    tc.SetSingleton<IGitCommandManager>(_gitCommandManager.Object);
                    tc.SetSingleton<IWhichUtil>(new WhichUtil());
                    tc.SetSingleton<IVstsAgentWebProxy>(new VstsAgentWebProxy());
                    GitSourceProvider gitSourceProvider = new ExternalGitSourceProvider();
                    gitSourceProvider.Initialize(tc);
                    gitSourceProvider.SetVariablesInEndpoint(executionContext.Object, endpoint);
                    // Act.
                    gitSourceProvider.GetSourceAsync(executionContext.Object, endpoint, default(CancellationToken)).GetAwaiter().GetResult();
                    // Assert.
                    _gitCommandManager.Verify(x => x.GitDisableAutoGC(executionContext.Object, dumySourceFolder));
                    // Credentials are URL-encoded into the remote URL for the fetch ("!" -> "%21")...
                    _gitCommandManager.Verify(x => x.GitRemoteSetUrl(executionContext.Object, dumySourceFolder, "origin", "https://someuser:SomePassword%21@github.com/Microsoft/vsts-agent"));
                    _gitCommandManager.Verify(x => x.GitRemoteSetPushUrl(executionContext.Object, dumySourceFolder, "origin", "https://someuser:SomePassword%21@github.com/Microsoft/vsts-agent"));
                    _gitCommandManager.Verify(x => x.GitFetch(executionContext.Object, dumySourceFolder, "origin", It.IsAny<int>(), It.IsAny<List<string>>(), It.IsAny<string>(), It.IsAny<CancellationToken>()));
                    // ...and reset to the plain URL afterwards so credentials are not left on disk.
                    _gitCommandManager.Verify(x => x.GitRemoteSetUrl(executionContext.Object, dumySourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
                    _gitCommandManager.Verify(x => x.GitRemoteSetPushUrl(executionContext.Object, dumySourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
                    _gitCommandManager.Verify(x => x.GitCheckout(executionContext.Object, dumySourceFolder, "a596e13f5db8869f44574be0392fb8fe1e790ce4", It.IsAny<CancellationToken>()));
                }
                finally
                {
                    // Always remove the fake repository created above.
                    IOUtil.DeleteDirectory(dumySourceFolder, CancellationToken.None);
                }
            }
        }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void GetSourceGitClonePR()
{
using (TestHostContext tc = new TestHostContext(this))
{
var trace = tc.GetTrace();
// Arrange.
string dumySourceFolder = Path.Combine(IOUtil.GetBinPath(), "SourceProviderL0");
var executionContext = GetTestExecutionContext(tc, dumySourceFolder, "refs/pull/12345", "a596e13f5db8869f44574be0392fb8fe1e790ce4", false);
var endpoint = GetTestSourceEndpoint("https://github.com/Microsoft/vsts-agent", false, false);
var _gitCommandManager = GetDefaultGitCommandMock();
tc.SetSingleton<IGitCommandManager>(_gitCommandManager.Object);
tc.SetSingleton<IWhichUtil>(new WhichUtil());
tc.SetSingleton<IVstsAgentWebProxy>(new VstsAgentWebProxy());
GitSourceProvider gitSourceProvider = new ExternalGitSourceProvider();
gitSourceProvider.Initialize(tc);
gitSourceProvider.SetVariablesInEndpoint(executionContext.Object, endpoint);
// Act.
gitSourceProvider.GetSourceAsync(executionContext.Object, endpoint, default(CancellationToken)).GetAwaiter().GetResult();
// Assert.
_gitCommandManager.Verify(x => x.GitInit(executionContext.Object, dumySourceFolder));
_gitCommandManager.Verify(x => x.GitRemoteAdd(executionContext.Object, dumySourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
_gitCommandManager.Verify(x => x.GitFetch(executionContext.Object, dumySourceFolder, "origin", It.IsAny<int>(), new List<string>() { "+refs/heads/*:refs/remotes/origin/*", "+refs/pull/12345:refs/remotes/pull/12345" }, It.IsAny<string>(), It.IsAny<CancellationToken>()));
_gitCommandManager.Verify(x => x.GitRemoteSetUrl(executionContext.Object, dumySourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
_gitCommandManager.Verify(x => x.GitRemoteSetPushUrl(executionContext.Object, dumySourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
_gitCommandManager.Verify(x => x.GitCheckout(executionContext.Object, dumySourceFolder, It.Is<string>(s => s.Equals("refs/remotes/pull/12345")), It.IsAny<CancellationToken>()));
}
}
        /// <summary>
        /// When a local repository already exists and the source branch is a PR merge ref,
        /// GetSourceAsync fetches the merge ref alongside the branch refs (with credentials
        /// embedded for the fetch, then reset) and checks out refs/remotes/pull/12345/merge.
        /// </summary>
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Worker")]
        public void GetSourceGitFetchPR()
        {
            using (TestHostContext tc = new TestHostContext(this))
            {
                var trace = tc.GetTrace();
                // Arrange.
                string dumySourceFolder = Path.Combine(IOUtil.GetBinPath(), "SourceProviderL0");
                try
                {
                    // Fake an existing repository: .git folder containing a config file.
                    Directory.CreateDirectory(dumySourceFolder);
                    string dumyGitFolder = Path.Combine(dumySourceFolder, ".git");
                    Directory.CreateDirectory(dumyGitFolder);
                    string dumyGitConfig = Path.Combine(dumyGitFolder, "config");
                    File.WriteAllText(dumyGitConfig, "test git confg file");
                    var executionContext = GetTestExecutionContext(tc, dumySourceFolder, "refs/pull/12345/merge", "a596e13f5db8869f44574be0392fb8fe1e790ce4", false);
                    var endpoint = GetTestSourceEndpoint("https://github.com/Microsoft/vsts-agent", false, false);
                    var _gitCommandManager = GetDefaultGitCommandMock();
                    tc.SetSingleton<IGitCommandManager>(_gitCommandManager.Object);
                    tc.SetSingleton<IWhichUtil>(new WhichUtil());
                    tc.SetSingleton<IVstsAgentWebProxy>(new VstsAgentWebProxy());
                    GitSourceProvider gitSourceProvider = new ExternalGitSourceProvider();
                    gitSourceProvider.Initialize(tc);
                    gitSourceProvider.SetVariablesInEndpoint(executionContext.Object, endpoint);
                    // Act.
                    gitSourceProvider.GetSourceAsync(executionContext.Object, endpoint, default(CancellationToken)).GetAwaiter().GetResult();
                    // Assert.
                    _gitCommandManager.Verify(x => x.GitDisableAutoGC(executionContext.Object, dumySourceFolder));
                    // Credential-embedded URL is used for the fetch ("!" URL-encoded to "%21")...
                    _gitCommandManager.Verify(x => x.GitRemoteSetUrl(executionContext.Object, dumySourceFolder, "origin", "https://someuser:SomePassword%21@github.com/Microsoft/vsts-agent"));
                    _gitCommandManager.Verify(x => x.GitRemoteSetPushUrl(executionContext.Object, dumySourceFolder, "origin", "https://someuser:SomePassword%21@github.com/Microsoft/vsts-agent"));
                    _gitCommandManager.Verify(x => x.GitFetch(executionContext.Object, dumySourceFolder, "origin", It.IsAny<int>(), new List<string>() { "+refs/heads/*:refs/remotes/origin/*", "+refs/pull/12345/merge:refs/remotes/pull/12345/merge" }, It.IsAny<string>(), It.IsAny<CancellationToken>()));
                    // ...then the remote URL is restored to the plain repository URL.
                    _gitCommandManager.Verify(x => x.GitRemoteSetUrl(executionContext.Object, dumySourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
                    _gitCommandManager.Verify(x => x.GitRemoteSetPushUrl(executionContext.Object, dumySourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
                    _gitCommandManager.Verify(x => x.GitCheckout(executionContext.Object, dumySourceFolder, "refs/remotes/pull/12345/merge", It.IsAny<CancellationToken>()));
                }
                finally
                {
                    // Always remove the fake repository created above.
                    IOUtil.DeleteDirectory(dumySourceFolder, CancellationToken.None);
                }
            }
        }
        /// <summary>
        /// When an existing local repository's fetch URL does not match the endpoint URL,
        /// GetSourceAsync discards it and re-clones (init + remote add) from the endpoint URL,
        /// then checks out the remote-tracking branch for the requested source branch.
        /// </summary>
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Worker")]
        public void GetSourceReCloneOnUrlNotMatch()
        {
            using (TestHostContext tc = new TestHostContext(this))
            {
                var trace = tc.GetTrace();
                // Arrange.
                string dumySourceFolder = Path.Combine(IOUtil.GetBinPath(), "SourceProviderL0");
                try
                {
                    // Fake an existing repository: .git folder containing a config file.
                    Directory.CreateDirectory(dumySourceFolder);
                    string dumyGitFolder = Path.Combine(dumySourceFolder, ".git");
                    Directory.CreateDirectory(dumyGitFolder);
                    string dumyGitConfig = Path.Combine(dumyGitFolder, "config");
                    File.WriteAllText(dumyGitConfig, "test git confg file");
                    var executionContext = GetTestExecutionContext(tc, dumySourceFolder, "refs/heads/users/user1", "", true);
                    var endpoint = GetTestSourceEndpoint("https://github.com/Microsoft/vsts-agent", false, false);
                    var _gitCommandManager = GetDefaultGitCommandMock();
                    // Override the default mock so the existing repo reports a DIFFERENT
                    // fetch URL than the endpoint — this is what should force the re-clone.
                    _gitCommandManager
                        .Setup(x => x.GitGetFetchUrl(It.IsAny<IExecutionContext>(), It.IsAny<string>()))
                        .Returns(Task.FromResult<Uri>(new Uri("https://github.com/Microsoft/vsts-another-agent")));
                    tc.SetSingleton<IGitCommandManager>(_gitCommandManager.Object);
                    tc.SetSingleton<IWhichUtil>(new WhichUtil());
                    tc.SetSingleton<IVstsAgentWebProxy>(new VstsAgentWebProxy());
                    GitSourceProvider gitSourceProvider = new ExternalGitSourceProvider();
                    gitSourceProvider.Initialize(tc);
                    gitSourceProvider.SetVariablesInEndpoint(executionContext.Object, endpoint);
                    // Act.
                    gitSourceProvider.GetSourceAsync(executionContext.Object, endpoint, default(CancellationToken)).GetAwaiter().GetResult();
                    // Assert: init/remote-add indicate the clone path was taken; with no source
                    // version, the remote-tracking branch for refs/heads/users/user1 is checked out.
                    _gitCommandManager.Verify(x => x.GitInit(executionContext.Object, dumySourceFolder));
                    _gitCommandManager.Verify(x => x.GitRemoteAdd(executionContext.Object, dumySourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
                    _gitCommandManager.Verify(x => x.GitCheckout(executionContext.Object, dumySourceFolder, "refs/remotes/origin/users/user1", It.IsAny<CancellationToken>()));
                }
                finally
                {
                    // Always remove the fake repository created above.
                    IOUtil.DeleteDirectory(dumySourceFolder, CancellationToken.None);
                }
            }
        }
        /// <summary>
        /// When the endpoint requests a clean sync on an existing repository, GetSourceAsync
        /// runs git clean and git reset before the fetch, then fetches with credentials
        /// embedded, resets the remote URL, and checks out the requested ref.
        /// </summary>
        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Worker")]
        public void GetSourceGitFetchWithClean()
        {
            using (TestHostContext tc = new TestHostContext(this))
            {
                var trace = tc.GetTrace();
                // Arrange.
                string dumySourceFolder = Path.Combine(IOUtil.GetBinPath(), "SourceProviderL0");
                try
                {
                    // Fake an existing repository: .git folder containing a config file.
                    Directory.CreateDirectory(dumySourceFolder);
                    string dumyGitFolder = Path.Combine(dumySourceFolder, ".git");
                    Directory.CreateDirectory(dumyGitFolder);
                    string dumyGitConfig = Path.Combine(dumyGitFolder, "config");
                    File.WriteAllText(dumyGitConfig, "test git confg file");
                    var executionContext = GetTestExecutionContext(tc, dumySourceFolder, "refs/remotes/origin/master", "", false);
                    // clean: true — the only test in this class exercising the clean option.
                    var endpoint = GetTestSourceEndpoint("https://github.com/Microsoft/vsts-agent", true, false);
                    var _gitCommandManager = GetDefaultGitCommandMock();
                    tc.SetSingleton<IGitCommandManager>(_gitCommandManager.Object);
                    tc.SetSingleton<IWhichUtil>(new WhichUtil());
                    tc.SetSingleton<IVstsAgentWebProxy>(new VstsAgentWebProxy());
                    GitSourceProvider gitSourceProvider = new ExternalGitSourceProvider();
                    gitSourceProvider.Initialize(tc);
                    gitSourceProvider.SetVariablesInEndpoint(executionContext.Object, endpoint);
                    // Act.
                    gitSourceProvider.GetSourceAsync(executionContext.Object, endpoint, default(CancellationToken)).GetAwaiter().GetResult();
                    // Assert: clean + reset happen on the fetch path.
                    _gitCommandManager.Verify(x => x.GitClean(executionContext.Object, dumySourceFolder));
                    _gitCommandManager.Verify(x => x.GitReset(executionContext.Object, dumySourceFolder));
                    _gitCommandManager.Verify(x => x.GitDisableAutoGC(executionContext.Object, dumySourceFolder));
                    // Credential-embedded URL is used for the fetch, then restored to the plain URL.
                    _gitCommandManager.Verify(x => x.GitRemoteSetUrl(executionContext.Object, dumySourceFolder, "origin", It.Is<string>(s => s.Equals("https://someuser:SomePassword%21@github.com/Microsoft/vsts-agent"))));
                    _gitCommandManager.Verify(x => x.GitRemoteSetPushUrl(executionContext.Object, dumySourceFolder, "origin", It.Is<string>(s => s.Equals("https://someuser:SomePassword%21@github.com/Microsoft/vsts-agent"))));
                    _gitCommandManager.Verify(x => x.GitFetch(executionContext.Object, dumySourceFolder, "origin", It.IsAny<int>(), It.IsAny<List<string>>(), It.IsAny<string>(), It.IsAny<CancellationToken>()));
                    _gitCommandManager.Verify(x => x.GitRemoteSetUrl(executionContext.Object, dumySourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
                    _gitCommandManager.Verify(x => x.GitRemoteSetPushUrl(executionContext.Object, dumySourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
                    _gitCommandManager.Verify(x => x.GitCheckout(executionContext.Object, dumySourceFolder, "refs/remotes/origin/master", It.IsAny<CancellationToken>()));
                }
                finally
                {
                    // Always remove the fake repository created above.
                    IOUtil.DeleteDirectory(dumySourceFolder, CancellationToken.None);
                }
            }
        }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void GetSourceGitShallowFetch()
{
    using (TestHostContext tc = new TestHostContext(this))
    {
        var trace = tc.GetTrace();
        // Arrange: an empty source folder and an endpoint configured with fetch depth 1.
        string sourceFolder = Path.Combine(IOUtil.GetBinPath(), "SourceProviderL0");
        try
        {
            Directory.CreateDirectory(sourceFolder);

            var executionContext = GetTestExecutionContext(tc, sourceFolder, "refs/remotes/origin/master", "", false);
            var endpoint = GetTestSourceEndpoint("https://github.com/Microsoft/vsts-agent", false, false, false, 1);
            var gitCommandManager = GetDefaultGitCommandMock();
            tc.SetSingleton<IGitCommandManager>(gitCommandManager.Object);
            tc.SetSingleton<IWhichUtil>(new WhichUtil());
            tc.SetSingleton<IVstsAgentWebProxy>(new VstsAgentWebProxy());

            GitSourceProvider gitSourceProvider = new ExternalGitSourceProvider();
            gitSourceProvider.Initialize(tc);
            gitSourceProvider.SetVariablesInEndpoint(executionContext.Object, endpoint);

            // Act.
            gitSourceProvider.GetSourceAsync(executionContext.Object, endpoint, default(CancellationToken)).GetAwaiter().GetResult();

            // Assert: fresh init/remote-add, a fetch carrying depth 1, then checkout.
            gitCommandManager.Verify(x => x.GitInit(executionContext.Object, sourceFolder));
            gitCommandManager.Verify(x => x.GitRemoteAdd(executionContext.Object, sourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
            gitCommandManager.Verify(x => x.GitFetch(executionContext.Object, sourceFolder, "origin", 1, It.IsAny<List<string>>(), It.IsAny<string>(), It.IsAny<CancellationToken>()));
            gitCommandManager.Verify(x => x.GitCheckout(executionContext.Object, sourceFolder, "refs/remotes/origin/master", It.IsAny<CancellationToken>()));
        }
        finally
        {
            IOUtil.DeleteDirectory(sourceFolder, CancellationToken.None);
        }
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void GetSourceGitFetchWithLFS()
{
    using (TestHostContext tc = new TestHostContext(this))
    {
        var trace = tc.GetTrace();
        // Arrange: an empty source folder and an endpoint with the LFS option enabled.
        string sourceFolder = Path.Combine(IOUtil.GetBinPath(), "SourceProviderL0");
        try
        {
            Directory.CreateDirectory(sourceFolder);

            var executionContext = GetTestExecutionContext(tc, sourceFolder, "refs/remotes/origin/master", "", false);
            var endpoint = GetTestSourceEndpoint("https://github.com/Microsoft/vsts-agent", false, false, true);
            var gitCommandManager = GetDefaultGitCommandMock();
            tc.SetSingleton<IGitCommandManager>(gitCommandManager.Object);
            tc.SetSingleton<IWhichUtil>(new WhichUtil());
            tc.SetSingleton<IVstsAgentWebProxy>(new VstsAgentWebProxy());

            GitSourceProvider gitSourceProvider = new ExternalGitSourceProvider();
            gitSourceProvider.Initialize(tc);
            gitSourceProvider.SetVariablesInEndpoint(executionContext.Object, endpoint);

            // Act.
            gitSourceProvider.GetSourceAsync(executionContext.Object, endpoint, default(CancellationToken)).GetAwaiter().GetResult();

            // Assert: LFS is installed and its credentialed URLs configured, and both
            // the LFS fetch and the regular fetch happen before checkout.
            gitCommandManager.Verify(x => x.GitInit(executionContext.Object, sourceFolder));
            gitCommandManager.Verify(x => x.GitRemoteAdd(executionContext.Object, sourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
            gitCommandManager.Verify(x => x.GitLFSInstall(executionContext.Object, sourceFolder));
            gitCommandManager.Verify(x => x.GitConfig(executionContext.Object, sourceFolder, "remote.origin.lfsurl", "https://someuser:SomePassword%21@github.com/Microsoft/vsts-agent.git/info/lfs"));
            gitCommandManager.Verify(x => x.GitConfig(executionContext.Object, sourceFolder, "remote.origin.lfspushurl", "https://someuser:SomePassword%21@github.com/Microsoft/vsts-agent.git/info/lfs"));
            gitCommandManager.Verify(x => x.GitLFSFetch(executionContext.Object, sourceFolder, "origin", It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()));
            gitCommandManager.Verify(x => x.GitFetch(executionContext.Object, sourceFolder, "origin", It.IsAny<int>(), It.IsAny<List<string>>(), It.IsAny<string>(), It.IsAny<CancellationToken>()));
            gitCommandManager.Verify(x => x.GitCheckout(executionContext.Object, sourceFolder, "refs/remotes/origin/master", It.IsAny<CancellationToken>()));
        }
        finally
        {
            IOUtil.DeleteDirectory(sourceFolder, CancellationToken.None);
        }
    }
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void GetSourcePreferFeatureVariables()
{
    using (TestHostContext tc = new TestHostContext(this))
    {
        var trace = tc.GetTrace();
        // Arrange: the endpoint itself requests neither LFS nor a fetch depth
        // (depth 0), but feature variables on the execution context enable both.
        string sourceFolder = Path.Combine(IOUtil.GetBinPath(), "SourceProviderL0");
        try
        {
            Directory.CreateDirectory(sourceFolder);

            var executionContext = GetTestExecutionContext(tc, sourceFolder, "refs/remotes/origin/master", "", false);
            executionContext.Object.Variables.Set("agent.source.git.lfs", "true");
            executionContext.Object.Variables.Set("agent.source.git.shallowFetchDepth", "10");
            var endpoint = GetTestSourceEndpoint("https://github.com/Microsoft/vsts-agent", false, false, false, 0);
            var gitCommandManager = GetDefaultGitCommandMock();
            tc.SetSingleton<IGitCommandManager>(gitCommandManager.Object);
            tc.SetSingleton<IWhichUtil>(new WhichUtil());
            tc.SetSingleton<IVstsAgentWebProxy>(new VstsAgentWebProxy());

            GitSourceProvider gitSourceProvider = new ExternalGitSourceProvider();
            gitSourceProvider.Initialize(tc);
            gitSourceProvider.SetVariablesInEndpoint(executionContext.Object, endpoint);

            // Act.
            gitSourceProvider.GetSourceAsync(executionContext.Object, endpoint, default(CancellationToken)).GetAwaiter().GetResult();

            // Assert: the variable-driven settings win -- LFS is set up and the fetch
            // uses the depth from the variable (10), not the endpoint value.
            gitCommandManager.Verify(x => x.GitInit(executionContext.Object, sourceFolder));
            gitCommandManager.Verify(x => x.GitRemoteAdd(executionContext.Object, sourceFolder, "origin", "https://github.com/Microsoft/vsts-agent"));
            gitCommandManager.Verify(x => x.GitLFSInstall(executionContext.Object, sourceFolder));
            gitCommandManager.Verify(x => x.GitConfig(executionContext.Object, sourceFolder, "remote.origin.lfsurl", "https://someuser:SomePassword%21@github.com/Microsoft/vsts-agent.git/info/lfs"));
            gitCommandManager.Verify(x => x.GitConfig(executionContext.Object, sourceFolder, "remote.origin.lfspushurl", "https://someuser:SomePassword%21@github.com/Microsoft/vsts-agent.git/info/lfs"));
            gitCommandManager.Verify(x => x.GitLFSFetch(executionContext.Object, sourceFolder, "origin", It.IsAny<string>(), It.IsAny<string>(), It.IsAny<CancellationToken>()));
            gitCommandManager.Verify(x => x.GitFetch(executionContext.Object, sourceFolder, "origin", 10, It.IsAny<List<string>>(), It.IsAny<string>(), It.IsAny<CancellationToken>()));
            gitCommandManager.Verify(x => x.GitCheckout(executionContext.Object, sourceFolder, "refs/remotes/origin/master", It.IsAny<CancellationToken>()));
        }
        finally
        {
            IOUtil.DeleteDirectory(sourceFolder, CancellationToken.None);
        }
    }
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.Collections;
using System.Management.Automation;
using System.Management.Automation.Host;
using System.Management.Automation.Internal;
using Dbg = System.Management.Automation.Diagnostics;
namespace Microsoft.PowerShell
{
/// <summary>
/// Represents all of the outstanding progress activities received by the host, and includes methods to update that state
/// upon receipt of new ProgressRecords, and to render that state into an array of strings such that ProgressPane can
/// display it.
///
/// The set of activities that we're tracking is logically a binary tree, with siblings in one branch and children in
/// another. For ease of implementation, this tree is represented as lists of lists. We use ArrayList as our list type,
/// although the generic List&lt;T&gt; would also have worked. I suspect that ArrayList is faster because there are fewer links
/// to twiddle, though I have not measured that.
///
/// This class uses lots of nearly identical helper functions to recursively traverse the tree. If I weren't so pressed
/// for time, I would see if generic methods could be used to collapse the number of traversers.
/// </summary>
internal
class PendingProgress
{
#region Updating Code
/// <summary>
/// Update the data structures that represent the outstanding progress records reported so far.
/// </summary>
/// <param name="sourceId">
/// Identifier of the source of the event. This is used as part of the "key" for matching newly received records with
/// records that have already been received. For a record to match (meaning that they refer to the same activity), both
/// the source and activity identifiers need to match.
/// </param>
/// <param name="record">
/// The ProgressRecord received that will either update the status of an activity which we are already tracking, or
/// represent a new activity that we need to track.
/// </param>
internal
void
Update(Int64 sourceId, ProgressRecord record)
{
    Dbg.Assert(record != null, "record should not be null");
    // do { } while (false) is used purely so "break" can short-circuit the update
    // while still falling through to the node-aging pass at the bottom.
    do
    {
        if (record.ParentActivityId == record.ActivityId)
        {
            // ignore malformed records.
            break;
        }

        ArrayList listWhereFound = null;
        int indexWhereFound = -1;
        ProgressNode foundNode =
            FindNodeById(sourceId, record.ActivityId, out listWhereFound, out indexWhereFound);

        if (foundNode != null)
        {
            Dbg.Assert(listWhereFound != null, "node found, but list not identified");
            Dbg.Assert(indexWhereFound >= 0, "node found, but index not returned");

            if (record.RecordType == ProgressRecordType.Completed)
            {
                RemoveNodeAndPromoteChildren(listWhereFound, indexWhereFound);
                break;
            }

            if (record.ParentActivityId == foundNode.ParentActivityId)
            {
                // record is an update to an existing activity. Copy the record data into the found node, and
                // reset the age of the node.
                foundNode.Activity = record.Activity;
                foundNode.StatusDescription = record.StatusDescription;
                foundNode.CurrentOperation = record.CurrentOperation;
                // PercentComplete is clamped at 100; Age 0 marks the node as freshest.
                foundNode.PercentComplete = Math.Min(record.PercentComplete, 100);
                foundNode.SecondsRemaining = record.SecondsRemaining;
                foundNode.Age = 0;
                break;
            }
            else
            {
                // The record's parent Id mismatches with that of the found node's. We interpret
                // this to mean that the activity represented by the record (and the found node) is
                // being "re-parented" elsewhere. So we remove the found node and treat the record
                // as a new activity.
                RemoveNodeAndPromoteChildren(listWhereFound, indexWhereFound);
            }
        }

        // At this point, the record's activity is not in the tree. So we need to add it.

        if (record.RecordType == ProgressRecordType.Completed)
        {
            // We don't track completion records that don't correspond to activities we're not
            // already tracking.
            break;
        }

        ProgressNode newNode = new ProgressNode(sourceId, record);

        // If we're adding a node, and we have no more space, then we need to pick a node to evict.
        while (_nodeCount >= maxNodeCount)
        {
            EvictNode();
        }

        if (newNode.ParentActivityId >= 0)
        {
            ProgressNode parentNode = FindNodeById(newNode.SourceId, newNode.ParentActivityId);
            if (parentNode != null)
            {
                if (parentNode.Children == null)
                {
                    parentNode.Children = new ArrayList();
                }

                AddNode(parentNode.Children, newNode);
                break;
            }

            // The parent node is not in the tree. Make the new node's parent the root,
            // and add it to the tree. If the parent ever shows up, then the next time
            // we receive a record for this activity, the parent id's won't match, and the
            // activity will be properly re-parented.

            newNode.ParentActivityId = -1;
        }

        AddNode(_topLevelNodes, newNode);
    } while (false);

    // At this point the tree is up-to-date. Make a pass to age all of the nodes

    AgeNodesAndResetStyle();
}
// Evicts one node to make room: picks the oldest leaf-most node, falling back to
// the first top-level node if (unexpectedly) no candidate is found.
private
void
EvictNode()
{
    ArrayList victimList = null;
    int victimIndex = -1;

    ProgressNode victim = FindOldestLeafmostNode(out victimList, out victimIndex);
    if (victim != null)
    {
        RemoveNode(victimList, victimIndex);
    }
    else
    {
        // Well that's a surprise. There's got to be at least one node there that's older than 0.
        Dbg.Assert(false, "Must be an old node in the tree somewhere");

        // We'll just pick the root node, then.
        RemoveNode(_topLevelNodes, 0);
    }
}
/// <summary>
/// Removes a (childless) node from the tree and decrements the node count.
/// </summary>
/// <param name="nodes">
/// List in the tree from which the node is to be removed.
/// </param>
/// <param name="indexToRemove">
/// Index into the list of the node to be removed.
/// </param>
private
void
RemoveNode(ArrayList nodes, int indexToRemove)
{
#if DEBUG || ASSERTIONS_TRACE
    // Validate the list and index *before* indexing into the list; the previous
    // version dereferenced nodes[indexToRemove] first, which would throw before
    // these asserts could ever fire.
    Dbg.Assert(nodes != null, "can't remove nodes from a null list");
    Dbg.Assert(indexToRemove < nodes.Count, "index is not in list");
    Dbg.Assert(nodes[indexToRemove] != null, "no node at specified index");
    ProgressNode nodeToRemove = (ProgressNode)nodes[indexToRemove];
    Dbg.Assert(nodeToRemove.Children == null || nodeToRemove.Children.Count == 0, "can't remove a node with children");
#endif

    nodes.RemoveAt(indexToRemove);
    --_nodeCount;

#if DEBUG || ASSERTIONS_TRACE
    // Guard changed from ASSERTIONS_ON to ASSERTIONS_TRACE: CountNodes() is only
    // compiled under DEBUG || ASSERTIONS_TRACE, so the old guard could not compile
    // when ASSERTIONS_ON was defined on its own.
    Dbg.Assert(_nodeCount == this.CountNodes(), "We've lost track of the number of nodes in the tree");
#endif
}
/// <summary>
/// Removes a node from the tree, re-inserting any children it has as siblings at
/// the removed node's position (the children are unparented in the process).
/// </summary>
/// <param name="nodes">
/// List in the tree from which the node is to be removed.
/// </param>
/// <param name="indexToRemove">
/// Index into the list of the node to be removed.
/// </param>
private
void
RemoveNodeAndPromoteChildren(ArrayList nodes, int indexToRemove)
{
    // Validate before dereferencing: the previous version indexed into the list
    // before asserting that the list was non-null and the index in range.
    Dbg.Assert(nodes != null, "can't remove nodes from a null list");
    Dbg.Assert(indexToRemove < nodes.Count, "index is not in list");

    ProgressNode nodeToRemove = (ProgressNode)nodes[indexToRemove];
    Dbg.Assert(nodeToRemove != null, "no node at specified index");
    if (nodeToRemove == null)
    {
        return;
    }

    if (nodeToRemove.Children == null)
    {
        // nothing to promote
        RemoveNode(nodes, indexToRemove);
        return;
    }

    // unparent the children. If the children are ever updated again, they will be reparented.
    for (int i = 0; i < nodeToRemove.Children.Count; ++i)
    {
        ((ProgressNode)nodeToRemove.Children[i]).ParentActivityId = -1;
    }

    // Replace the removed node with its children (as siblings, at the same spot).
    nodes.RemoveAt(indexToRemove);
    --_nodeCount;
    nodes.InsertRange(indexToRemove, nodeToRemove.Children);
#if DEBUG || ASSERTIONS_TRACE
    Dbg.Assert(_nodeCount == this.CountNodes(), "We've lost track of the number of nodes in the tree");
#endif
}
/// <summary>
/// Appends a node to the given list in the tree and bumps the running node count.
/// </summary>
/// <param name="nodes">
/// List in the tree where the node is to be added.
/// </param>
/// <param name="nodeToAdd">
/// Node to be added.
/// </param>
private
void
AddNode(ArrayList nodes, ProgressNode nodeToAdd)
{
    nodes.Add(nodeToAdd);
    ++_nodeCount;

#if DEBUG || ASSERTIONS_TRACE
    Dbg.Assert(_nodeCount == this.CountNodes(), "We've lost track of the number of nodes in the tree");
    Dbg.Assert(_nodeCount <= maxNodeCount, "Too many nodes in tree!");
#endif
}
// Visitor that remembers the oldest node seen during a traversal, along with the
// list and index where it lives. Ties go to the node visited last (>= compare).
private
class FindOldestNodeVisitor : NodeVisitor
{
    internal override
    bool
    Visit(ProgressNode node, ArrayList listWhereFound, int indexWhereFound)
    {
        if (node.Age >= _maxAgeSeen)
        {
            _maxAgeSeen = node.Age;
            FoundNode = node;
            ListWhereFound = listWhereFound;
            IndexWhereFound = indexWhereFound;
        }

        // Every node must be considered, so never abort the walk.
        return true;
    }

    internal ProgressNode FoundNode;
    internal ArrayList ListWhereFound;
    internal int IndexWhereFound = -1;

    private int _maxAgeSeen;
}
// Runs a FindOldestNodeVisitor over the given subtree and unpacks its results.
private
ProgressNode
FindOldestLeafmostNodeHelper(ArrayList treeToSearch, out ArrayList listWhereFound, out int indexWhereFound)
{
    FindOldestNodeVisitor visitor = new FindOldestNodeVisitor();
    NodeVisitor.VisitNodes(treeToSearch, visitor);

    listWhereFound = visitor.ListWhereFound;
    indexWhereFound = visitor.IndexWhereFound;

#if DEBUG || ASSERTIONS_TRACE
    if (visitor.FoundNode == null)
    {
        Dbg.Assert(listWhereFound == null, "list should be null when no node found");
        Dbg.Assert(indexWhereFound == -1, "index should indicate no node found");
        Dbg.Assert(_topLevelNodes.Count == 0, "if there is no oldest node, then the tree must be empty");
        Dbg.Assert(_nodeCount == 0, "if there is no oldest node, then the tree must be empty");
    }
#endif
    return visitor.FoundNode;
}
// Locates the oldest leaf-most node: finds the oldest node in the current scope
// and, while it has children, descends into them to look for an older descendant.
private
ProgressNode
FindOldestLeafmostNode(out ArrayList listWhereFound, out int indexWhereFound)
{
    listWhereFound = null;
    indexWhereFound = -1;

    ArrayList scope = _topLevelNodes;
    while (true)
    {
        ProgressNode found = FindOldestLeafmostNodeHelper(scope, out listWhereFound, out indexWhereFound);
        if (found == null || found.Children == null || found.Children.Count == 0)
        {
            return found;
        }

        // Keep searching among this node's children.
        scope = found.Children;
    }
}
/// <summary>
/// Convenience overload that discards the location of the found node.
/// </summary>
private
ProgressNode
FindNodeById(Int64 sourceId, int activityId)
{
    ArrayList unusedList;
    int unusedIndex;
    return FindNodeById(sourceId, activityId, out unusedList, out unusedIndex);
}
// Visitor that searches the tree for the node matching a particular
// (sourceId, activityId) pair, recording the node and its location when found.
private
class FindByIdNodeVisitor : NodeVisitor
{
    internal
    FindByIdNodeVisitor(Int64 sourceIdToFind, int activityIdToFind)
    {
        _sourceIdToFind = sourceIdToFind;
        _idToFind = activityIdToFind;
    }

    internal override
    bool
    Visit(ProgressNode node, ArrayList listWhereFound, int indexWhereFound)
    {
        if (node.ActivityId != _idToFind || node.SourceId != _sourceIdToFind)
        {
            // Not the node we want; keep walking.
            return true;
        }

        FoundNode = node;
        ListWhereFound = listWhereFound;
        IndexWhereFound = indexWhereFound;

        // Found it -- ask the traversal to stop.
        return false;
    }

    internal ProgressNode FoundNode;
    internal ArrayList ListWhereFound;
    internal int IndexWhereFound = -1;

    private int _idToFind = -1;
    private Int64 _sourceIdToFind;
}
/// <summary>
/// Finds a node with a given ActivityId in provided set of nodes. Recursively walks the set of nodes and their children.
/// </summary>
/// <param name="sourceId">
/// Identifier of the source of the record.
/// </param>
/// <param name="activityId">
/// ActivityId to search for.
/// </param>
/// <param name="listWhereFound">
/// Receives reference to the List where the found node was located, or null if no suitable node was found.
/// </param>
/// <param name="indexWhereFound">
/// Receives the index into listWhereFound that indicating where in the list the node was located, or -1 if
/// no suitable node was found.
/// </param>
/// <returns>
/// The found node, or null if no suitable node was located.
/// </returns>
private
ProgressNode
FindNodeById(Int64 sourceId, int activityId, out ArrayList listWhereFound, out int indexWhereFound)
{
    FindByIdNodeVisitor visitor = new FindByIdNodeVisitor(sourceId, activityId);
    NodeVisitor.VisitNodes(_topLevelNodes, visitor);

    listWhereFound = visitor.ListWhereFound;
    indexWhereFound = visitor.IndexWhereFound;

#if DEBUG || ASSERTIONS_TRACE
    if (visitor.FoundNode == null)
    {
        Dbg.Assert(listWhereFound == null, "list should be null when no node found");
        Dbg.Assert(indexWhereFound == -1, "index should indicate no node found");
    }
#endif
    return visitor.FoundNode;
}
/// <summary>
/// Finds the oldest node with a given rendering style that is at least as old as a given age.
/// </summary>
/// <param name="nodes">
/// List of nodes to search. Child lists of each node in this list will also be searched.
/// </param>
/// <param name="oldestSoFar">
/// The minimum age of the node to be located. To find the oldest node, pass 0.
/// </param>
/// <param name="style">
/// The rendering style of the node to be located.
/// </param>
/// <returns>
/// The found node, or null if no suitable node was located.
/// </returns>
private
ProgressNode
FindOldestNodeOfGivenStyle(ArrayList nodes, int oldestSoFar, ProgressNode.RenderStyle style)
{
    if (nodes == null)
    {
        return null;
    }

    ProgressNode found = null;
    for (int i = 0; i < nodes.Count; ++i)
    {
        ProgressNode node = (ProgressNode)nodes[i];
        Dbg.Assert(node != null, "nodes should not contain null elements");

        // >= means ties go to the node encountered later in the traversal.
        if (node.Age >= oldestSoFar && node.Style == style)
        {
            found = node;
            oldestSoFar = found.Age;
        }

        if (node.Children != null)
        {
            ProgressNode child = FindOldestNodeOfGivenStyle(node.Children, oldestSoFar, style);
            if (child != null)
            {
                // In this universe, parents can be younger than their children. We found a child older than us.
                found = child;
                oldestSoFar = found.Age;
            }
        }
    }

#if DEBUG || ASSERTIONS_TRACE
    if (found != null)
    {
        Dbg.Assert(found.Style == style, "unexpected style");
        Dbg.Assert(found.Age >= oldestSoFar, "unexpected age");
    }
#endif

    return found;
}
// Visitor that bumps each node's age (saturating just below Int32.MaxValue) and
// restores its rendering style to the least-compressed form, FullPlus.
private
class AgeAndResetStyleVisitor : NodeVisitor
{
    internal override
    bool
    Visit(ProgressNode node, ArrayList unused, int unusedToo)
    {
        // Saturating increment so the age never overflows.
        node.Age = Math.Min(node.Age + 1, Int32.MaxValue - 1);
        node.Style = ProgressNode.RenderStyle.FullPlus;

        // Visit every node in the tree.
        return true;
    }
}
/// <summary>
/// Ages every node in the tree by one tick and resets each node's rendering style
/// to "full." This runs once per incoming ProgressRecord (see Update).
/// </summary>
private
void
AgeNodesAndResetStyle()
{
    NodeVisitor.VisitNodes(_topLevelNodes, new AgeAndResetStyleVisitor());
}
#endregion // Updating Code
#region Rendering Code
/// <summary>
/// Generates an array of strings representing as much of the outstanding progress activities as possible within the given
/// space. As more outstanding activities are collected, nodes are "compressed" (i.e. rendered in an increasing terse
/// fashion) in order to display as many as possible. Ultimately, some nodes may be compressed to the point of
/// invisibility. The oldest nodes are compressed first.
/// </summary>
/// <param name="maxWidth">
/// The maximum width (in BufferCells) that the rendering may consume.
/// </param>
/// <param name="maxHeight">
/// The maximum height (in BufferCells) that the rendering may consume.
/// </param>
/// <param name="rawUI">
/// The PSHostRawUserInterface used to gauge string widths in the rendering.
/// </param>
/// <returns>
/// An array of strings containing the textual representation of the outstanding progress activities.
/// </returns>
internal
string[]
Render(int maxWidth, int maxHeight, PSHostRawUserInterface rawUI)
{
    Dbg.Assert(_topLevelNodes != null, "Shouldn't need to render progress if no data exists");
    Dbg.Assert(maxWidth > 0, "maxWidth is too small");
    Dbg.Assert(maxHeight >= 3, "maxHeight is too small");

    if (_topLevelNodes == null || _topLevelNodes.Count <= 0)
    {
        // we have nothing to render.
        return null;
    }

    // If the full rendering is too tall, compress nodes until it fits; CompressToFit
    // reports how many nodes had to be made invisible in the process.
    int invisible = 0;
    if (TallyHeight(rawUI, maxHeight, maxWidth) > maxHeight)
    {
        invisible = CompressToFit(rawUI, maxHeight, maxWidth);
    }

    string border = StringUtil.Padding(maxWidth);

    ArrayList result = new ArrayList();
    result.Add(border);
    RenderHelper(result, _topLevelNodes, 0, maxWidth, rawUI);

    if (invisible > 0)
    {
        // Note how many activities could not be shown at all.
        string messageFormat = (invisible == 1)
            ? ProgressNodeStrings.InvisibleNodesMessageSingular
            : ProgressNodeStrings.InvisibleNodesMessagePlural;
        result.Add(" " + StringUtil.Format(messageFormat, invisible));
    }

    result.Add(border);
    return (string[])result.ToArray(typeof(string));
}
/// <summary>
/// Helper function for Render(). Renders each node in the given list, then
/// recursively renders its children at a deeper indentation.
/// </summary>
/// <param name="strings">
/// The rendered strings so far. Additional rendering will be appended.
/// </param>
/// <param name="nodes">
/// The nodes to be rendered. All child nodes will also be rendered.
/// </param>
/// <param name="indentation">
/// The current indentation level (in BufferCells).
/// </param>
/// <param name="maxWidth">
/// The maximum number of BufferCells that the rendering can consume, horizontally.
/// </param>
/// <param name="rawUI">
/// The PSHostRawUserInterface used to gauge string widths in the rendering.
/// </param>
private
void
RenderHelper(ArrayList strings, ArrayList nodes, int indentation, int maxWidth, PSHostRawUserInterface rawUI)
{
    Dbg.Assert(strings != null, "strings should not be null");
    Dbg.Assert(nodes != null, "nodes should not be null");
    if (nodes == null)
    {
        return;
    }

    foreach (ProgressNode node in nodes)
    {
        int linesBefore = strings.Count;
        node.Render(strings, indentation, maxWidth, rawUI);

        if (node.Children == null)
        {
            continue;
        }

        // Indent the children only if rendering this node actually emitted lines.
        int childIndentation = indentation + ((strings.Count > linesBefore) ? 2 : 0);
        RenderHelper(strings, node.Children, childIndentation, maxWidth, rawUI);
    }
}
// Visitor that accumulates how many buffer lines are needed to render each node,
// abandoning the walk as soon as the tally exceeds the height budget.
private
class HeightTallyer : NodeVisitor
{
    internal HeightTallyer(PSHostRawUserInterface rawUi, int maxHeight, int maxWidth)
    {
        _rawUi = rawUi;
        _maxHeight = maxHeight;
        _maxWidth = maxWidth;
    }

    internal override
    bool
    Visit(ProgressNode node, ArrayList unused, int unusedToo)
    {
        Tally += node.LinesRequiredMethod(_rawUi, _maxWidth);

        // Once we're over budget, there is no point visiting the remaining nodes.
        return Tally <= _maxHeight;
    }

    internal int Tally;

    private PSHostRawUserInterface _rawUi;
    private int _maxHeight;
    private int _maxWidth;
}
/// <summary>
/// Tallies up the number of BufferCells vertically that will be required to show all the ProgressNodes in the given
/// list, and all of their children. The tally stops counting once it exceeds maxHeight (see HeightTallyer.Visit).
/// </summary>
/// <param name="rawUi">
/// The PSHostRawUserInterface used to gauge string widths in the rendering.
/// </param>
/// <param name="maxHeight">
/// The maximum height (in BufferCells) that the rendering may consume.
/// </param>
/// <param name="maxWidth">
/// The maximum width (in BufferCells) that the rendering may consume.
/// </param>
/// <returns>
/// The vertical height (in BufferCells) that will be required to show all of the nodes in the given list.
/// </returns>
private int TallyHeight(PSHostRawUserInterface rawUi, int maxHeight, int maxWidth)
{
    HeightTallyer tallyer = new HeightTallyer(rawUi, maxHeight, maxWidth);
    NodeVisitor.VisitNodes(_topLevelNodes, tallyer);
    return tallyer.Tally;
}
#if DEBUG || ASSERTIONS_TRACE
/// <summary>
/// Debugging code. Checks that every node in the given list, and all of their
/// descendants, carries the given rendering style.
/// </summary>
/// <param name="nodes"></param>
/// <param name="style"></param>
/// <returns></returns>
private
bool
AllNodesHaveGivenStyle(ArrayList nodes, ProgressNode.RenderStyle style)
{
    if (nodes == null)
    {
        return false;
    }

    for (int i = 0; i < nodes.Count; ++i)
    {
        ProgressNode node = (ProgressNode)nodes[i];
        Dbg.Assert(node != null, "nodes should not contain null elements");

        if (node.Style != style)
        {
            return false;
        }

        // Recurse into the children, if any.
        if (node.Children != null && !AllNodesHaveGivenStyle(node.Children, style))
        {
            return false;
        }
    }

    return true;
}
/// <summary>
/// Debugging code. NodeVisitor that tallies how many nodes the traversal touches.
/// </summary>
private
class
CountingNodeVisitor : NodeVisitor
{
    internal override
    bool
    Visit(ProgressNode unused, ArrayList unusedToo, int unusedThree)
    {
        Count += 1;
        return true;
    }

    internal int Count;
}
/// <summary>
/// Debugging code. Walks the entire tree and reports how many nodes it contains.
/// </summary>
/// <returns>
/// The number of nodes in the tree.
/// </returns>
private
int
CountNodes()
{
    CountingNodeVisitor visitor = new CountingNodeVisitor();
    NodeVisitor.VisitNodes(_topLevelNodes, visitor);
    return visitor.Count;
}
#endif
/// <summary>
/// Helper function to CompressToFit. Repeatedly finds the oldest node still at
/// priorStyle and demotes it to newStyle, stopping as soon as the rendering fits
/// within maxHeight, or when no priorStyle node remains.
/// </summary>
/// <param name="rawUi">
/// The PSHostRawUserInterface used to gauge string widths in the rendering.
/// </param>
/// <param name="maxHeight">
/// The maximum height (in BufferCells) that the rendering may consume.
/// </param>
/// <param name="maxWidth">
/// The maximum width (in BufferCells) that the rendering may consume.
/// </param>
/// <param name="nodesCompressed">
/// Receives the number of nodes that were compressed. If the result of the method is false, then this will be the total
/// number of nodes being tracked (i.e. all of them will have been compressed).
/// </param>
/// <param name="priorStyle">
/// The rendering style (e.g. "compression level") that the nodes are expected to currently have.
/// </param>
/// <param name="newStyle">
/// The new rendering style that a node will have when it is compressed. If the result of the method is false, then all
/// nodes will have this rendering style.
/// </param>
/// <returns>
/// true to indicate that the nodes are compressed to the point that their rendering will fit within the constraint, or
/// false to indicate that all of the nodes are compressed to a given level, but that the rendering still can't fit
/// within the constraint.
/// </returns>
private
bool
CompressToFitHelper(
    PSHostRawUserInterface rawUi,
    int maxHeight,
    int maxWidth,
    out int nodesCompressed,
    ProgressNode.RenderStyle priorStyle,
    ProgressNode.RenderStyle newStyle)
{
    nodesCompressed = 0;
    int age = 0;

    while (true)
    {
        ProgressNode candidate = FindOldestNodeOfGivenStyle(_topLevelNodes, age, priorStyle);
        if (candidate == null)
        {
            // Every node of the prior style has already been compressed.
            break;
        }

        candidate.Style = newStyle;
        ++nodesCompressed;

        if (TallyHeight(rawUi, maxHeight, maxWidth) <= maxHeight)
        {
            return true;
        }
    }

    // All nodes have been compressed to newStyle, and it still doesn't fit.
#if DEBUG || ASSERTIONS_TRACE
    Dbg.Assert(
        nodesCompressed == CountNodes(),
        "We should have compressed every node in the tree.");
    Dbg.Assert(
        AllNodesHaveGivenStyle(_topLevelNodes, newStyle),
        "We should have compressed every node in the tree.");
#endif
    return false;
}
/// <summary>
/// "Compresses" the nodes representing the outstanding progress activities until their rendering will fit within a
/// given height, or until they are compressed to a given level. The oldest nodes are compressed first.
///
/// This is a 4-stage process -- from least compressed to "invisible". At each stage we find the oldest nodes in the
/// tree and change their rendering style to a more compact style. As soon as the rendering of the nodes will fit within
/// the maxHeight, we stop. The result is that the most recent nodes will be the least compressed, the idea being that
/// the rendering should show the most recently updated activities with the most complete rendering for them possible.
/// </summary>
/// <param name="rawUi">
/// The PSHostRawUserInterface used to gauge string widths in the rendering.
/// </param>
/// <param name="maxHeight">
/// The maximum height (in BufferCells) that the rendering may consume.
/// </param>
/// <param name="maxWidth">
/// The maximum width (in BufferCells) that the rendering may consume.
/// </param>
/// <returns>
/// The number of nodes that were made invisible during the compression.
/// </returns>
private
int
CompressToFit(PSHostRawUserInterface rawUi, int maxHeight, int maxWidth)
{
    Dbg.Assert(_topLevelNodes != null, "Shouldn't need to compress if no data exists");

    // The four previously copy-pasted stages, expressed as successive pairs in a
    // single progression from least compressed to invisible.
    ProgressNode.RenderStyle[] stages =
    {
        ProgressNode.RenderStyle.FullPlus,
        ProgressNode.RenderStyle.Full,
        ProgressNode.RenderStyle.Compact,
        ProgressNode.RenderStyle.Minimal,
        ProgressNode.RenderStyle.Invisible,
    };

    // This algorithm potentially makes many, many passes over the tree. It might be possible to optimize
    // that some, but I'm not trying to be too clever just yet.
    int nodesCompressed = 0;
    for (int i = 0; i < stages.Length - 1; ++i)
    {
        if (CompressToFitHelper(rawUi, maxHeight, maxWidth, out nodesCompressed, stages[i], stages[i + 1]))
        {
            // Only the final stage makes nodes invisible; nodes compressed in the
            // earlier stages are still rendered, so none are reported invisible.
            return (stages[i + 1] == ProgressNode.RenderStyle.Invisible) ? nodesCompressed : 0;
        }
    }

    Dbg.Assert(false, "with all nodes invisible, we should never reach this point.");
    return 0;
}
#endregion // Rendering Code
#region Utility Code
private abstract
class NodeVisitor
{
/// <summary>
/// Called for each node in the tree.
/// </summary>
/// <param name="node">
/// The node being visited.
/// </param>
/// <param name="listWhereFound">
/// The list in which the node resides.
/// </param>
/// <param name="indexWhereFound">
/// The index into listWhereFound of the node.
/// </param>
/// <returns>
/// true to continue visiting nodes, false if not.
/// </returns>
internal abstract
bool
Visit(ProgressNode node, ArrayList listWhereFound, int indexWhereFound);
internal static
void
VisitNodes(ArrayList nodes, NodeVisitor v)
{
if (nodes == null)
{
return;
}
for (int i = 0; i < nodes.Count; ++i)
{
ProgressNode node = (ProgressNode)nodes[i];
Dbg.Assert(node != null, "nodes should not contain null elements");
if (!v.Visit(node, nodes, i))
{
return;
}
if (node.Children != null)
{
VisitNodes(node.Children, v);
}
}
}
}
#endregion
        // Roots of the tree of progress activities being rendered (elements are ProgressNode).
        private ArrayList _topLevelNodes = new ArrayList();
        // Count of nodes currently in the tree; presumably capped at maxNodeCount — enforcement is outside this chunk.
        private int _nodeCount;
        // Upper bound on the number of progress nodes retained.
        private const int maxNodeCount = 128;
}
} // namespace
| |
using BEPUphysics.BroadPhaseEntries.MobileCollidables;
using BEPUphysics.EntityStateManagement;
using BEPUphysics.CollisionShapes.ConvexShapes;
using BEPUutilities;
namespace BEPUphysics.Entities.Prefabs
{
/// <summary>
/// Triangle-shaped object that can collide and move. After making an entity, add it to a Space so that the engine can manage it.
/// </summary>
public class Triangle : Entity<ConvexCollidable<TriangleShape>>
{
///<summary>
/// Gets or sets the first vertex of the triangle in local space.
///</summary>
public Vector3 LocalVertexA
{
get
{
return CollisionInformation.Shape.VertexA;
}
set
{
CollisionInformation.Shape.VertexA = value;
}
}
///<summary>
/// Gets or sets the second vertex of the triangle in local space.
///</summary>
public Vector3 LocalVertexB
{
get
{
return CollisionInformation.Shape.VertexB;
}
set
{
CollisionInformation.Shape.VertexB = value;
}
}
///<summary>
/// Gets or sets the third vertex of the triangle in local space.
///</summary>
public Vector3 LocalVertexC
{
get
{
return CollisionInformation.Shape.VertexC;
}
set
{
CollisionInformation.Shape.VertexC = value;
}
}
///<summary>
/// Gets or sets the first vertex of the triangle in world space.
///</summary>
public Vector3 VertexA
{
get
{
return Matrix3x3.Transform(CollisionInformation.Shape.VertexA, orientationMatrix) + position;
}
set
{
CollisionInformation.Shape.VertexA = Matrix3x3.TransformTranspose(value - position, orientationMatrix);
}
}
///<summary>
/// Gets or sets the second vertex of the triangle in world space.
///</summary>
public Vector3 VertexB
{
get
{
return Matrix3x3.Transform(CollisionInformation.Shape.VertexB, orientationMatrix) + position;
}
set
{
CollisionInformation.Shape.VertexB = Matrix3x3.TransformTranspose(value - position, orientationMatrix);
}
}
///<summary>
/// Gets or sets the third vertex of the triangle in world space.
///</summary>
public Vector3 VertexC
{
get
{
return Matrix3x3.Transform(CollisionInformation.Shape.VertexC, orientationMatrix) + position;
}
set
{
CollisionInformation.Shape.VertexC = Matrix3x3.TransformTranspose(value - position, orientationMatrix);
}
}
///<summary>
/// Gets or sets the sidedness of the triangle.
///</summary>
public TriangleSidedness Sidedness
{
get { return CollisionInformation.Shape.Sidedness; }
set
{
CollisionInformation.Shape.Sidedness = value;
}
}
/// <summary>
/// Constructs a dynamic triangle.
/// </summary>
/// <param name="v1">Position of the first vertex.</param>
/// <param name="v2">Position of the second vertex.</param>
/// <param name="v3">Position of the third vertex.</param>
/// <param name="mass">Mass of the object.</param>
public Triangle(Vector3 v1, Vector3 v2, Vector3 v3, float mass)
{
Vector3 center;
var shape = new TriangleShape(v1, v2, v3, out center);
Initialize(new ConvexCollidable<TriangleShape>(shape), mass);
Position = center;
}
/// <summary>
/// Constructs a nondynamic triangle.
/// </summary>
/// <param name="v1">Position of the first vertex.</param>
/// <param name="v2">Position of the second vertex.</param>
/// <param name="v3">Position of the third vertex.</param>
public Triangle(Vector3 v1, Vector3 v2, Vector3 v3)
{
Vector3 center;
var shape = new TriangleShape(v1, v2, v3, out center);
Initialize(new ConvexCollidable<TriangleShape>(shape));
Position = center;
}
/// <summary>
/// Constructs a dynamic triangle.
/// </summary>
/// <param name="pos">Position where the triangle is initialy centered.</param>
/// <param name="v1">Position of the first vertex.</param>
/// <param name="v2">Position of the second vertex.</param>
/// <param name="v3">Position of the third vertex.</param>
/// <param name="mass">Mass of the object.</param>
public Triangle(Vector3 pos, Vector3 v1, Vector3 v2, Vector3 v3, float mass)
: this(v1, v2, v3, mass)
{
Position = pos;
}
/// <summary>
/// Constructs a nondynamic triangle.
/// </summary>
/// <param name="pos">Position where the triangle is initially centered.</param>
/// <param name="v1">Position of the first vertex.</param>
/// <param name="v2">Position of the second vertex.</param>
/// <param name="v3">Position of the third vertex.</param>
public Triangle(Vector3 pos, Vector3 v1, Vector3 v2, Vector3 v3)
: this(v1, v2, v3)
{
Position = pos;
}
/// <summary>
/// Constructs a dynamic triangle.
/// </summary>
/// <param name="motionState">Motion state specifying the entity's initial state.</param>
/// <param name="v1">Position of the first vertex.</param>
/// <param name="v2">Position of the second vertex.</param>
/// <param name="v3">Position of the third vertex.</param>
/// <param name="mass">Mass of the object.</param>
public Triangle(MotionState motionState, Vector3 v1, Vector3 v2, Vector3 v3, float mass)
: this(v1, v2, v3, mass)
{
MotionState = motionState;
}
/// <summary>
/// Constructs a nondynamic triangle.
/// </summary>
/// <param name="motionState">Motion state specifying the entity's initial state.</param>
/// <param name="v1">Position of the first vertex.</param>
/// <param name="v2">Position of the second vertex.</param>
/// <param name="v3">Position of the third vertex.</param>
public Triangle(MotionState motionState, Vector3 v1, Vector3 v2, Vector3 v3)
: this(v1, v2, v3)
{
MotionState = motionState;
}
}
}
| |
#region Copyright notice and license
// Copyright 2015 gRPC authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Grpc.Core.Logging;
using Grpc.Core.Profiling;
using Grpc.Core.Utils;
namespace Grpc.Core.Internal
{
    /// <summary>
    /// Pool of threads polling on a set of completions queues.
    /// </summary>
    internal class GrpcThreadPool
    {
        static readonly ILogger Logger = GrpcEnvironment.Logger.ForType<GrpcThreadPool>();
        // Per-iteration sleep and total budget used when waiting for queued continuations to finish on shutdown.
        const int FinishContinuationsSleepMillis = 10;
        const int MaxFinishContinuationsSleepTotalMillis = 10000;
        readonly GrpcEnvironment environment;
        readonly object myLock = new object();
        readonly List<Thread> threads = new List<Thread>();
        readonly int poolSize;
        readonly int completionQueueCount;
        // When true, completion callbacks run directly on the polling thread instead of the default thread pool.
        readonly bool inlineHandlers;
        // Cached delegates (one per success value) so no new delegate is allocated per completion event.
        readonly WaitCallback runCompletionQueueEventCallbackSuccess;
        readonly WaitCallback runCompletionQueueEventCallbackFailure;
        // Count of continuations handed off (or run inline) but not yet finished; consulted before thread exit.
        readonly AtomicCounter queuedContinuationCounter = new AtomicCounter();
        readonly List<BasicProfiler> threadProfilers = new List<BasicProfiler>(); // profilers assigned to threadpool threads
        bool stopRequested;
        IReadOnlyCollection<CompletionQueueSafeHandle> completionQueues;
        /// <summary>
        /// Creates a thread pool threads polling on a set of completions queues.
        /// </summary>
        /// <param name="environment">Environment.</param>
        /// <param name="poolSize">Pool size.</param>
        /// <param name="completionQueueCount">Completion queue count.</param>
        /// <param name="inlineHandlers">Handler inlining.</param>
        public GrpcThreadPool(GrpcEnvironment environment, int poolSize, int completionQueueCount, bool inlineHandlers)
        {
            this.environment = environment;
            this.poolSize = poolSize;
            this.completionQueueCount = completionQueueCount;
            this.inlineHandlers = inlineHandlers;
            GrpcPreconditions.CheckArgument(poolSize >= completionQueueCount,
                "Thread pool size cannot be smaller than the number of completion queues used.");
            this.runCompletionQueueEventCallbackSuccess = new WaitCallback((callback) => RunCompletionQueueEventCallback((IOpCompletionCallback) callback, true));
            this.runCompletionQueueEventCallbackFailure = new WaitCallback((callback) => RunCompletionQueueEventCallback((IOpCompletionCallback) callback, false));
        }
        /// <summary>
        /// Creates the completion queues and starts the polling threads. May only be called once.
        /// </summary>
        public void Start()
        {
            lock (myLock)
            {
                GrpcPreconditions.CheckState(completionQueues == null, "Already started.");
                completionQueues = CreateCompletionQueueList(environment, completionQueueCount);
                for (int i = 0; i < poolSize; i++)
                {
                    // A profiler is attached only if one was registered for this thread index.
                    var optionalProfiler = i < threadProfilers.Count ? threadProfilers[i] : null;
                    threads.Add(CreateAndStartThread(i, optionalProfiler));
                }
            }
        }
        /// <summary>
        /// Requests shutdown of all completion queues and returns a task that completes once all
        /// polling threads have exited and the queues have been disposed.
        /// </summary>
        public Task StopAsync()
        {
            lock (myLock)
            {
                GrpcPreconditions.CheckState(!stopRequested, "Stop already requested.");
                stopRequested = true;
                // Shutting down the queues makes cq.Next() eventually return a Shutdown event, ending each loop.
                foreach (var cq in completionQueues)
                {
                    cq.Shutdown();
                }
            }
            return Task.Run(() =>
            {
                foreach (var thread in threads)
                {
                    thread.Join();
                }
                // Queues may only be disposed after every thread polling them has exited.
                foreach (var cq in completionQueues)
                {
                    cq.Dispose();
                }
                for (int i = 0; i < threadProfilers.Count; i++)
                {
                    threadProfilers[i].Dump(string.Format("grpc_trace_thread_{0}.txt", i));
                }
            });
        }
        /// <summary>
        /// Returns true if there is at least one thread pool thread that hasn't
        /// already stopped.
        /// Threads can either stop because all completion queues shut down or
        /// because all foreground threads have already shutdown and process is
        /// going to exit.
        /// </summary>
        internal bool IsAlive
        {
            get
            {
                return threads.Any(t => t.ThreadState != ThreadState.Stopped);
            }
        }
        /// <summary>
        /// The completion queues polled by this pool; null until <c>Start</c> has been called.
        /// </summary>
        internal IReadOnlyCollection<CompletionQueueSafeHandle> CompletionQueues
        {
            get
            {
                return completionQueues;
            }
        }
        /// <summary>
        /// Creates and starts one polling thread. Threads are assigned to completion queues round-robin
        /// (threadIndex modulo queue count).
        /// </summary>
        private Thread CreateAndStartThread(int threadIndex, IProfiler optionalProfiler)
        {
            var cqIndex = threadIndex % completionQueues.Count;
            var cq = completionQueues.ElementAt(cqIndex);
            var thread = new Thread(new ThreadStart(() => RunHandlerLoop(cq, optionalProfiler)));
            thread.IsBackground = true;
            thread.Name = string.Format("grpc {0} (cq {1})", threadIndex, cqIndex);
            thread.Start();
            return thread;
        }
        /// <summary>
        /// Body of the polling thread.
        /// </summary>
        private void RunHandlerLoop(CompletionQueueSafeHandle cq, IProfiler optionalProfiler)
        {
            if (optionalProfiler != null)
            {
                Profilers.SetForCurrentThread(optionalProfiler);
            }
            CompletionQueueEvent ev;
            do
            {
                ev = cq.Next();
                if (ev.type == CompletionQueueEvent.CompletionType.OpComplete)
                {
                    bool success = (ev.success != 0);
                    IntPtr tag = ev.tag;
                    try
                    {
                        var callback = cq.CompletionRegistry.Extract(tag);
                        // Count the continuation before it is queued/run so shutdown can wait for it below.
                        queuedContinuationCounter.Increment();
                        if (!inlineHandlers)
                        {
                            // Use cached delegates to avoid unnecessary allocations
                            ThreadPool.QueueUserWorkItem(success ? runCompletionQueueEventCallbackSuccess : runCompletionQueueEventCallbackFailure, callback);
                        }
                        else
                        {
                            RunCompletionQueueEventCallback(callback, success);
                        }
                    }
                    catch (Exception e)
                    {
                        Logger.Error(e, "Exception occurred while extracting event from completion registry.");
                    }
                }
            }
            while (ev.type != CompletionQueueEvent.CompletionType.Shutdown);
            // Continuations run on the default thread pool, which consists of background threads, so this
            // polling thread waits here until all queued work has finished, to prevent queued continuations
            // from being terminated prematurely on process exit.
            // NOTE(review): the polling thread is created with IsBackground = true (see CreateAndStartThread),
            // yet earlier comments described it as a foreground thread — confirm the intended lifetime semantics.
            int sleepIterations = 0;
            while (queuedContinuationCounter.Count != 0)
            {
                // Only happens on shutdown, and having pending continuations shouldn't be very common,
                // so sleeping here for a little bit is fine.
                if (sleepIterations >= MaxFinishContinuationsSleepTotalMillis / FinishContinuationsSleepMillis)
                {
                    Logger.Warning("Shutting down gRPC thread [{0}] with unfinished callbacks (Timed out waiting for callbacks to finish).",
                        Thread.CurrentThread.Name);
                    break;
                }
                Thread.Sleep(FinishContinuationsSleepMillis);
                sleepIterations ++;
            }
        }
        /// <summary>
        /// Creates <c>completionQueueCount</c> async completion queues, each with its own completion registry.
        /// </summary>
        private static IReadOnlyCollection<CompletionQueueSafeHandle> CreateCompletionQueueList(GrpcEnvironment environment, int completionQueueCount)
        {
            var list = new List<CompletionQueueSafeHandle>();
            for (int i = 0; i < completionQueueCount; i++)
            {
                var completionRegistry = new CompletionRegistry(environment, () => environment.BatchContextPool.Lease(), () => environment.RequestCallContextPool.Lease());
                list.Add(CompletionQueueSafeHandle.CreateAsync(completionRegistry));
            }
            return list.AsReadOnly();
        }
        /// <summary>
        /// Invokes a completion callback, logging (but not propagating) any exception it throws, and
        /// always decrements the queued-continuation counter when done.
        /// </summary>
        private void RunCompletionQueueEventCallback(IOpCompletionCallback callback, bool success)
        {
            try
            {
                callback.OnComplete(success);
            }
            catch (Exception e)
            {
                Logger.Error(e, "Exception occurred while invoking completion delegate");
            }
            finally
            {
                queuedContinuationCounter.Decrement();
            }
        }
    }
}
| |
using System;
using Csla;
using Invoices.DataAccess;
namespace Invoices.Business
{
    /// <summary>
    /// SupplierProductItem (editable child object).<br/>
    /// This is a generated <see cref="SupplierProductItem"/> business object.
    /// </summary>
    /// <remarks>
    /// This class is an item of <see cref="SupplierProductColl"/> collection.
    /// </remarks>
    [Serializable]
    public partial class SupplierProductItem : BusinessBase<SupplierProductItem>
    {
        #region Static Fields
        // Source of temporary ids for new, not-yet-inserted objects; decremented so temporary
        // ids are negative and cannot collide with database-assigned ids (loaded back on insert).
        private static int _lastId;
        #endregion
        #region Business Properties
        /// <summary>
        /// Maintains metadata about <see cref="ProductSupplierId"/> property.
        /// </summary>
        [NotUndoable]
        public static readonly PropertyInfo<int> ProductSupplierIdProperty = RegisterProperty<int>(p => p.ProductSupplierId, "Product Supplier Id");
        /// <summary>
        /// Gets the Product Supplier Id.
        /// </summary>
        /// <value>The Product Supplier Id.</value>
        public int ProductSupplierId
        {
            get { return GetProperty(ProductSupplierIdProperty); }
        }
        /// <summary>
        /// Maintains metadata about <see cref="ProductId"/> property.
        /// </summary>
        public static readonly PropertyInfo<Guid> ProductIdProperty = RegisterProperty<Guid>(p => p.ProductId, "Product Id");
        /// <summary>
        /// Gets or sets the Product Id.
        /// </summary>
        /// <value>The Product Id.</value>
        public Guid ProductId
        {
            get { return GetProperty(ProductIdProperty); }
            set { SetProperty(ProductIdProperty, value); }
        }
        #endregion
        #region Constructor
        /// <summary>
        /// Initializes a new instance of the <see cref="SupplierProductItem"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
        public SupplierProductItem()
        {
            // Use factory methods and do not use direct creation.
            // show the framework that this is a child object
            MarkAsChild();
        }
        #endregion
        #region Data Access
        /// <summary>
        /// Loads default values for the <see cref="SupplierProductItem"/> object properties.
        /// </summary>
        [RunLocal]
        protected override void Child_Create()
        {
            // Assign a unique negative temporary id; the real id is assigned by the database on insert.
            LoadProperty(ProductSupplierIdProperty, System.Threading.Interlocked.Decrement(ref _lastId));
            var args = new DataPortalHookArgs();
            OnCreate(args);
            base.Child_Create();
        }
        /// <summary>
        /// Loads a <see cref="SupplierProductItem"/> object from the given <see cref="SupplierProductItemDto"/>.
        /// </summary>
        /// <param name="data">The SupplierProductItemDto to use.</param>
        private void Child_Fetch(SupplierProductItemDto data)
        {
            // Value properties
            LoadProperty(ProductSupplierIdProperty, data.ProductSupplierId);
            LoadProperty(ProductIdProperty, data.ProductId);
            var args = new DataPortalHookArgs(data);
            OnFetchRead(args);
            // check all object rules and property rules
            BusinessRules.CheckRules();
        }
        /// <summary>
        /// Inserts a new <see cref="SupplierProductItem"/> object in the database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Insert(SupplierEdit parent)
        {
            var dto = new SupplierProductItemDto();
            // The foreign key to the parent supplier comes from the parent object, not from a property.
            dto.Parent_SupplierId = parent.SupplierId;
            dto.ProductId = ProductId;
            using (var dalManager = DalFactoryInvoices.GetManager())
            {
                var args = new DataPortalHookArgs(dto);
                OnInsertPre(args);
                var dal = dalManager.GetProvider<ISupplierProductItemDal>();
                using (BypassPropertyChecks)
                {
                    // The returned DTO carries database-assigned values; replace the temporary id with the real one.
                    var resultDto = dal.Insert(dto);
                    LoadProperty(ProductSupplierIdProperty, resultDto.ProductSupplierId);
                    // Re-wrap so OnInsertPost sees the result DTO rather than the request DTO.
                    args = new DataPortalHookArgs(resultDto);
                }
                OnInsertPost(args);
            }
        }
        /// <summary>
        /// Updates in the database all changes made to the <see cref="SupplierProductItem"/> object.
        /// </summary>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Update()
        {
            // Nothing to persist if the object hasn't changed.
            if (!IsDirty)
                return;
            var dto = new SupplierProductItemDto();
            dto.ProductSupplierId = ProductSupplierId;
            dto.ProductId = ProductId;
            using (var dalManager = DalFactoryInvoices.GetManager())
            {
                var args = new DataPortalHookArgs(dto);
                OnUpdatePre(args);
                var dal = dalManager.GetProvider<ISupplierProductItemDal>();
                using (BypassPropertyChecks)
                {
                    var resultDto = dal.Update(dto);
                    // Re-wrap so OnUpdatePost sees the result DTO rather than the request DTO.
                    args = new DataPortalHookArgs(resultDto);
                }
                OnUpdatePost(args);
            }
        }
        /// <summary>
        /// Self deletes the <see cref="SupplierProductItem"/> object from database.
        /// </summary>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_DeleteSelf()
        {
            using (var dalManager = DalFactoryInvoices.GetManager())
            {
                var args = new DataPortalHookArgs();
                OnDeletePre(args);
                var dal = dalManager.GetProvider<ISupplierProductItemDal>();
                using (BypassPropertyChecks)
                {
                    dal.Delete(ReadProperty(ProductSupplierIdProperty));
                }
                OnDeletePost(args);
            }
        }
        #endregion
        #region DataPortal Hooks
        // NOTE(review): OnFetchPre/OnFetchPost are declared below but not invoked by this class;
        // presumably kept for generator symmetry with other generated objects — confirm before removing.
        /// <summary>
        /// Occurs after setting all defaults for object creation.
        /// </summary>
        partial void OnCreate(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
        /// </summary>
        partial void OnDeletePre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
        /// </summary>
        partial void OnDeletePost(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after the low level fetch operation, before the data reader is destroyed.
        /// </summary>
        partial void OnFetchRead(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after setting query parameters and before the update operation.
        /// </summary>
        partial void OnUpdatePre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
        /// </summary>
        partial void OnUpdatePost(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
        /// </summary>
        partial void OnInsertPre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
        /// </summary>
        partial void OnInsertPost(DataPortalHookArgs args);
        #endregion
    }
}
| |
using System;
using System.Data;
using System.Data.SqlClient;
using Csla;
using Csla.Data;
namespace ParentLoad.Business.ERCLevel
{
    /// <summary>
    /// B09_Region_Child (editable child object).<br/>
    /// This is a generated base class of <see cref="B09_Region_Child"/> business object.
    /// </summary>
    /// <remarks>
    /// This class is an item of <see cref="B08_Region"/> collection.
    /// </remarks>
    [Serializable]
    public partial class B09_Region_Child : BusinessBase<B09_Region_Child>
    {
        #region State Fields
        // Foreign key of the parent region, cached from the data reader during Fetch.
        // Not undoable and not serialized: it is rebuilt from the parent on the server side.
        [NotUndoable]
        [NonSerialized]
        internal int region_ID1 = 0;
        #endregion
        #region Business Properties
        /// <summary>
        /// Maintains metadata about <see cref="Region_Child_Name"/> property.
        /// </summary>
        public static readonly PropertyInfo<string> Region_Child_NameProperty = RegisterProperty<string>(p => p.Region_Child_Name, "Region Child Name");
        /// <summary>
        /// Gets or sets the Region Child Name.
        /// </summary>
        /// <value>The Region Child Name.</value>
        public string Region_Child_Name
        {
            get { return GetProperty(Region_Child_NameProperty); }
            set { SetProperty(Region_Child_NameProperty, value); }
        }
        #endregion
        #region Factory Methods
        /// <summary>
        /// Factory method. Creates a new <see cref="B09_Region_Child"/> object.
        /// </summary>
        /// <returns>A reference to the created <see cref="B09_Region_Child"/> object.</returns>
        internal static B09_Region_Child NewB09_Region_Child()
        {
            return DataPortal.CreateChild<B09_Region_Child>();
        }
        /// <summary>
        /// Factory method. Loads a <see cref="B09_Region_Child"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        /// <returns>A reference to the fetched <see cref="B09_Region_Child"/> object.</returns>
        internal static B09_Region_Child GetB09_Region_Child(SafeDataReader dr)
        {
            B09_Region_Child obj = new B09_Region_Child();
            // show the framework that this is a child object
            obj.MarkAsChild();
            obj.Fetch(dr);
            // MarkOld so the freshly fetched object is not considered new/dirty.
            obj.MarkOld();
            return obj;
        }
        #endregion
        #region Constructor
        /// <summary>
        /// Initializes a new instance of the <see cref="B09_Region_Child"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
        public B09_Region_Child()
        {
            // Use factory methods and do not use direct creation.
            // show the framework that this is a child object
            MarkAsChild();
        }
        #endregion
        #region Data Access
        /// <summary>
        /// Loads default values for the <see cref="B09_Region_Child"/> object properties.
        /// </summary>
        [Csla.RunLocal]
        protected override void Child_Create()
        {
            var args = new DataPortalHookArgs();
            OnCreate(args);
            base.Child_Create();
        }
        /// <summary>
        /// Loads a <see cref="B09_Region_Child"/> object from the given SafeDataReader.
        /// </summary>
        /// <param name="dr">The SafeDataReader to use.</param>
        private void Fetch(SafeDataReader dr)
        {
            // Value properties
            LoadProperty(Region_Child_NameProperty, dr.GetString("Region_Child_Name"));
            // parent properties
            region_ID1 = dr.GetInt32("Region_ID1");
            var args = new DataPortalHookArgs(dr);
            OnFetchRead(args);
        }
        /// <summary>
        /// Inserts a new <see cref="B09_Region_Child"/> object in the database
        /// by calling the AddB09_Region_Child stored procedure.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Insert(B08_Region parent)
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                using (var cmd = new SqlCommand("AddB09_Region_Child", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    // The parent foreign key comes from the parent object, not from a local property.
                    cmd.Parameters.AddWithValue("@Region_ID1", parent.Region_ID).DbType = DbType.Int32;
                    cmd.Parameters.AddWithValue("@Region_Child_Name", ReadProperty(Region_Child_NameProperty)).DbType = DbType.String;
                    var args = new DataPortalHookArgs(cmd);
                    OnInsertPre(args);
                    cmd.ExecuteNonQuery();
                    OnInsertPost(args);
                }
            }
        }
        /// <summary>
        /// Updates in the database all changes made to the <see cref="B09_Region_Child"/> object
        /// by calling the UpdateB09_Region_Child stored procedure.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Update(B08_Region parent)
        {
            // Nothing to persist if the object hasn't changed.
            if (!IsDirty)
                return;
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                using (var cmd = new SqlCommand("UpdateB09_Region_Child", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    cmd.Parameters.AddWithValue("@Region_ID1", parent.Region_ID).DbType = DbType.Int32;
                    cmd.Parameters.AddWithValue("@Region_Child_Name", ReadProperty(Region_Child_NameProperty)).DbType = DbType.String;
                    var args = new DataPortalHookArgs(cmd);
                    OnUpdatePre(args);
                    cmd.ExecuteNonQuery();
                    OnUpdatePost(args);
                }
            }
        }
        /// <summary>
        /// Self deletes the <see cref="B09_Region_Child"/> object from database
        /// by calling the DeleteB09_Region_Child stored procedure.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_DeleteSelf(B08_Region parent)
        {
            using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
            {
                using (var cmd = new SqlCommand("DeleteB09_Region_Child", ctx.Connection))
                {
                    cmd.CommandType = CommandType.StoredProcedure;
                    cmd.Parameters.AddWithValue("@Region_ID1", parent.Region_ID).DbType = DbType.Int32;
                    var args = new DataPortalHookArgs(cmd);
                    OnDeletePre(args);
                    cmd.ExecuteNonQuery();
                    OnDeletePost(args);
                }
            }
        }
        #endregion
        #region DataPortal Hooks
        // NOTE(review): OnFetchPre/OnFetchPost are declared below but not invoked by this class;
        // presumably kept for generator symmetry with other generated objects — confirm before removing.
        /// <summary>
        /// Occurs after setting all defaults for object creation.
        /// </summary>
        partial void OnCreate(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
        /// </summary>
        partial void OnDeletePre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
        /// </summary>
        partial void OnDeletePost(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after the low level fetch operation, before the data reader is destroyed.
        /// </summary>
        partial void OnFetchRead(DataPortalHookArgs args);
        /// <summary>
        /// Occurs after setting query parameters and before the update operation.
        /// </summary>
        partial void OnUpdatePre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
        /// </summary>
        partial void OnUpdatePost(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
        /// </summary>
        partial void OnInsertPre(DataPortalHookArgs args);
        /// <summary>
        /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
        /// </summary>
        partial void OnInsertPost(DataPortalHookArgs args);
        #endregion
    }
}
| |
/*
* Copyright 2021 Google LLC All Rights Reserved.
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file or at
* https://developers.google.com/open-source/licenses/bsd
*/
// <auto-generated>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/system_parameter.proto
// </auto-generated>
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace Google.Api {
  /// <summary>Holder for reflection information generated from google/api/system_parameter.proto</summary>
  public static partial class SystemParameterReflection {

    #region Descriptor
    /// <summary>File descriptor for google/api/system_parameter.proto</summary>
    public static pbr::FileDescriptor Descriptor {
      get { return descriptor; }
    }
    private static pbr::FileDescriptor descriptor;

    static SystemParameterReflection() {
      // The serialized FileDescriptorProto for system_parameter.proto, emitted by protoc as base64.
      byte[] descriptorData = global::System.Convert.FromBase64String(
          string.Concat(
            "CiFnb29nbGUvYXBpL3N5c3RlbV9wYXJhbWV0ZXIucHJvdG8SCmdvb2dsZS5h",
            "cGkiQgoQU3lzdGVtUGFyYW1ldGVycxIuCgVydWxlcxgBIAMoCzIfLmdvb2ds",
            "ZS5hcGkuU3lzdGVtUGFyYW1ldGVyUnVsZSJYChNTeXN0ZW1QYXJhbWV0ZXJS",
            "dWxlEhAKCHNlbGVjdG9yGAEgASgJEi8KCnBhcmFtZXRlcnMYAiADKAsyGy5n",
            "b29nbGUuYXBpLlN5c3RlbVBhcmFtZXRlciJRCg9TeXN0ZW1QYXJhbWV0ZXIS",
            "DAoEbmFtZRgBIAEoCRITCgtodHRwX2hlYWRlchgCIAEoCRIbChN1cmxfcXVl",
            "cnlfcGFyYW1ldGVyGAMgASgJQnYKDmNvbS5nb29nbGUuYXBpQhRTeXN0ZW1Q",
            "YXJhbWV0ZXJQcm90b1ABWkVnb29nbGUuZ29sYW5nLm9yZy9nZW5wcm90by9n",
            "b29nbGVhcGlzL2FwaS9zZXJ2aWNlY29uZmlnO3NlcnZpY2Vjb25maWeiAgRH",
            "QVBJYgZwcm90bzM="));
      // Binds the decoded descriptor to the generated CLR message types (SystemParameters,
      // SystemParameterRule, SystemParameter) and their field accessor names.
      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
          new pbr::FileDescriptor[] { },
          new pbr::GeneratedClrTypeInfo(null, null, new pbr::GeneratedClrTypeInfo[] {
            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Api.SystemParameters), global::Google.Api.SystemParameters.Parser, new[]{ "Rules" }, null, null, null, null),
            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Api.SystemParameterRule), global::Google.Api.SystemParameterRule.Parser, new[]{ "Selector", "Parameters" }, null, null, null, null),
            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Api.SystemParameter), global::Google.Api.SystemParameter.Parser, new[]{ "Name", "HttpHeader", "UrlQueryParameter" }, null, null, null, null)
          }));
    }
    #endregion

  }
#region Messages
/// <summary>
/// ### System parameter configuration
///
/// A system parameter is a special kind of parameter defined by the API
/// system, not by an individual API. It is typically mapped to an HTTP header
/// and/or a URL query parameter. This configuration specifies which methods
/// change the names of the system parameters.
/// </summary>
public sealed partial class SystemParameters : pb::IMessage<SystemParameters>
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
, pb::IBufferMessage
#endif
{
// NOTE(review): protoc-generated message (file sits in a "Designer generated code"
// region) — regenerate from the .proto definition rather than hand-editing.
private static readonly pb::MessageParser<SystemParameters> _parser = new pb::MessageParser<SystemParameters>(() => new SystemParameters());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<SystemParameters> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Api.SystemParameterReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public SystemParameters() {
OnConstruction();
}
partial void OnConstruction();
// Copy constructor: deep-clones the repeated "rules" field and the unknown-field set.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public SystemParameters(SystemParameters other) : this() {
rules_ = other.rules_.Clone();
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public SystemParameters Clone() {
return new SystemParameters(this);
}
/// <summary>Field number for the "rules" field.</summary>
public const int RulesFieldNumber = 1;
// Codec tag 10 = (field 1 << 3) | wire type 2 (length-delimited embedded message).
private static readonly pb::FieldCodec<global::Google.Api.SystemParameterRule> _repeated_rules_codec
= pb::FieldCodec.ForMessage(10, global::Google.Api.SystemParameterRule.Parser);
private readonly pbc::RepeatedField<global::Google.Api.SystemParameterRule> rules_ = new pbc::RepeatedField<global::Google.Api.SystemParameterRule>();
/// <summary>
/// Define system parameters.
///
/// The parameters defined here will override the default parameters
/// implemented by the system. If this field is missing from the service
/// config, default system parameters will be used. Default system parameters
/// and names is implementation-dependent.
///
/// Example: define api key for all methods
///
/// system_parameters
/// rules:
/// - selector: "*"
/// parameters:
/// - name: api_key
/// url_query_parameter: api_key
///
/// Example: define 2 api key names for a specific method.
///
/// system_parameters
/// rules:
/// - selector: "/ListShelves"
/// parameters:
/// - name: api_key
/// http_header: Api-Key1
/// - name: api_key
/// http_header: Api-Key2
///
/// **NOTE:** All service configuration rules follow "last one wins" order.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<global::Google.Api.SystemParameterRule> Rules {
get { return rules_; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as SystemParameters);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(SystemParameters other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if(!rules_.Equals(other.rules_)) return false;
// Unknown fields participate in equality so round-tripped messages compare equal.
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
hash ^= rules_.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
output.WriteRawMessage(this);
#else
rules_.WriteTo(output, _repeated_rules_codec);
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
#endif
}
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
void pb::IBufferMessage.InternalWriteTo(ref pb::WriteContext output) {
rules_.WriteTo(ref output, _repeated_rules_codec);
if (_unknownFields != null) {
_unknownFields.WriteTo(ref output);
}
}
#endif
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
size += rules_.CalculateSize(_repeated_rules_codec);
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(SystemParameters other) {
if (other == null) {
return;
}
rules_.Add(other.rules_);
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
input.ReadRawMessage(this);
#else
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 10: {
// Tag 10: one entry of the repeated "rules" field.
rules_.AddEntriesFrom(input, _repeated_rules_codec);
break;
}
}
}
#endif
}
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
void pb::IBufferMessage.InternalMergeFrom(ref pb::ParseContext input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, ref input);
break;
case 10: {
rules_.AddEntriesFrom(ref input, _repeated_rules_codec);
break;
}
}
}
}
#endif
}
/// <summary>
/// Define a system parameter rule mapping system parameter definitions to
/// methods.
/// </summary>
public sealed partial class SystemParameterRule : pb::IMessage<SystemParameterRule>
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
, pb::IBufferMessage
#endif
{
// NOTE(review): protoc-generated message — regenerate from the .proto file, do not hand-edit.
private static readonly pb::MessageParser<SystemParameterRule> _parser = new pb::MessageParser<SystemParameterRule>(() => new SystemParameterRule());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<SystemParameterRule> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Api.SystemParameterReflection.Descriptor.MessageTypes[1]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public SystemParameterRule() {
OnConstruction();
}
partial void OnConstruction();
// Copy constructor: copies the selector string and deep-clones "parameters".
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public SystemParameterRule(SystemParameterRule other) : this() {
selector_ = other.selector_;
parameters_ = other.parameters_.Clone();
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public SystemParameterRule Clone() {
return new SystemParameterRule(this);
}
/// <summary>Field number for the "selector" field.</summary>
public const int SelectorFieldNumber = 1;
private string selector_ = "";
/// <summary>
/// Selects the methods to which this rule applies. Use '*' to indicate all
/// methods in all APIs.
///
/// Refer to [selector][google.api.DocumentationRule.selector] for syntax details.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Selector {
get { return selector_; }
set {
selector_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "parameters" field.</summary>
public const int ParametersFieldNumber = 2;
// Codec tag 18 = (field 2 << 3) | wire type 2 (length-delimited embedded message).
private static readonly pb::FieldCodec<global::Google.Api.SystemParameter> _repeated_parameters_codec
= pb::FieldCodec.ForMessage(18, global::Google.Api.SystemParameter.Parser);
private readonly pbc::RepeatedField<global::Google.Api.SystemParameter> parameters_ = new pbc::RepeatedField<global::Google.Api.SystemParameter>();
/// <summary>
/// Define parameters. Multiple names may be defined for a parameter.
/// For a given method call, only one of them should be used. If multiple
/// names are used the behavior is implementation-dependent.
/// If none of the specified names are present the behavior is
/// parameter-dependent.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<global::Google.Api.SystemParameter> Parameters {
get { return parameters_; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as SystemParameterRule);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(SystemParameterRule other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Selector != other.Selector) return false;
if(!parameters_.Equals(other.parameters_)) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (Selector.Length != 0) hash ^= Selector.GetHashCode();
hash ^= parameters_.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
output.WriteRawMessage(this);
#else
if (Selector.Length != 0) {
output.WriteRawTag(10);
output.WriteString(Selector);
}
parameters_.WriteTo(output, _repeated_parameters_codec);
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
#endif
}
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
void pb::IBufferMessage.InternalWriteTo(ref pb::WriteContext output) {
if (Selector.Length != 0) {
output.WriteRawTag(10);
output.WriteString(Selector);
}
parameters_.WriteTo(ref output, _repeated_parameters_codec);
if (_unknownFields != null) {
_unknownFields.WriteTo(ref output);
}
}
#endif
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (Selector.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(Selector);
}
size += parameters_.CalculateSize(_repeated_parameters_codec);
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(SystemParameterRule other) {
if (other == null) {
return;
}
if (other.Selector.Length != 0) {
Selector = other.Selector;
}
parameters_.Add(other.parameters_);
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
input.ReadRawMessage(this);
#else
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 10: {
// Tag 10: "selector" (field 1, string).
Selector = input.ReadString();
break;
}
case 18: {
// Tag 18: one entry of the repeated "parameters" field (field 2).
parameters_.AddEntriesFrom(input, _repeated_parameters_codec);
break;
}
}
}
#endif
}
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
void pb::IBufferMessage.InternalMergeFrom(ref pb::ParseContext input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, ref input);
break;
case 10: {
Selector = input.ReadString();
break;
}
case 18: {
parameters_.AddEntriesFrom(ref input, _repeated_parameters_codec);
break;
}
}
}
}
#endif
}
/// <summary>
/// Define a parameter's name and location. The parameter may be passed as either
/// an HTTP header or a URL query parameter, and if both are passed the behavior
/// is implementation-dependent.
/// </summary>
public sealed partial class SystemParameter : pb::IMessage<SystemParameter>
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
, pb::IBufferMessage
#endif
{
// NOTE(review): protoc-generated message — regenerate from the .proto file, do not hand-edit.
private static readonly pb::MessageParser<SystemParameter> _parser = new pb::MessageParser<SystemParameter>(() => new SystemParameter());
private pb::UnknownFieldSet _unknownFields;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<SystemParameter> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Api.SystemParameterReflection.Descriptor.MessageTypes[2]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public SystemParameter() {
OnConstruction();
}
partial void OnConstruction();
// Copy constructor: all three fields are strings, so a shallow copy is a full copy.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public SystemParameter(SystemParameter other) : this() {
name_ = other.name_;
httpHeader_ = other.httpHeader_;
urlQueryParameter_ = other.urlQueryParameter_;
_unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public SystemParameter Clone() {
return new SystemParameter(this);
}
/// <summary>Field number for the "name" field.</summary>
public const int NameFieldNumber = 1;
private string name_ = "";
/// <summary>
/// Define the name of the parameter, such as "api_key" . It is case sensitive.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Name {
get { return name_; }
set {
name_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "http_header" field.</summary>
public const int HttpHeaderFieldNumber = 2;
private string httpHeader_ = "";
/// <summary>
/// Define the HTTP header name to use for the parameter. It is case
/// insensitive.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string HttpHeader {
get { return httpHeader_; }
set {
httpHeader_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "url_query_parameter" field.</summary>
public const int UrlQueryParameterFieldNumber = 3;
private string urlQueryParameter_ = "";
/// <summary>
/// Define the URL query parameter name to use for the parameter. It is case
/// sensitive.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string UrlQueryParameter {
get { return urlQueryParameter_; }
set {
urlQueryParameter_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as SystemParameter);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(SystemParameter other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Name != other.Name) return false;
if (HttpHeader != other.HttpHeader) return false;
if (UrlQueryParameter != other.UrlQueryParameter) return false;
return Equals(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (Name.Length != 0) hash ^= Name.GetHashCode();
if (HttpHeader.Length != 0) hash ^= HttpHeader.GetHashCode();
if (UrlQueryParameter.Length != 0) hash ^= UrlQueryParameter.GetHashCode();
if (_unknownFields != null) {
hash ^= _unknownFields.GetHashCode();
}
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
output.WriteRawMessage(this);
#else
// Tags 10/18/26 = fields 1/2/3, all wire type 2 (length-delimited string).
if (Name.Length != 0) {
output.WriteRawTag(10);
output.WriteString(Name);
}
if (HttpHeader.Length != 0) {
output.WriteRawTag(18);
output.WriteString(HttpHeader);
}
if (UrlQueryParameter.Length != 0) {
output.WriteRawTag(26);
output.WriteString(UrlQueryParameter);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(output);
}
#endif
}
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
void pb::IBufferMessage.InternalWriteTo(ref pb::WriteContext output) {
if (Name.Length != 0) {
output.WriteRawTag(10);
output.WriteString(Name);
}
if (HttpHeader.Length != 0) {
output.WriteRawTag(18);
output.WriteString(HttpHeader);
}
if (UrlQueryParameter.Length != 0) {
output.WriteRawTag(26);
output.WriteString(UrlQueryParameter);
}
if (_unknownFields != null) {
_unknownFields.WriteTo(ref output);
}
}
#endif
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (Name.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
}
if (HttpHeader.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(HttpHeader);
}
if (UrlQueryParameter.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(UrlQueryParameter);
}
if (_unknownFields != null) {
size += _unknownFields.CalculateSize();
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(SystemParameter other) {
if (other == null) {
return;
}
if (other.Name.Length != 0) {
Name = other.Name;
}
if (other.HttpHeader.Length != 0) {
HttpHeader = other.HttpHeader;
}
if (other.UrlQueryParameter.Length != 0) {
UrlQueryParameter = other.UrlQueryParameter;
}
_unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
input.ReadRawMessage(this);
#else
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input);
break;
case 10: {
Name = input.ReadString();
break;
}
case 18: {
HttpHeader = input.ReadString();
break;
}
case 26: {
UrlQueryParameter = input.ReadString();
break;
}
}
}
#endif
}
#if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
void pb::IBufferMessage.InternalMergeFrom(ref pb::ParseContext input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
_unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, ref input);
break;
case 10: {
Name = input.ReadString();
break;
}
case 18: {
HttpHeader = input.ReadString();
break;
}
case 26: {
UrlQueryParameter = input.ReadString();
break;
}
}
}
}
#endif
}
#endregion
}
#endregion Designer generated code
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
using System.Collections.Generic;
using SysDiag = System.Diagnostics;
using System.IO;
using UnityEngine;
namespace HoloToolkit.Unity.SpatialMapping
{
/// <summary>
/// SimpleMeshSerializer converts a UnityEngine.Mesh object to and from an array of bytes.
/// This class saves minimal mesh data (vertices and triangle indices) in the following format:
/// File header: vertex count (32 bit integer), triangle count (32 bit integer)
/// Vertex list: vertex.x, vertex.y, vertex.z (all 32 bit float)
/// Triangle index list: 32 bit integers
/// </summary>
public static class SimpleMeshSerializer
{
    /// <summary>
    /// The mesh header consists of two 32 bit integers (vertex count and
    /// triangle index count). A const: this is a fixed file-format value,
    /// not mutable state (was a writable static field).
    /// </summary>
    private const int HeaderSize = sizeof(int) * 2;

    /// <summary>
    /// Serializes a list of Mesh objects into a byte array.
    /// </summary>
    /// <param name="meshes">List of Mesh objects to be serialized.</param>
    /// <returns>Binary representation of the Mesh objects.</returns>
    public static byte[] Serialize(IEnumerable<Mesh> meshes)
    {
        using (MemoryStream stream = new MemoryStream())
        {
            using (BinaryWriter writer = new BinaryWriter(stream))
            {
                foreach (Mesh mesh in meshes)
                {
                    WriteMesh(writer, mesh);
                }
            }

            // ToArray is valid even after the writer has closed the stream and,
            // unlike the previous Position = 0 + Stream.Read pattern, cannot
            // silently leave the buffer partially filled (Read's return value
            // was being ignored).
            return stream.ToArray();
        }
    }

    /// <summary>
    /// Serializes a list of MeshFilter objects into a byte array.
    /// Transforms vertices into world space before writing to the file.
    /// </summary>
    /// <param name="meshes">List of MeshFilter objects to be serialized.</param>
    /// <returns>Binary representation of the Mesh objects.</returns>
    public static byte[] Serialize(IEnumerable<MeshFilter> meshes)
    {
        using (MemoryStream stream = new MemoryStream())
        {
            using (BinaryWriter writer = new BinaryWriter(stream))
            {
                foreach (MeshFilter meshFilter in meshes)
                {
                    WriteMesh(writer, meshFilter.sharedMesh, meshFilter.transform);
                }
            }

            return stream.ToArray();
        }
    }

    /// <summary>
    /// Deserializes a list of Mesh objects from the provided byte array.
    /// </summary>
    /// <param name="data">Binary data to be deserialized into a list of Mesh objects.</param>
    /// <returns>List of Mesh objects.</returns>
    public static IEnumerable<Mesh> Deserialize(byte[] data)
    {
        List<Mesh> meshes = new List<Mesh>();

        using (MemoryStream stream = new MemoryStream(data))
        {
            using (BinaryReader reader = new BinaryReader(stream))
            {
                // Keep reading meshes while at least one complete header remains.
                while (reader.BaseStream.Length - reader.BaseStream.Position >= HeaderSize)
                {
                    meshes.Add(ReadMesh(reader));
                }
            }
        }

        return meshes;
    }

    /// <summary>
    /// Writes a Mesh object to the data stream.
    /// </summary>
    /// <param name="writer">BinaryWriter representing the data stream.</param>
    /// <param name="mesh">The Mesh object to be written.</param>
    /// <param name="transform">If provided, will transform all vertices into world space before writing.</param>
    private static void WriteMesh(BinaryWriter writer, Mesh mesh, Transform transform = null)
    {
        SysDiag.Debug.Assert(writer != null);

        // Unity's Mesh.vertices / Mesh.triangles accessors return a fresh copy
        // of the array on every access; fetch each exactly once instead of the
        // previous three property reads.
        Vector3[] vertices = mesh.vertices;
        int[] triangleIndices = mesh.triangles;

        WriteMeshHeader(writer, vertices.Length, triangleIndices.Length);
        WriteVertices(writer, vertices, transform);
        WriteTriangleIndices(writer, triangleIndices);
    }

    /// <summary>
    /// Reads a single Mesh object from the data stream.
    /// </summary>
    /// <param name="reader">BinaryReader representing the data stream.</param>
    /// <returns>Mesh object read from the stream.</returns>
    private static Mesh ReadMesh(BinaryReader reader)
    {
        SysDiag.Debug.Assert(reader != null);

        int vertexCount;
        int triangleIndexCount;

        // Read the mesh data.
        ReadMeshHeader(reader, out vertexCount, out triangleIndexCount);
        Vector3[] vertices = ReadVertices(reader, vertexCount);
        int[] triangleIndices = ReadTriangleIndices(reader, triangleIndexCount);

        // Create the mesh; normals are not serialized, so reconstruct them.
        Mesh mesh = new Mesh();
        mesh.vertices = vertices;
        mesh.triangles = triangleIndices;
        mesh.RecalculateNormals();
        return mesh;
    }

    /// <summary>
    /// Writes a mesh header to the data stream.
    /// </summary>
    /// <param name="writer">BinaryWriter representing the data stream.</param>
    /// <param name="vertexCount">Count of vertices in the mesh.</param>
    /// <param name="triangleIndexCount">Count of triangle indices in the mesh.</param>
    private static void WriteMeshHeader(BinaryWriter writer, int vertexCount, int triangleIndexCount)
    {
        SysDiag.Debug.Assert(writer != null);

        writer.Write(vertexCount);
        writer.Write(triangleIndexCount);
    }

    /// <summary>
    /// Reads a mesh header from the data stream.
    /// </summary>
    /// <param name="reader">BinaryReader representing the data stream.</param>
    /// <param name="vertexCount">Count of vertices in the mesh.</param>
    /// <param name="triangleIndexCount">Count of triangle indices in the mesh.</param>
    private static void ReadMeshHeader(BinaryReader reader, out int vertexCount, out int triangleIndexCount)
    {
        SysDiag.Debug.Assert(reader != null);

        vertexCount = reader.ReadInt32();
        triangleIndexCount = reader.ReadInt32();
    }

    /// <summary>
    /// Writes a mesh's vertices to the data stream.
    /// </summary>
    /// <param name="writer">BinaryWriter representing the data stream.</param>
    /// <param name="vertices">Array of Vector3 structures representing each vertex.</param>
    /// <param name="transform">If provided, will convert all vertices into world space before writing.</param>
    private static void WriteVertices(BinaryWriter writer, Vector3[] vertices, Transform transform = null)
    {
        SysDiag.Debug.Assert(writer != null);

        if (transform != null)
        {
            for (int v = 0, vLength = vertices.Length; v < vLength; ++v)
            {
                Vector3 vertex = transform.TransformPoint(vertices[v]);

                writer.Write(vertex.x);
                writer.Write(vertex.y);
                writer.Write(vertex.z);
            }
        }
        else
        {
            foreach (Vector3 vertex in vertices)
            {
                writer.Write(vertex.x);
                writer.Write(vertex.y);
                writer.Write(vertex.z);
            }
        }
    }

    /// <summary>
    /// Reads a mesh's vertices from the data stream.
    /// </summary>
    /// <param name="reader">BinaryReader representing the data stream.</param>
    /// <param name="vertexCount">Count of vertices to read.</param>
    /// <returns>Array of Vector3 structures representing the mesh's vertices.</returns>
    private static Vector3[] ReadVertices(BinaryReader reader, int vertexCount)
    {
        SysDiag.Debug.Assert(reader != null);

        Vector3[] vertices = new Vector3[vertexCount];

        for (int i = 0; i < vertices.Length; i++)
        {
            vertices[i] = new Vector3(reader.ReadSingle(),
                reader.ReadSingle(),
                reader.ReadSingle());
        }

        return vertices;
    }

    /// <summary>
    /// Writes the vertex indices that represent a mesh's triangles to the data stream.
    /// (Renamed from the misspelled "WriteTriangleIndicies"; private, so no callers outside this class.)
    /// </summary>
    /// <param name="writer">BinaryWriter representing the data stream.</param>
    /// <param name="triangleIndices">Array of integers that describe how the vertex indices form triangles.</param>
    private static void WriteTriangleIndices(BinaryWriter writer, int[] triangleIndices)
    {
        SysDiag.Debug.Assert(writer != null);

        foreach (int index in triangleIndices)
        {
            writer.Write(index);
        }
    }

    /// <summary>
    /// Reads the vertex indices that represent a mesh's triangles from the data stream.
    /// </summary>
    /// <param name="reader">BinaryReader representing the data stream.</param>
    /// <param name="triangleIndexCount">Count of indices to read.</param>
    /// <returns>Array of integers that describe how the vertex indices form triangles.</returns>
    private static int[] ReadTriangleIndices(BinaryReader reader, int triangleIndexCount)
    {
        SysDiag.Debug.Assert(reader != null);

        int[] triangleIndices = new int[triangleIndexCount];

        for (int i = 0; i < triangleIndices.Length; i++)
        {
            triangleIndices[i] = reader.ReadInt32();
        }

        return triangleIndices;
    }
}
}
| |
using System.Collections;
using Iesi.Collections;
using Microsoft.Practices.Unity;
namespace NHibernate.Bytecode.Unity.Tests.ProxyInterface
{
public class Blog
{
    private ISet _posts;
    private ISet _users;
    private int blog_id;
    private string blog_name;

    /// <summary>
    /// Default constructor; starts with empty post and user collections.
    /// Marked for use by the Unity container.
    /// </summary>
    [InjectionConstructor]
    public Blog()
    {
        _posts = new HashedSet();
        _users = new HashedSet();
    }

    /// <summary>
    /// Creates a blog with the given name.
    /// </summary>
    public Blog(string name)
        : this()
    {
        blog_name = name;
    }

    // Surrogate identifier.
    public virtual int BlogID { get { return blog_id; } set { blog_id = value; } }

    // Display name of the blog.
    public virtual string BlogName { get { return blog_name; } set { blog_name = value; } }

    // Posts belonging to this blog.
    public virtual ISet Posts { get { return _posts; } set { _posts = value; } }

    // Users associated with this blog.
    public virtual ISet Users { get { return _users; } set { _users = value; } }
}
public class Comment
{
    private int _id;
    private int _indexInPost;
    private string _text;
    private Post _post;
    private User commenter;

    /// <summary>
    /// Parameterless constructor kept private for the persistence framework.
    /// </summary>
    private Comment() { }

    /// <summary>
    /// Creates a comment with the given body text.
    /// </summary>
    public Comment(string text)
        : this()
    {
        _text = text;
    }

    /// <summary>
    /// The user who wrote this comment.
    /// Made virtual for consistency with the other persistent properties of this
    /// class: NHibernate lazy proxies require public members to be virtual, and
    /// this was the only non-virtual one.
    /// </summary>
    public virtual User Commenter
    {
        get { return commenter; }
        set { commenter = value; }
    }

    public virtual int IndexInPost
    {
        get { return _indexInPost; }
        set { _indexInPost = value; }
    }

    public virtual Post Post
    {
        get { return _post; }
        set { _post = value; }
    }

    public virtual int CommentId
    {
        get { return _id; }
        set { _id = value; }
    }

    public virtual string Text
    {
        get { return _text; }
        set { _text = value; }
    }
}
public class Post
{
    private int post_id;
    private Blog _blog;
    private string post_title;
    private IList _comments;
    private ISet categories = new HashedSet();

    /// <summary>
    /// Default constructor; starts with an empty comment list.
    /// </summary>
    public Post()
    {
        _comments = new ArrayList();
    }

    /// <summary>
    /// Creates a post with the given title.
    /// </summary>
    public Post(string title)
        : this()
    {
        post_title = title;
    }

    /// <summary>
    /// Categories this post belongs to.
    /// Made virtual for consistency with the other persistent properties of this
    /// class: NHibernate lazy proxies require public members to be virtual, and
    /// this was the only non-virtual one.
    /// </summary>
    public virtual ISet Categories
    {
        get { return categories; }
        set { categories = value; }
    }

    public virtual IList Comments
    {
        get { return _comments; }
        set { _comments = value; }
    }

    public virtual int PostId
    {
        get { return post_id; }
        set { post_id = value; }
    }

    public virtual string PostTitle
    {
        get { return post_title; }
        set { post_title = value; }
    }

    public virtual Blog Blog
    {
        get { return _blog; }
        set { _blog = value; }
    }
}
public class User
{
    private string _userName;
    private int _userId;
    private ISet _blogs;

    /// <summary>
    /// Default constructor; starts with an empty blog collection.
    /// </summary>
    public User()
    {
        _blogs = new HashedSet();
    }

    /// <summary>
    /// Creates a user with the given name.
    /// </summary>
    public User(string name)
        : this()
    {
        _userName = name;
    }

    // Blogs this user participates in.
    public virtual ISet Blogs { get { return _blogs; } set { _blogs = value; } }

    // Surrogate identifier.
    public virtual int UserId { get { return _userId; } set { _userId = value; } }

    // Login/display name.
    public virtual string UserName { get { return _userName; } set { _userName = value; } }
}
public class Category
{
    private int category_id;
    private string name;
    private ISet posts = new HashedSet();

    /// <summary>
    /// Default constructor for the persistence framework.
    /// </summary>
    public Category() { }

    /// <summary>
    /// Creates a category with the given name.
    /// </summary>
    public Category(string name)
    {
        this.name = name;
    }

    // Surrogate identifier.
    public int CategoryId { get { return category_id; } set { category_id = value; } }

    // Category display name.
    public string Name { get { return name; } set { name = value; } }

    // Posts filed under this category.
    public ISet Posts { get { return posts; } set { posts = value; } }
}
}
| |
//
// AssetBundleManager.cs
//
// This class keeps track of all the downloaded asset bundles.
// It contains functions to add, destroy and unload a bundle
//
// The MIT License (MIT)
//
// Copyright (c) 2013 Niklas Borglund
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
/// <summary>
/// Asset bundle container class that contains an asset bundle and a list of all the objects that's using it
/// </summary>
public class AssetBundleContainer
{
    private AssetBundle thisAssetBundle;
    private string bundleName; // used for more readable debug messages
    private List<GameObject> objectList = new List<GameObject>();

    /// <summary>
    /// Gets or sets the asset bundle tracked by this container.
    /// </summary>
    public AssetBundle ThisAssetBundle
    {
        get { return thisAssetBundle; }
        set { thisAssetBundle = value; }
    }

    /// <summary>
    /// A list with all the object references that use this asset bundle.
    /// </summary>
    /// <value>
    /// The object list.
    /// </value>
    public List<GameObject> ObjectList
    {
        get { return objectList; }
    }

    /// <summary>
    /// Gets or sets the name of the bundle.
    /// </summary>
    /// <value>
    /// The name of the bundle.
    /// </value>
    public string BundleName
    {
        get { return bundleName; }
        set { bundleName = value; }
    }

    /// <summary>
    /// Determines whether the list with references to this asset bundle is empty.
    /// </summary>
    /// <returns>
    /// <c>true</c> if this instance is list empty; otherwise, <c>false</c>.
    /// </returns>
    public bool IsListEmpty()
    {
        // Direct boolean result instead of the previous if/else returning true/false.
        return objectList.Count == 0;
    }

    /// <summary>
    /// Clear all objects that are null and not used by the AssetBundle anymore.
    /// The null comparison goes through UnityEngine.Object's overloaded equality,
    /// so destroyed GameObjects are removed as well.
    /// </summary>
    public void ClearEmptyObjects()
    {
        // List<T>.RemoveAll replaces the hand-written reverse-index delete loop.
        objectList.RemoveAll(obj => obj == null);
    }

    /// <summary>
    /// Unloads the assetBundle together with all assets loaded from it.
    /// </summary>
    public void Unload()
    {
        Debug.Log("Objects that holds a reference to " + bundleName + ": " + objectList.Count); //This should always show zero

        // Guard: Unload() on a container whose bundle was never assigned used to
        // throw a NullReferenceException.
        if (thisAssetBundle != null)
        {
            Debug.Log("Unloading AssetBundle(true):" + bundleName);
            thisAssetBundle.Unload(true);
        }
    }
}
public class AssetBundleManager : MonoBehaviour
{
#region Singleton
private static AssetBundleManager instance = null;
public static AssetBundleManager Instance
{
get
{
if (instance == null) // if the static instance is null, then create an instance of the manager
{
Debug.Log("Creating an AssetBundle manager instance");
GameObject go = new GameObject();
instance = go.AddComponent<AssetBundleManager>();
go.name = "AssetBundleManager";
DontDestroyOnLoad(go);
}
return instance;
}
}
#endregion
private Dictionary<string, AssetBundleContainer> assetBundles = new Dictionary<string, AssetBundleContainer>();
void Start()
{
if (instance == null)
{
instance = this;
DontDestroyOnLoad(this.gameObject);
}
//Check for unused AssetBundles every 5 seconds
InvokeRepeating("CheckForUnusedBundles", 5,5);
}
//Remove and Unload not used asset bundles every 5 seconds(Invoked in Start())
void CheckForUnusedBundles()
{
if (assetBundles.Count > 0)
{
List<string> keysToRemove = new List<string>();
foreach(KeyValuePair<string, AssetBundleContainer> pair in assetBundles)
{
pair.Value.ClearEmptyObjects();
if (pair.Value.IsListEmpty())
{
//Unload the AssetBundle
pair.Value.Unload();
//Add the key to a list for removal
keysToRemove.Add(pair.Key);
}
}
//Delete all the objects in the dicationary with the specified key
foreach(string key in keysToRemove)
{
assetBundles.Remove(key);
}
}
}
/// <summary>
/// Adds the bundle for removal management, if no gameobjects are using the assetbundle it will be
/// removed automatically(if you use this method for all objects created from asset bundles)
/// </summary>
public void AddBundle(string bundleName, AssetBundle assetBundle, GameObject instantiatedObject)
{
//Check if the assetbundle already has a container in the dictionary
if (!assetBundles.ContainsKey(bundleName))
{
//Create a new container and store the referenced game object
AssetBundleContainer bundleContainer = new AssetBundleContainer();
bundleContainer.ThisAssetBundle = assetBundle;
bundleContainer.ObjectList.Add(instantiatedObject);
bundleContainer.BundleName = bundleName;
assetBundles.Add(bundleName, bundleContainer);
}
else
{
//if the key exists, get the container and add the referenced object to its list.
AssetBundleContainer bundleContainer = null;
assetBundles.TryGetValue(bundleName, out bundleContainer);
if (bundleContainer != null)
{
bundleContainer.ObjectList.Add(instantiatedObject);
}
else
{
Debug.LogError("AssetBundleManager.cs: Couldn't get the container for assetbundle: " + bundleName + ". " +
"Removal Management for object:" + instantiatedObject.name + " will not work");
}
}
}
/// <summary>
/// Gets the asset bundle for the specified key.
/// </summary>
/// <returns>
/// The asset bundle.
/// </returns>
/// <param name='bundleName'>
/// Bundle name key.
/// </param>
public AssetBundleContainer GetAssetBundle(string bundleName)
{
AssetBundleContainer thisBundle = null;
assetBundles.TryGetValue(bundleName, out thisBundle);
return thisBundle;
}
/// <summary>
/// Destroys and unloads an asset bundle and all its referenced objects with
/// the specified key.
/// </summary>
/// <param name='bundleName'>
/// Bundle name.
/// </param>
public void DestroyAssetBundle(string bundleName)
{
AssetBundleContainer thisBundle = null;
assetBundles.TryGetValue(bundleName, out thisBundle);
if (thisBundle != null)
{
//Destroy all the game objects that are referencing to this bundle
foreach(GameObject obj in thisBundle.ObjectList)
{
if (obj != null)
{
Destroy(obj);
}
}
thisBundle.ObjectList.Clear();
thisBundle.Unload();
assetBundles.Remove(bundleName);
}
}
/// <summary>
/// Destroy and unload all asset bundles at once and all of their referenced objects.
/// </summary>
public void DestroyAllBundles()
{
foreach (KeyValuePair<string, AssetBundleContainer> bundle in assetBundles)
{
foreach (GameObject obj in bundle.Value.ObjectList)
{
//Destroy all the game objects that are referencing to this bundle
if (obj != null)
{
Destroy(obj);
}
}
bundle.Value.ObjectList.Clear();
bundle.Value.Unload();
}
assetBundles.Clear();
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="AspNetHostingPermission.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
namespace System.Web {
using System.Security;
using System.Security.Permissions;
using System.Globalization;
//NOTE: While AspNetHostingPermissionAttribute resides in System.DLL,
// no classes from that DLL are able to make declarative usage of AspNetHostingPermission.
    /// <summary>
    /// Trust levels used by AspNetHostingPermission. Higher numeric values
    /// grant strictly more hosting rights (Union/Intersect compare these values).
    /// </summary>
    [Serializable]
    public enum AspNetHostingPermissionLevel
    {
        None = 100,
        Minimal = 200,
        Low = 300,
        Medium = 400,
        High = 500,
        Unrestricted = 600
    }
[AttributeUsage(AttributeTargets.All, AllowMultiple=true, Inherited=false )]
[Serializable]
sealed public class AspNetHostingPermissionAttribute : CodeAccessSecurityAttribute
{
AspNetHostingPermissionLevel _level;
public AspNetHostingPermissionAttribute ( SecurityAction action ) : base( action ) {
_level = AspNetHostingPermissionLevel.None;
}
public AspNetHostingPermissionLevel Level {
get {
return _level;
}
set {
AspNetHostingPermission.VerifyAspNetHostingPermissionLevel(value, "Level");
_level = value;
}
}
public override IPermission CreatePermission() {
if (Unrestricted) {
return new AspNetHostingPermission(PermissionState.Unrestricted);
}
else {
return new AspNetHostingPermission(_level);
}
}
}
/// <devdoc>
/// <para>
/// </para>
/// </devdoc>
[Serializable]
public sealed class AspNetHostingPermission : CodeAccessPermission, IUnrestrictedPermission {
AspNetHostingPermissionLevel _level;
static internal void VerifyAspNetHostingPermissionLevel(AspNetHostingPermissionLevel level, string arg) {
switch (level) {
case AspNetHostingPermissionLevel.Unrestricted:
case AspNetHostingPermissionLevel.High:
case AspNetHostingPermissionLevel.Medium:
case AspNetHostingPermissionLevel.Low:
case AspNetHostingPermissionLevel.Minimal:
case AspNetHostingPermissionLevel.None:
break;
default:
throw new ArgumentException(arg);
}
}
/// <devdoc>
/// <para>
/// Creates a new instance of the System.Net.AspNetHostingPermission
/// class that passes all demands or that fails all demands.
/// </para>
/// </devdoc>
public AspNetHostingPermission(PermissionState state) {
switch (state) {
case PermissionState.Unrestricted:
_level = AspNetHostingPermissionLevel.Unrestricted;
break;
case PermissionState.None:
_level = AspNetHostingPermissionLevel.None;
break;
default:
throw new ArgumentException(SR.GetString(SR.InvalidArgument, state.ToString(), "state"));
}
}
public AspNetHostingPermission(AspNetHostingPermissionLevel level) {
VerifyAspNetHostingPermissionLevel(level, "level");
_level = level;
}
public AspNetHostingPermissionLevel Level {
get {
return _level;
}
set {
VerifyAspNetHostingPermissionLevel(value, "Level");
_level = value;
}
}
// IUnrestrictedPermission interface methods
/// <devdoc>
/// <para>
/// Checks the overall permission state of the object.
/// </para>
/// </devdoc>
public bool IsUnrestricted() {
return _level == AspNetHostingPermissionLevel.Unrestricted;
}
// IPermission interface methods
/// <devdoc>
/// <para>
/// Creates a copy of a System.Net.AspNetHostingPermission
/// </para>
/// </devdoc>
public override IPermission Copy () {
return new AspNetHostingPermission(_level);
}
/// <devdoc>
/// <para>Returns the logical union between two System.Net.AspNetHostingPermission instances.</para>
/// </devdoc>
public override IPermission Union(IPermission target) {
if (target == null) {
return Copy();
}
if (target.GetType() != typeof(AspNetHostingPermission)) {
throw new ArgumentException(SR.GetString(SR.InvalidArgument, target == null ? "null" : target.ToString(), "target"));
}
AspNetHostingPermission other = (AspNetHostingPermission) target;
if (Level >= other.Level) {
return new AspNetHostingPermission(Level);
}
else {
return new AspNetHostingPermission(other.Level);
}
}
/// <devdoc>
/// <para>Returns the logical intersection between two System.Net.AspNetHostingPermission instances.</para>
/// </devdoc>
public override IPermission Intersect(IPermission target) {
if (target == null) {
return null;
}
if (target.GetType() != typeof(AspNetHostingPermission)) {
throw new ArgumentException(SR.GetString(SR.InvalidArgument, target == null ? "null" : target.ToString(), "target"));
}
AspNetHostingPermission other = (AspNetHostingPermission) target;
if (Level <= other.Level) {
return new AspNetHostingPermission(Level);
}
else {
return new AspNetHostingPermission(other.Level);
}
}
/// <devdoc>
/// <para>Compares two System.Net.AspNetHostingPermission instances.</para>
/// </devdoc>
public override bool IsSubsetOf(IPermission target) {
if (target == null) {
return _level == AspNetHostingPermissionLevel.None;
}
if (target.GetType() != typeof(AspNetHostingPermission)) {
throw new ArgumentException(SR.GetString(SR.InvalidArgument, target == null ? "null" : target.ToString(), "target"));
}
AspNetHostingPermission other = (AspNetHostingPermission) target;
return Level <= other.Level;
}
/// <devdoc>
/// </devdoc>
public override void FromXml(SecurityElement securityElement) {
if (securityElement == null) {
throw new ArgumentNullException(SR.GetString(SR.AspNetHostingPermissionBadXml,"securityElement"));
}
if (!securityElement.Tag.Equals("IPermission")) {
throw new ArgumentException(SR.GetString(SR.AspNetHostingPermissionBadXml,"securityElement"));
}
string className = securityElement.Attribute("class");
if (className == null) {
throw new ArgumentException(SR.GetString(SR.AspNetHostingPermissionBadXml,"securityElement"));
}
if (className.IndexOf(this.GetType().FullName, StringComparison.Ordinal) < 0) {
throw new ArgumentException(SR.GetString(SR.AspNetHostingPermissionBadXml,"securityElement"));
}
string version = securityElement.Attribute("version");
if (string.Compare(version, "1", StringComparison.OrdinalIgnoreCase) != 0) {
throw new ArgumentException(SR.GetString(SR.AspNetHostingPermissionBadXml,"version"));
}
string level = securityElement.Attribute("Level");
if (level == null) {
_level = AspNetHostingPermissionLevel.None;
}
else {
_level = (AspNetHostingPermissionLevel) Enum.Parse(typeof(AspNetHostingPermissionLevel), level);
}
}
/// <devdoc>
/// <para>[To be supplied.]</para>
/// </devdoc>
public override SecurityElement ToXml() {
SecurityElement securityElement = new SecurityElement("IPermission");
securityElement.AddAttribute("class", this.GetType().FullName + ", " + this.GetType().Module.Assembly.FullName.Replace( '\"', '\'' ));
securityElement.AddAttribute("version", "1" );
securityElement.AddAttribute("Level", Enum.GetName(typeof(AspNetHostingPermissionLevel), _level));
if (IsUnrestricted()) {
securityElement.AddAttribute("Unrestricted", "true");
}
return securityElement;
}
}
}
| |
// Copyright 2010 The Noda Time Authors. All rights reserved.
// Use of this source code is governed by the Apache License 2.0,
// as found in the LICENSE.txt file.
using NodaTime.Annotations;
using NodaTime.Utility;
using System;
using System.Diagnostics;
namespace NodaTime.TimeZones
{
/// <summary>
/// Represents a range of time for which a particular Offset applies.
/// </summary>
/// <remarks>
/// <para>
/// Equality is defined component-wise in terms of all properties: the name, the start and end, and the offsets.
/// There is no ordering defined between zone intervals.
/// </para>
/// </remarks>
/// <threadsafety>This type is an immutable reference type. See the thread safety section of the user guide for more information.</threadsafety>
[Immutable]
public sealed class ZoneInterval : IEquatable<ZoneInterval?>
{
/// <summary>
/// Returns the underlying start instant of this zone interval. If the zone interval extends to the
/// beginning of time, the return value will be <see cref="Instant.BeforeMinValue"/>; this value
/// should *not* be exposed publicly.
/// </summary>
internal Instant RawStart { get; }
/// <summary>
/// Returns the underlying end instant of this zone interval. If the zone interval extends to the
/// end of time, the return value will be <see cref="Instant.AfterMaxValue"/>; this value
/// should *not* be exposed publicly.
/// </summary>
internal Instant RawEnd { get; }
private readonly LocalInstant localStart;
private readonly LocalInstant localEnd;
/// <summary>
/// Gets the standard offset for this period. This is the offset without any daylight savings
/// contributions.
/// </summary>
/// <remarks>
/// This is effectively <c>WallOffset - Savings</c>.
/// </remarks>
/// <value>The base Offset.</value>
public Offset StandardOffset
{
[DebuggerStepThrough]
get { return WallOffset - Savings; }
}
/// <summary>
/// Gets the duration of this zone interval.
/// </summary>
/// <remarks>
/// This is effectively <c>End - Start</c>.
/// </remarks>
/// <value>The Duration of this zone interval.</value>
/// <exception cref="InvalidOperationException">This zone extends to the start or end of time.</exception>
public Duration Duration
{
[DebuggerStepThrough]
get { return End - Start; }
}
/// <summary>
/// Returns <c>true</c> if this zone interval has a fixed start point, or <c>false</c> if it
/// extends to the beginning of time.
/// </summary>
/// <value><c>true</c> if this interval has a fixed start point, or <c>false</c> if it
/// extends to the beginning of time.</value>
public bool HasStart => RawStart.IsValid;
/// <summary>
/// Gets the last Instant (exclusive) that the Offset applies.
/// </summary>
/// <value>The last Instant (exclusive) that the Offset applies.</value>
/// <exception cref="InvalidOperationException">The zone interval extends to the end of time</exception>
public Instant End
{
[DebuggerStepThrough]
get
{
Preconditions.CheckState(RawEnd.IsValid, "Zone interval extends to the end of time");
return RawEnd;
}
}
/// <summary>
/// Returns <c>true</c> if this zone interval has a fixed end point, or <c>false</c> if it
/// extends to the end of time.
/// </summary>
/// <value><c>true</c> if this interval has a fixed end point, or <c>false</c> if it
/// extends to the end of time.</value>
public bool HasEnd => RawEnd.IsValid;
// TODO(feature): Consider whether we need some way of checking whether IsoLocalStart/End will throw.
// Clients can check HasStart/HasEnd for infinity, but what about unrepresentable local values?
/// <summary>
/// Gets the local start time of the interval, as a <see cref="LocalDateTime" />
/// in the ISO calendar.
/// </summary>
/// <value>The local start time of the interval in the ISO calendar, with the offset of
/// this zone interval.</value>
/// <exception cref="OverflowException">The interval starts too early to represent as a `LocalDateTime`.</exception>
/// <exception cref="InvalidOperationException">The interval extends to the start of time.</exception>
public LocalDateTime IsoLocalStart
{
// Use the Start property to trigger the appropriate end-of-time exception.
// Call Plus to trigger an appropriate out-of-range exception.
[DebuggerStepThrough]
get { return new LocalDateTime(Start.Plus(WallOffset)); }
}
/// <summary>
/// Gets the local end time of the interval, as a <see cref="LocalDateTime" />
/// in the ISO calendar.
/// </summary>
/// <value>The local end time of the interval in the ISO calendar, with the offset
/// of this zone interval. As the end time is exclusive, by the time this local time
/// is reached, the next interval will be in effect and the local time will usually
/// have changed (e.g. by adding or subtracting an hour).</value>
/// <exception cref="OverflowException">The interval ends too late to represent as a `LocalDateTime`.</exception>
/// <exception cref="InvalidOperationException">The interval extends to the end of time.</exception>
public LocalDateTime IsoLocalEnd
{
[DebuggerStepThrough]
// Use the End property to trigger the appropriate end-of-time exception.
// Call Plus to trigger an appropriate out-of-range exception.
get { return new LocalDateTime(End.Plus(WallOffset)); }
}
/// <summary>
/// Gets the name of this offset period (e.g. PST or PDT).
/// </summary>
/// <value>The name of this offset period (e.g. PST or PDT).</value>
public string Name { [DebuggerStepThrough] get; }
/// <summary>
/// Gets the offset from UTC for this period. This includes any daylight savings value.
/// </summary>
/// <value>The offset from UTC for this period.</value>
public Offset WallOffset { [DebuggerStepThrough] get; }
/// <summary>
/// Gets the daylight savings value for this period.
/// </summary>
/// <value>The savings value.</value>
public Offset Savings { [DebuggerStepThrough] get; }
/// <summary>
/// Gets the first Instant that the Offset applies.
/// </summary>
/// <value>The first Instant that the Offset applies.</value>
public Instant Start
{
[DebuggerStepThrough]
get
{
Preconditions.CheckState(RawStart.IsValid, "Zone interval extends to the beginning of time");
return RawStart;
}
}
/// <summary>
/// Initializes a new instance of the <see cref="ZoneInterval" /> class.
/// </summary>
/// <param name="name">The name of this offset period (e.g. PST or PDT).</param>
/// <param name="start">The first <see cref="Instant" /> that the <paramref name = "wallOffset" /> applies,
/// or <c>null</c> to make the zone interval extend to the start of time.</param>
/// <param name="end">The last <see cref="Instant" /> (exclusive) that the <paramref name = "wallOffset" /> applies,
/// or <c>null</c> to make the zone interval extend to the end of time.</param>
/// <param name="wallOffset">The <see cref="WallOffset" /> from UTC for this period including any daylight savings.</param>
/// <param name="savings">The <see cref="WallOffset" /> daylight savings contribution to the offset.</param>
/// <exception cref="ArgumentException">If <c><paramref name = "start" /> >= <paramref name = "end" /></c>.</exception>
public ZoneInterval(string name, Instant? start, Instant? end, Offset wallOffset, Offset savings)
: this(name, start ?? Instant.BeforeMinValue, end ?? Instant.AfterMaxValue, wallOffset, savings)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="ZoneInterval" /> class.
/// </summary>
/// <param name="name">The name of this offset period (e.g. PST or PDT).</param>
/// <param name="start">The first <see cref="Instant" /> that the <paramref name = "wallOffset" /> applies,
/// or <see cref="Instant.BeforeMinValue"/> to make the zone interval extend to the start of time.</param>
/// <param name="end">The last <see cref="Instant" /> (exclusive) that the <paramref name = "wallOffset" /> applies,
/// or <see cref="Instant.AfterMaxValue"/> to make the zone interval extend to the end of time.</param>
/// <param name="wallOffset">The <see cref="WallOffset" /> from UTC for this period including any daylight savings.</param>
/// <param name="savings">The <see cref="WallOffset" /> daylight savings contribution to the offset.</param>
/// <exception cref="ArgumentException">If <c><paramref name = "start" /> >= <paramref name = "end" /></c>.</exception>
internal ZoneInterval(string name, Instant start, Instant end, Offset wallOffset, Offset savings)
{
Preconditions.CheckNotNull(name, nameof(name));
Preconditions.CheckArgument(start < end, nameof(start), "The start Instant must be less than the end Instant");
this.Name = name;
this.RawStart = start;
this.RawEnd = end;
this.WallOffset = wallOffset;
this.Savings = savings;
// Work out the corresponding local instants, taking care to "go infinite" appropriately.
localStart = start.SafePlus(wallOffset);
localEnd = end.SafePlus(wallOffset);
}
/// <summary>
/// Returns a copy of this zone interval, but with the given start instant.
/// </summary>
internal ZoneInterval WithStart(Instant newStart)
{
return new ZoneInterval(Name, newStart, RawEnd, WallOffset, Savings);
}
/// <summary>
/// Returns a copy of this zone interval, but with the given end instant.
/// </summary>
internal ZoneInterval WithEnd(Instant newEnd)
{
return new ZoneInterval(Name, RawStart, newEnd, WallOffset, Savings);
}
#region Contains
/// <summary>
/// Determines whether this period contains the given Instant in its range.
/// </summary>
/// <remarks>
/// Usually this is half-open, i.e. the end is exclusive, but an interval with an end point of "the end of time"
/// is deemed to be inclusive at the end.
/// </remarks>
/// <param name="instant">The instant to test.</param>
/// <returns>
/// <c>true</c> if this period contains the given Instant in its range; otherwise, <c>false</c>.
/// </returns>
[DebuggerStepThrough]
public bool Contains(Instant instant) => RawStart <= instant && instant < RawEnd;
/// <summary>
/// Determines whether this period contains the given LocalInstant in its range.
/// </summary>
/// <param name="localInstant">The local instant to test.</param>
/// <returns>
/// <c>true</c> if this period contains the given LocalInstant in its range; otherwise, <c>false</c>.
/// </returns>
[DebuggerStepThrough]
internal bool Contains(LocalInstant localInstant) => localStart <= localInstant && localInstant < localEnd;
/// <summary>
/// Returns whether this zone interval has the same offsets and name as another.
/// </summary>
internal bool EqualIgnoreBounds([Trusted] ZoneInterval other)
{
Preconditions.DebugCheckNotNull(other, nameof(other));
return other.WallOffset == WallOffset && other.Savings == Savings && other.Name == Name;
}
#endregion // Contains
#region IEquatable<ZoneInterval> Members
/// <summary>
/// Indicates whether the current object is equal to another object of the same type.
/// See the type documentation for a description of equality semantics.
/// </summary>
/// <returns>
/// true if the current object is equal to the <paramref name = "other" /> parameter; otherwise, false.
/// </returns>
/// <param name="other">An object to compare with this object.</param>
[DebuggerStepThrough]
public bool Equals(ZoneInterval? other)
{
if (other is null)
{
return false;
}
if (ReferenceEquals(this, other))
{
return true;
}
return Name == other.Name && RawStart == other.RawStart && RawEnd == other.RawEnd
&& WallOffset == other.WallOffset && Savings == other.Savings;
}
#endregion
#region object Overrides
/// <summary>
/// Determines whether the specified <see cref="T:System.Object" /> is equal to the current <see cref="T:System.Object" />.
/// See the type documentation for a description of equality semantics.
/// </summary>
/// <returns>
/// <c>true</c> if the specified <see cref="T:System.Object" /> is equal to the current <see cref="T:System.Object" />; otherwise, <c>false</c>.
/// </returns>
/// <param name="obj">The <see cref="T:System.Object" /> to compare with the current <see cref="T:System.Object" />.</param>
/// <filterpriority>2</filterpriority>
[DebuggerStepThrough]
public override bool Equals(object? obj) => Equals(obj as ZoneInterval);
/// <summary>
/// Returns a hash code for this zone interval.
/// See the type documentation for a description of equality semantics.
/// </summary>
/// <returns>A hash code for this zone interval.</returns>
public override int GetHashCode() =>
HashCodeHelper.Initialize()
.Hash(Name)
.Hash(RawStart)
.Hash(RawEnd)
.Hash(WallOffset)
.Hash(Savings)
.Value;
/// <summary>
/// Returns a <see cref="System.String" /> that represents this instance.
/// </summary>
/// <returns>
/// A <see cref="System.String" /> that represents this instance.
/// </returns>
public override string ToString() => $"{Name}: [{RawStart}, {RawEnd}) {WallOffset} ({Savings})";
#endregion // object Overrides
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections;
using System.Reflection;
using log4net;
using Nini.Config;
using OpenMetaverse;
using OpenMetaverse.StructuredData;
using Mono.Addins;
using OpenSim.Framework;
using OpenSim.Framework.Servers;
using OpenSim.Framework.Servers.HttpServer;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using Caps=OpenSim.Framework.Capabilities.Caps;
namespace OpenSim.Region.ClientStack.Linden
{
[Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "ObjectAdd")]
public class ObjectAdd : INonSharedRegionModule
{
// private static readonly ILog m_log =
// LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
private Scene m_scene;
#region INonSharedRegionModule Members
        // Module entry point required by INonSharedRegionModule; this module
        // reads no configuration, so the body is intentionally empty.
        public void Initialise(IConfigSource pSource)
        {
        }
        // Binds this module to a scene and subscribes to capability registration
        // so RegisterCaps runs for each agent entering the region.
        public void AddRegion(Scene scene)
        {
            m_scene = scene;
            m_scene.EventManager.OnRegisterCaps += RegisterCaps;
        }
public void RemoveRegion(Scene scene)
{
if (m_scene == scene)
{
m_scene.EventManager.OnRegisterCaps -= RegisterCaps;
m_scene = null;
}
}
        // No post-load work is needed for this module.
        public void RegionLoaded(Scene scene)
        {
        }
        // No resources to release on shutdown.
        public void Close()
        {
        }
        // Module name reported to the region module loader.
        public string Name
        {
            get { return "ObjectAddModule"; }
        }
        // Returning null indicates this module is not replaceable by another
        // module implementing a shared interface.
        public Type ReplaceableInterface
        {
            get { return null; }
        }
#endregion
public void RegisterCaps(UUID agentID, Caps caps)
{
UUID capuuid = UUID.Random();
// m_log.InfoFormat("[OBJECTADD]: {0}", "/CAPS/OA/" + capuuid + "/");
caps.RegisterHandler(
"ObjectAdd",
new RestHTTPHandler(
"POST",
"/CAPS/OA/" + capuuid + "/",
httpMethod => ProcessAdd(httpMethod, agentID, caps),
"ObjectAdd",
agentID.ToString())); ;
}
public Hashtable ProcessAdd(Hashtable request, UUID AgentId, Caps cap)
{
Hashtable responsedata = new Hashtable();
responsedata["int_response_code"] = 400; //501; //410; //404;
responsedata["content_type"] = "text/plain";
responsedata["keepalive"] = false;
responsedata["str_response_string"] = "Request wasn't what was expected";
ScenePresence avatar;
if (!m_scene.TryGetScenePresence(AgentId, out avatar))
return responsedata;
OSD r = OSDParser.DeserializeLLSDXml((string)request["requestbody"]);
//UUID session_id = UUID.Zero;
bool bypass_raycast = false;
uint everyone_mask = 0;
uint group_mask = 0;
uint next_owner_mask = 0;
uint flags = 0;
UUID group_id = UUID.Zero;
int hollow = 0;
int material = 0;
int p_code = 0;
int path_begin = 0;
int path_curve = 0;
int path_end = 0;
int path_radius_offset = 0;
int path_revolutions = 0;
int path_scale_x = 0;
int path_scale_y = 0;
int path_shear_x = 0;
int path_shear_y = 0;
int path_skew = 0;
int path_taper_x = 0;
int path_taper_y = 0;
int path_twist = 0;
int path_twist_begin = 0;
int profile_begin = 0;
int profile_curve = 0;
int profile_end = 0;
Vector3 ray_end = Vector3.Zero;
bool ray_end_is_intersection = false;
Vector3 ray_start = Vector3.Zero;
UUID ray_target_id = UUID.Zero;
Quaternion rotation = Quaternion.Identity;
Vector3 scale = Vector3.Zero;
int state = 0;
int lastattach = 0;
if (r.Type != OSDType.Map) // not a proper req
return responsedata;
OSDMap rm = (OSDMap)r;
if (rm.ContainsKey("ObjectData")) //v2
{
if (rm["ObjectData"].Type != OSDType.Map)
{
responsedata["str_response_string"] = "Has ObjectData key, but data not in expected format";
return responsedata;
}
OSDMap ObjMap = (OSDMap)rm["ObjectData"];
bypass_raycast = ObjMap["BypassRaycast"].AsBoolean();
everyone_mask = readuintval(ObjMap["EveryoneMask"]);
flags = readuintval(ObjMap["Flags"]);
group_mask = readuintval(ObjMap["GroupMask"]);
material = ObjMap["Material"].AsInteger();
next_owner_mask = readuintval(ObjMap["NextOwnerMask"]);
p_code = ObjMap["PCode"].AsInteger();
if (ObjMap.ContainsKey("Path"))
{
if (ObjMap["Path"].Type != OSDType.Map)
{
responsedata["str_response_string"] = "Has Path key, but data not in expected format";
return responsedata;
}
OSDMap PathMap = (OSDMap)ObjMap["Path"];
path_begin = PathMap["Begin"].AsInteger();
path_curve = PathMap["Curve"].AsInteger();
path_end = PathMap["End"].AsInteger();
path_radius_offset = PathMap["RadiusOffset"].AsInteger();
path_revolutions = PathMap["Revolutions"].AsInteger();
path_scale_x = PathMap["ScaleX"].AsInteger();
path_scale_y = PathMap["ScaleY"].AsInteger();
path_shear_x = PathMap["ShearX"].AsInteger();
path_shear_y = PathMap["ShearY"].AsInteger();
path_skew = PathMap["Skew"].AsInteger();
path_taper_x = PathMap["TaperX"].AsInteger();
path_taper_y = PathMap["TaperY"].AsInteger();
path_twist = PathMap["Twist"].AsInteger();
path_twist_begin = PathMap["TwistBegin"].AsInteger();
}
if (ObjMap.ContainsKey("Profile"))
{
if (ObjMap["Profile"].Type != OSDType.Map)
{
responsedata["str_response_string"] = "Has Profile key, but data not in expected format";
return responsedata;
}
OSDMap ProfileMap = (OSDMap)ObjMap["Profile"];
profile_begin = ProfileMap["Begin"].AsInteger();
profile_curve = ProfileMap["Curve"].AsInteger();
profile_end = ProfileMap["End"].AsInteger();
hollow = ProfileMap["Hollow"].AsInteger();
}
ray_end_is_intersection = ObjMap["RayEndIsIntersection"].AsBoolean();
ray_target_id = ObjMap["RayTargetId"].AsUUID();
state = ObjMap["State"].AsInteger();
lastattach = ObjMap["LastAttachPoint"].AsInteger();
try
{
ray_end = ((OSDArray)ObjMap["RayEnd"]).AsVector3();
ray_start = ((OSDArray)ObjMap["RayStart"]).AsVector3();
scale = ((OSDArray)ObjMap["Scale"]).AsVector3();
rotation = ((OSDArray)ObjMap["Rotation"]).AsQuaternion();
}
catch (Exception)
{
responsedata["str_response_string"] = "RayEnd, RayStart, Scale or Rotation wasn't in the expected format";
return responsedata;
}
if (rm.ContainsKey("AgentData"))
{
if (rm["AgentData"].Type != OSDType.Map)
{
responsedata["str_response_string"] = "Has AgentData key, but data not in expected format";
return responsedata;
}
OSDMap AgentDataMap = (OSDMap)rm["AgentData"];
//session_id = AgentDataMap["SessionId"].AsUUID();
group_id = AgentDataMap["GroupId"].AsUUID();
}
}
else
{ //v1
bypass_raycast = rm["bypass_raycast"].AsBoolean();
everyone_mask = readuintval(rm["everyone_mask"]);
flags = readuintval(rm["flags"]);
group_id = rm["group_id"].AsUUID();
group_mask = readuintval(rm["group_mask"]);
hollow = rm["hollow"].AsInteger();
material = rm["material"].AsInteger();
next_owner_mask = readuintval(rm["next_owner_mask"]);
hollow = rm["hollow"].AsInteger();
p_code = rm["p_code"].AsInteger();
path_begin = rm["path_begin"].AsInteger();
path_curve = rm["path_curve"].AsInteger();
path_end = rm["path_end"].AsInteger();
path_radius_offset = rm["path_radius_offset"].AsInteger();
path_revolutions = rm["path_revolutions"].AsInteger();
path_scale_x = rm["path_scale_x"].AsInteger();
path_scale_y = rm["path_scale_y"].AsInteger();
path_shear_x = rm["path_shear_x"].AsInteger();
path_shear_y = rm["path_shear_y"].AsInteger();
path_skew = rm["path_skew"].AsInteger();
path_taper_x = rm["path_taper_x"].AsInteger();
path_taper_y = rm["path_taper_y"].AsInteger();
path_twist = rm["path_twist"].AsInteger();
path_twist_begin = rm["path_twist_begin"].AsInteger();
profile_begin = rm["profile_begin"].AsInteger();
profile_curve = rm["profile_curve"].AsInteger();
profile_end = rm["profile_end"].AsInteger();
ray_end_is_intersection = rm["ray_end_is_intersection"].AsBoolean();
ray_target_id = rm["ray_target_id"].AsUUID();
//session_id = rm["session_id"].AsUUID();
state = rm["state"].AsInteger();
lastattach = rm["last_attach_point"].AsInteger();
try
{
ray_end = ((OSDArray)rm["ray_end"]).AsVector3();
ray_start = ((OSDArray)rm["ray_start"]).AsVector3();
rotation = ((OSDArray)rm["rotation"]).AsQuaternion();
scale = ((OSDArray)rm["scale"]).AsVector3();
}
catch (Exception)
{
responsedata["str_response_string"] = "RayEnd, RayStart, Scale or Rotation wasn't in the expected format";
return responsedata;
}
}
Vector3 pos = m_scene.GetNewRezLocation(ray_start, ray_end, ray_target_id, rotation, (bypass_raycast) ? (byte)1 : (byte)0, (ray_end_is_intersection) ? (byte)1 : (byte)0, true, scale, false);
PrimitiveBaseShape pbs = PrimitiveBaseShape.CreateBox();
pbs.PathBegin = (ushort)path_begin;
pbs.PathCurve = (byte)path_curve;
pbs.PathEnd = (ushort)path_end;
pbs.PathRadiusOffset = (sbyte)path_radius_offset;
pbs.PathRevolutions = (byte)path_revolutions;
pbs.PathScaleX = (byte)path_scale_x;
pbs.PathScaleY = (byte)path_scale_y;
pbs.PathShearX = (byte)path_shear_x;
pbs.PathShearY = (byte)path_shear_y;
pbs.PathSkew = (sbyte)path_skew;
pbs.PathTaperX = (sbyte)path_taper_x;
pbs.PathTaperY = (sbyte)path_taper_y;
pbs.PathTwist = (sbyte)path_twist;
pbs.PathTwistBegin = (sbyte)path_twist_begin;
pbs.HollowShape = (HollowShape)hollow;
pbs.PCode = (byte)p_code;
pbs.ProfileBegin = (ushort)profile_begin;
pbs.ProfileCurve = (byte)profile_curve;
pbs.ProfileEnd = (ushort)profile_end;
pbs.Scale = scale;
pbs.State = (byte)state;
pbs.LastAttachPoint = (byte)lastattach;
SceneObjectGroup obj = null; ;
if (m_scene.Permissions.CanRezObject(1, avatar.UUID, pos))
{
// rez ON the ground, not IN the ground
// pos.Z += 0.25F;
obj = m_scene.AddNewPrim(avatar.UUID, group_id, pos, rotation, pbs);
}
if (obj == null)
return responsedata;
SceneObjectPart rootpart = obj.RootPart;
rootpart.Shape = pbs;
rootpart.Flags |= (PrimFlags)flags;
rootpart.EveryoneMask = everyone_mask;
rootpart.GroupID = group_id;
rootpart.GroupMask = group_mask;
rootpart.NextOwnerMask = next_owner_mask;
rootpart.Material = (byte)material;
m_scene.PhysicsScene.AddPhysicsActorTaint(rootpart.PhysActor);
responsedata["int_response_code"] = 200; //501; //410; //404;
responsedata["content_type"] = "text/plain";
responsedata["keepalive"] = false;
responsedata["str_response_string"] = String.Format("<llsd><map><key>local_id</key>{0}</map></llsd>", ConvertUintToBytes(obj.LocalId));
return responsedata;
}
// Decodes a uint from an OSD binary payload that is stored big-endian on the wire.
private uint readuintval(OSD obj)
{
    byte[] raw = obj.AsBinary();
    // Utils.BytesToUInt expects host order; flip the bytes on little-endian machines.
    if (BitConverter.IsLittleEndian)
    {
        Array.Reverse(raw);
    }
    return Utils.BytesToUInt(raw);
}
// Encodes a uint as an LLSD <binary> element: big-endian bytes, base64 encoded.
private string ConvertUintToBytes(uint val)
{
    byte[] raw = Utils.UIntToBytes(val);
    // Emit network (big-endian) byte order regardless of host endianness.
    if (BitConverter.IsLittleEndian)
    {
        Array.Reverse(raw);
    }
    return String.Format("<binary encoding=\"base64\">{0}</binary>", Convert.ToBase64String(raw));
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using MonoTouch.Foundation;
using MonoTouch.UIKit;
using System.Drawing;
using System.Threading.Tasks;
using MonoTouch.CoreGraphics;
using MonoTouch.CoreAnimation;
using TranslateApi;
namespace XamarinStore.iOS
{
// Shows a single product: image carousel, description, size/color pickers and a
// pinned "add to basket" button that plays a fly-to-basket animation.
public class ProductDetailViewController : UITableViewController
{
    // Raised after the add-to-basket animation completes. Initialized to an empty
    // delegate so it can be invoked without a null check.
    public event Action<Product> AddToBasket = delegate {};

    Product CurrentProduct;                    // product being displayed
    ProductSize[] sizeOptions;                 // sizes offered for CurrentProduct
    BottomButtonView BottomView;               // bottom-pinned "add to basket" bar
    ProductColor[] colorOptions;               // colors offered for CurrentProduct
    StringSelectionCell colorCell, sizeCell;   // picker cells, captured for SelectedIndex
    JBKenBurnsView imageView;                  // animated product image view
    UIImage tshirtIcon;                        // button icon, reused by the fly animation

    public ProductDetailViewController (Product product)
    {
        CurrentProduct = product;
        Title = CurrentProduct.Name;
        LoadProductData ();
        // Footer reserves empty space so the last rows can scroll clear of the bottom bar.
        TableView.TableFooterView = new UIView (new RectangleF (0, 0, 0, BottomButtonView.Height));
        View.AddSubview (BottomView = new BottomButtonView () {
            ButtonText = "add_to_basket".t(),
            Button = {
                Image = (tshirtIcon = UIImage.FromBundle("t-shirt")),
            },
            ButtonTapped = async () => await addToBasket ()
        });
    }

    // Plays the fly-to-basket animation (icon + circle backdrop), refreshes the
    // basket bar button, then raises AddToBasket.
    async Task addToBasket()
    {
        // Button-image center expressed in the navigation controller's coordinates,
        // used as the animation start point.
        var center = BottomView.Button.ConvertPointToView (BottomView.Button.ImageView.Center, NavigationController.View);
        // NOTE: this local intentionally shadows the imageView field; it is a
        // throw-away view that only exists for the animation.
        var imageView = new UIImageView (tshirtIcon) {
            Center = center,
            ContentMode = UIViewContentMode.ScaleAspectFill
        };
        var backgroundView = new UIImageView (UIImage.FromBundle("circle")) {
            Center = center,
        };
        NavigationController.View.AddSubview (backgroundView);
        NavigationController.View.AddSubview (imageView);
        // Both views travel the same path concurrently; wait for both to finish.
        await Task.WhenAll (new [] {
            animateView (imageView),
            animateView (backgroundView),
        });
        NavigationItem.RightBarButtonItem = AppDelegate.Shared.CreateBasketButton ();
        AddToBasket (CurrentProduct);
    }

    // Animates 'view' along a quadratic curve to (290, 34) — presumably the basket
    // button's location; TODO confirm — while growing then shrinking it, and removes
    // it from the hierarchy when the 0.5 s group completes.
    async Task animateView(UIView view)
    {
        var size = view.Frame.Size;
        var grow = new SizeF(size.Width * 1.7f, size.Height * 1.7f);
        var shrink = new SizeF(size.Width * .4f, size.Height * .4f);
        TaskCompletionSource<bool> tcs = new TaskCompletionSource<bool> ();
        //Set the animation path
        var pathAnimation = CAKeyFrameAnimation.GetFromKeyPath("position");
        pathAnimation.CalculationMode = CAAnimation.AnimationPaced;
        // Forwards fill + RemovedOnCompletion=false keep the final frame visible
        // until the view itself is removed below.
        pathAnimation.FillMode = CAFillMode.Forwards;
        pathAnimation.RemovedOnCompletion = false;
        pathAnimation.Duration = .5;
        UIBezierPath path = new UIBezierPath ();
        path.MoveTo (view.Center);
        // NOTE(review): the control point mixes view.Center.X with View.Center.Y
        // (the controller's view) — looks deliberate for the curve shape, but confirm.
        path.AddQuadCurveToPoint (new PointF (290, 34), new PointF(view.Center.X,View.Center.Y));
        pathAnimation.Path = path.CGPath;
        //Set size change: quick grow (0.1 s) followed by shrink (0.4 s).
        var growAnimation = CABasicAnimation.FromKeyPath("bounds.size");
        growAnimation.To = NSValue.FromSizeF (grow);
        growAnimation.FillMode = CAFillMode.Forwards;
        growAnimation.Duration = .1;
        growAnimation.RemovedOnCompletion = false;
        var shrinkAnimation = CABasicAnimation.FromKeyPath("bounds.size");
        shrinkAnimation.To = NSValue.FromSizeF (shrink);
        shrinkAnimation.FillMode = CAFillMode.Forwards;
        shrinkAnimation.Duration = .4;
        shrinkAnimation.RemovedOnCompletion = false;
        shrinkAnimation.BeginTime = .1;
        CAAnimationGroup animations = new CAAnimationGroup ();
        animations.FillMode = CAFillMode.Forwards;
        animations.RemovedOnCompletion = false;
        animations.Animations = new CAAnimation[] {
            pathAnimation,
            growAnimation,
            shrinkAnimation,
        };
        animations.Duration = .5;
        // Complete the awaited task when Core Animation reports the group stopped.
        animations.AnimationStopped += (sender, e) => {
            tcs.TrySetResult(true);
        };
        view.Layer.AddAnimation (animations,"movetocart");
        // Tear the view down after the group duration elapses.
        NSTimer.CreateScheduledTimer (.5, () => view.RemoveFromSuperview ());
        await tcs.Task;
    }

    string[] imageUrls = new string[0];  // shuffled product image URLs

    // Builds the table contents: spinner placeholder first, then image header,
    // description and option cells once product data is wired up.
    public void LoadProductData ()
    {
        // Add spinner while loading data.
        TableView.Source = new ProductDetailPageSource (new [] {
            new SpinnerCell(),
        });
        colorOptions = CurrentProduct.Colors;
        sizeOptions = CurrentProduct.Sizes;
        imageUrls = CurrentProduct.ImageUrls.ToArray().Shuffle();
        // Seed the carousel with empty placeholder images; loadImages fills them in.
        imageView = new JBKenBurnsView {
            Frame = new RectangleF (0, -60, 320, 400),
            Images = Enumerable.Range(0,imageUrls.Length).Select(x=> new UIImage()).ToList(),
            UserInteractionEnabled = false,
        };
        loadImages ();
        var productDescriptionView = new ProductDescriptionView (CurrentProduct) {
            Frame = new RectangleF (0, 0, 320, 120),
        };
        TableView.TableHeaderView = new UIView(new RectangleF(0,0,imageView.Frame.Width,imageView.Frame.Bottom)){imageView};
        var tableItems = new List<UITableViewCell> () {
            new CustomViewCell (productDescriptionView),
        };
        tableItems.AddRange (GetOptionsCells ());
        TableView.Source = new ProductDetailPageSource (tableItems.ToArray ());
        TableView.ReloadData ();
    }

    // Fire-and-forget (async void): downloads each image sequentially and swaps it
    // into the carousel's placeholder slot as it arrives.
    async void loadImages()
    {
        for (int i = 0; i < imageUrls.Length; i++) {
            var path = await FileCache.Download (Product.ImageForSize (imageUrls [i], 320 * UIScreen.MainScreen.Scale));
            imageView.Images [i] = UIImage.FromFile (path);
        }
    }

    // Yields the size and color picker cells; each writes the selection back onto
    // CurrentProduct when changed.
    IEnumerable<UITableViewCell> GetOptionsCells ()
    {
        yield return sizeCell = new StringSelectionCell (View) {
            Text = "size".t(),
            Items = sizeOptions.Select (x => x.Description),
            DetailText = CurrentProduct.Size.Description,
            SelectionChanged = () => {
                var size = sizeOptions [sizeCell.SelectedIndex];
                CurrentProduct.Size = size;
            }
        };
        yield return colorCell = new StringSelectionCell (View) {
            Text = "color".t(),
            Items = colorOptions.Select (x => x.Name),
            DetailText = CurrentProduct.Color.Name,
            SelectionChanged = () => {
                var color = colorOptions [colorCell.SelectedIndex];
                CurrentProduct.Color = color;
            },
        };
    }

    public override void ViewWillAppear (bool animated)
    {
        base.ViewWillAppear (animated);
        // Basket badge may have changed while this controller was off screen.
        NavigationItem.RightBarButtonItem = AppDelegate.Shared.CreateBasketButton ();
        imageView.Animate();
        // Scroll so the last row (option cells) is visible above the bottom bar.
        var bottomRow = NSIndexPath.FromRowSection (TableView.NumberOfRowsInSection (0) - 1, 0);
        TableView.ScrollToRow (bottomRow,UITableViewScrollPosition.Top, false);
    }

    public override void ViewDidLayoutSubviews ()
    {
        base.ViewDidLayoutSubviews ();
        // Keep the bottom bar pinned to the bottom edge of the controller's view.
        var bound = View.Bounds;
        bound.Y = bound.Bottom - BottomButtonView.Height;
        bound.Height = BottomButtonView.Height;
        BottomView.Frame = bound;
    }
}
// Table source backed by a fixed, pre-built array of cells: one row per cell,
// row height taken from each cell's own frame.
public class ProductDetailPageSource : UITableViewSource
{
    UITableViewCell[] cells;

    public ProductDetailPageSource (UITableViewCell[] items)
    {
        cells = items;
    }

    public override int RowsInSection (UITableView tableview, int section)
    {
        return cells.Length;
    }

    public override UITableViewCell GetCell (UITableView tableView, MonoTouch.Foundation.NSIndexPath indexPath)
    {
        // Cells are pre-built, so no reuse queue is involved.
        return cells [indexPath.Row];
    }

    public override float GetHeightForRow (UITableView tableView, NSIndexPath indexPath)
    {
        return cells [indexPath.Row].Frame.Height;
    }

    public override void RowSelected (UITableView tableView, NSIndexPath indexPath)
    {
        // Only selection cells react to taps; everything else just deselects.
        var selectionCell = cells [indexPath.Row] as StringSelectionCell;
        if (selectionCell != null)
            selectionCell.Tap ();
        tableView.DeselectRow (indexPath, true);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Microsoft.Win32.SafeHandles;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using Windows.Storage;
namespace System.IO
{
/// <summary>
/// FileStream implementation that forwards to a WinRT-provided stream, adding
/// FileStream-specific semantics on top: flush-to-disk, a placeholder
/// SafeFileHandle, and emulated FileOptions.DeleteOnClose.
/// </summary>
internal sealed class WinRTFileStream : FileStreamBase
{
    // WinRT streams have no OS handle; hand out a permanently-invalid one.
    private static readonly SafeFileHandle s_invalidHandle = new SafeFileHandle(IntPtr.Zero, false);

    private readonly FileAccess _access;       // access requested at open time
    private readonly Stream _innerStream;      // managed adapter over the WinRT stream
    private readonly FileOptions _options;     // open options; only DeleteOnClose is acted on here
    private StorageFile _file;                 // backing file; nulled out on dispose
    private bool _disposed;

    internal WinRTFileStream(Stream innerStream, StorageFile file, FileAccess access, FileOptions options, FileStream parent)
        : base(parent)
    {
        Debug.Assert(innerStream != null);
        Debug.Assert(file != null);

        _access = access;
        _disposed = false;
        _file = file;
        _innerStream = innerStream;
        _options = options;
    }

    // Finalizer ensures DeleteOnClose is still attempted if Dispose was never called.
    ~WinRTFileStream()
    {
        Dispose(false);
    }

    #region FileStream members
    public override bool IsAsync => true;

    public override string Name => _file.Name;

    // No real OS handle exists for WinRT-backed files.
    public override Microsoft.Win32.SafeHandles.SafeFileHandle SafeFileHandle => s_invalidHandle;

    /// <summary>Flushes the stream; when <paramref name="flushToDisk"/> is true, commits to disk.</summary>
    public override void Flush(bool flushToDisk)
    {
        // WinRT streams are not buffered, however the WinRT stream will be wrapped in a BufferedStream.
        // The managed adapter's Flush() is a no-op, while FlushAsync() crosses into WinRT
        // and actually writes to disk — so pick the call based on the request.
        if (flushToDisk)
        {
            _innerStream.FlushAsync().Wait();
        }
        else
        {
            _innerStream.Flush();
        }
    }
    #endregion

    #region Stream members
    #region Properties
    // WinRT doesn't support write-only streams, so combine the inner stream's
    // capability with the access that was actually requested.
    public override bool CanRead => _innerStream.CanRead && (_access & FileAccess.Read) != 0;

    public override bool CanSeek => _innerStream.CanSeek;

    public override bool CanWrite => _innerStream.CanWrite;

    public override long Length => _innerStream.Length;

    public override long Position
    {
        get { return _innerStream.Position; }
        set { _innerStream.Position = value; }
    }

    public override int ReadTimeout
    {
        get { return _innerStream.ReadTimeout; }
        set { _innerStream.ReadTimeout = value; }
    }

    public override bool CanTimeout => _innerStream.CanTimeout;

    public override int WriteTimeout
    {
        get { return _innerStream.WriteTimeout; }
        set { _innerStream.WriteTimeout = value; }
    }
    #endregion Properties

    #region Methods
    public override Task CopyToAsync(Stream destination, int bufferSize, CancellationToken cancellationToken) =>
        _innerStream.CopyToAsync(destination, bufferSize, cancellationToken);

    protected override void Dispose(bool disposing)
    {
        try
        {
            if (disposing)
            {
                _innerStream.Dispose();
            }

            // Emulate FileOptions.DeleteOnClose: WinRT has no direct equivalent, so
            // delete the backing StorageFile best-effort. This can fail when
            // 1) the current user lacks delete permission, or
            // 2) the storage file RCW has already been disconnected.
            if (_file != null && (_options & FileOptions.DeleteOnClose) != 0)
            {
                try
                {
                    _file.DeleteAsync().AsTask().Wait();
                }
                catch { }
            }

            _disposed = true;
            _file = null;
        }
        finally
        {
            base.Dispose(disposing);
        }
    }

    // Plain Flush is a buffer flush only (no disk commit); route through the parent.
    public override void Flush() => _parent.Flush(false);

    public override Task FlushAsync(CancellationToken cancellationToken) =>
        _innerStream.FlushAsync(cancellationToken);

    public override int Read(byte[] buffer, int offset, int count)
    {
        // Enforce the requested FileAccess (inner WinRT stream is always readable);
        // after dispose, fall through so the inner stream throws ObjectDisposed.
        if (!_disposed && !CanRead)
            throw Error.GetReadNotSupported();

        return _innerStream.Read(buffer, offset, count);
    }

    public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
    {
        if (!_disposed && !CanRead)
            throw Error.GetReadNotSupported();

        return _innerStream.ReadAsync(buffer, offset, count, cancellationToken);
    }

    public override int ReadByte()
    {
        if (!_disposed && !CanRead)
            throw Error.GetReadNotSupported();

        return _innerStream.ReadByte();
    }

    public override long Seek(long offset, SeekOrigin origin)
    {
        // Match classic FileStream behavior: reject negative absolute offsets up front.
        if (origin == SeekOrigin.Begin && offset < 0)
            throw Win32Marshal.GetExceptionForWin32Error(Interop.mincore.Errors.ERROR_NEGATIVE_SEEK);

        return _innerStream.Seek(offset, origin);
    }

    public override void SetLength(long value)
    {
        _innerStream.SetLength(value);

        // WinRT ignores all errors when setting length, so verify afterwards.
        if (_innerStream.Length < value)
        {
            throw new ArgumentOutOfRangeException(nameof(value), SR.ArgumentOutOfRange_FileLengthTooBig);
        }
        else if (_innerStream.Length != value)
        {
            throw new ArgumentException(SR.Argument_FileNotResized, nameof(value));
        }

        // WinRT doesn't update the position when truncating a file; clamp it ourselves.
        if (value < _innerStream.Position)
            _innerStream.Position = value;
    }

    public override string ToString() => _innerStream.ToString();

    public override void Write(byte[] buffer, int offset, int count) =>
        _innerStream.Write(buffer, offset, count);

    public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) =>
        _innerStream.WriteAsync(buffer, offset, count, cancellationToken);

    public override void WriteByte(byte value) => _innerStream.WriteByte(value);
    #endregion Methods
    #endregion Stream members
}
}
| |
using UnityEngine;
using UnityEngine.Serialization;
using System.Collections.Generic;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace Anima2D
{
// Renders a SpriteMesh asset through a MeshFilter/Renderer or SkinnedMeshRenderer,
// keeping a private writable copy of the asset's mesh in sync and pushing color,
// sorting and texture state to the renderer each frame.
[ExecuteInEditMode]
public class SpriteMeshInstance : MonoBehaviour
{
    [SerializeField][FormerlySerializedAs("spriteMesh")]
    SpriteMesh m_SpriteMesh;
    [SerializeField]
    Color m_Color = Color.white;
    [SerializeField]
    Material[] m_Materials;
    [SerializeField]
    int m_SortingLayerID = 0;
    [SerializeField][FormerlySerializedAs("orderInLayer")]
    int m_SortingOrder = 0;
    // Deprecated direct Bone2D storage; migrated to m_BoneTransforms by the
    // 'bones' getter on first access.
    [SerializeField][HideInInspector][FormerlySerializedAs("bones")]
    Bone2D[] m_Bones;
    [SerializeField][HideInInspector]
    Transform[] m_BoneTransforms;

    public SpriteMesh spriteMesh {
        get { return m_SpriteMesh; }
        set { m_SpriteMesh = value; }
    }

    // First shared material, or null when none are assigned.
    public Material sharedMaterial {
        get {
            if(m_Materials.Length > 0)
            {
                return m_Materials[0];
            }
            return null;
        }
        set {
            m_Materials = new Material[] { value };
        }
    }

    public Material[] sharedMaterials {
        get { return m_Materials; }
        set { m_Materials = value; }
    }

    // Tint pushed to the renderer via MaterialPropertyBlock in OnWillRenderObject.
    public Color color {
        get { return m_Color; }
        set { m_Color = value; }
    }

    public int sortingLayerID {
        get { return m_SortingLayerID; }
        set { m_SortingLayerID = value; }
    }

    // Reads from / writes through the renderer; falls back to "Default" when
    // no renderer component is present.
    public string sortingLayerName {
        get {
            if(cachedRenderer)
            {
                return cachedRenderer.sortingLayerName;
            }
            return "Default";
        }
        set {
            if(cachedRenderer)
            {
                cachedRenderer.sortingLayerName = value;
                sortingLayerID = cachedRenderer.sortingLayerID;
            }
        }
    }

    public int sortingOrder {
        get { return m_SortingOrder; }
        set { m_SortingOrder = value; }
    }

    List<Bone2D> m_CachedBones = new List<Bone2D>();

    // Bone list resolved from the serialized transforms. The getter lazily
    // migrates the deprecated m_Bones array, rebuilds the cache when its size
    // drifts from m_BoneTransforms, and re-resolves entries whose cached
    // component no longer matches its transform.
    public List<Bone2D> bones {
        get {
            //DEPRECATED: m_Bones
            if(m_Bones != null && m_Bones.Length > 0 && m_CachedBones.Count == 0)
            {
                // One-time migration: route through the setter so m_BoneTransforms is rebuilt.
                bones = new List<Bone2D>(m_Bones);
            }
            if(m_BoneTransforms != null && m_CachedBones.Count != m_BoneTransforms.Length)
            {
                m_CachedBones = new List<Bone2D>(m_BoneTransforms.Length);
                for (int i = 0; i < m_BoneTransforms.Length; i++)
                {
                    Bone2D l_Bone = null;
                    if(m_BoneTransforms[i])
                    {
                        l_Bone = m_BoneTransforms[i].GetComponent<Bone2D>();
                    }
                    m_CachedBones.Add(l_Bone);
                }
            }
            // Repair stale entries (component replaced or transform changed).
            for (int i = 0; i < m_CachedBones.Count; i++)
            {
                if(m_CachedBones[i] && m_BoneTransforms[i] != m_CachedBones[i].transform)
                {
                    m_CachedBones[i] = null;
                }
                if(!m_CachedBones[i] && m_BoneTransforms[i])
                {
                    m_CachedBones[i] = m_BoneTransforms[i].GetComponent<Bone2D>();
                }
            }
            return m_CachedBones;
        }
        set {
            // Transforms become the single source of truth; deprecated storage is dropped.
            m_Bones = null;
            m_CachedBones = new List<Bone2D>(value);
            m_BoneTransforms = new Transform[m_CachedBones.Count];
            for (int i = 0; i < m_CachedBones.Count; i++)
            {
                Bone2D bone = m_CachedBones[i];
                if(bone)
                {
                    m_BoneTransforms[i] = bone.transform;
                }
            }
            if(cachedSkinnedRenderer)
            {
                cachedSkinnedRenderer.bones = m_BoneTransforms;
            }
        }
    }

    MaterialPropertyBlock m_MaterialPropertyBlock;
    // Lazily created; reused every frame to avoid per-frame allocations.
    MaterialPropertyBlock materialPropertyBlock {
        get {
            if(m_MaterialPropertyBlock == null)
            {
                m_MaterialPropertyBlock = new MaterialPropertyBlock();
            }
            return m_MaterialPropertyBlock;
        }
    }

    Renderer mCachedRenderer;
    // GetComponent results cached below; Unity's lifeless-object bool conversion
    // makes the '!field' checks re-fetch after the component is destroyed.
    public Renderer cachedRenderer {
        get {
            if(!mCachedRenderer)
            {
                mCachedRenderer = GetComponent<Renderer>();
            }
            return mCachedRenderer;
        }
    }

    MeshFilter mCachedMeshFilter;
    public MeshFilter cachedMeshFilter {
        get {
            if(!mCachedMeshFilter)
            {
                mCachedMeshFilter = GetComponent<MeshFilter>();
            }
            return mCachedMeshFilter;
        }
    }

    SkinnedMeshRenderer mCachedSkinnedRenderer;
    public SkinnedMeshRenderer cachedSkinnedRenderer {
        get {
            if(!mCachedSkinnedRenderer)
            {
                mCachedSkinnedRenderer = GetComponent<SkinnedMeshRenderer>();
            }
            return mCachedSkinnedRenderer;
        }
    }

    // Texture of the assigned sprite, if any; bound as _MainTex each frame.
    Texture2D spriteTexture {
        get {
            if(spriteMesh && spriteMesh.sprite)
            {
                return spriteMesh.sprite.texture;
            }
            return null;
        }
    }

    Mesh m_InitialMesh = null;   // the asset's shared mesh as last synced
    Mesh m_CurrentMesh = null;   // private writable copy actually rendered

    // The asset mesh this instance was synced from (null when nothing assigned).
    public Mesh sharedMesh {
        get {
            if(m_InitialMesh)
            {
                return m_InitialMesh;
            }
            return null;
        }
    }

    // A fresh copy of the current mesh; the caller owns the returned instance.
    public Mesh mesh {
        get {
            if(m_CurrentMesh)
            {
                return GameObject.Instantiate(m_CurrentMesh);
            }
            return null;
        }
    }

#if UNITY_EDITOR
    // Import timestamp of the SpriteMesh asset; used to detect editor re-imports.
    ulong m_AssetTimeStamp = 0;
#endif

    void OnDestroy()
    {
        // m_CurrentMesh is created at runtime (HideFlags.DontSave), so it must be
        // destroyed explicitly to avoid leaking.
        if(m_CurrentMesh)
        {
#if UNITY_EDITOR
            if(Application.isPlaying)
            {
                Destroy(m_CurrentMesh);
            }else{
                DestroyImmediate(m_CurrentMesh);
            }
#else
            Destroy(m_CurrentMesh);
#endif
        }
    }

#if UNITY_EDITOR
    // Records the asset's current import timestamp (edit mode only).
    void UpdateTimestamp()
    {
        if(!Application.isPlaying && spriteMesh)
        {
            m_AssetTimeStamp = AssetImporter.GetAtPath(AssetDatabase.GetAssetPath(spriteMesh)).assetTimeStamp;
        }
    }
#endif

    void Awake()
    {
#if UNITY_EDITOR
        UpdateTimestamp();
#endif
        UpdateCurrentMesh();
    }

    // Refreshes m_InitialMesh from the assigned SpriteMesh asset.
    void UpdateInitialMesh()
    {
        m_InitialMesh = null;
        if(spriteMesh && spriteMesh.sharedMesh)
        {
            m_InitialMesh = spriteMesh.sharedMesh;
        }
    }

    // Rebuilds the private mesh copy from the asset mesh (vertices, skinning,
    // submeshes, blend shapes), applies atlas UVs for packed sprites, uploads
    // the data and re-targets the renderers.
    void UpdateCurrentMesh()
    {
        UpdateInitialMesh();
        if(m_InitialMesh)
        {
            if(!m_CurrentMesh)
            {
                m_CurrentMesh = new Mesh();
                m_CurrentMesh.hideFlags = HideFlags.DontSave;
                m_CurrentMesh.MarkDynamic();
            }
            m_CurrentMesh.Clear();
            m_CurrentMesh.name = m_InitialMesh.name;
            m_CurrentMesh.vertices = m_InitialMesh.vertices;
            m_CurrentMesh.normals = m_InitialMesh.normals;
            m_CurrentMesh.boneWeights = m_InitialMesh.boneWeights;
            m_CurrentMesh.bindposes = m_InitialMesh.bindposes;
            m_CurrentMesh.uv = m_InitialMesh.uv;
            m_CurrentMesh.bounds = m_InitialMesh.bounds;
            m_CurrentMesh.colors = m_InitialMesh.colors;
            for(int i = 0; i < m_InitialMesh.subMeshCount; ++i)
            {
                m_CurrentMesh.SetTriangles(m_InitialMesh.GetTriangles(i),i);
            }
            // Blend-shape API is unavailable before Unity 5.3.
#if !(UNITY_5_0 || UNITY_5_1 || UNITY_5_2)
            m_CurrentMesh.ClearBlendShapes();
            for(int i = 0; i < m_InitialMesh.blendShapeCount; ++i)
            {
                string blendshapeName = m_InitialMesh.GetBlendShapeName(i);
                for(int j = 0; j < m_InitialMesh.GetBlendShapeFrameCount(i); ++j)
                {
                    float weight = m_InitialMesh.GetBlendShapeFrameWeight(i,j);
                    Vector3[] vertices = new Vector3[m_InitialMesh.vertexCount];
                    m_InitialMesh.GetBlendShapeFrameVertices(i,j,vertices,null,null);
                    m_CurrentMesh.AddBlendShapeFrame(blendshapeName, weight, vertices, null, null);
                }
            }
#endif
            m_CurrentMesh.hideFlags = HideFlags.DontSave;
        }else{
            // No asset mesh: keep the copy around but empty it.
            m_InitialMesh = null;
            if(m_CurrentMesh)
            {
                m_CurrentMesh.Clear();
            }
        }
        if(m_CurrentMesh)
        {
            // Packed sprites need the atlas UVs instead of the asset's own UVs.
            if(spriteMesh && spriteMesh.sprite && spriteMesh.sprite.packed)
            {
                SetSpriteUVs(m_CurrentMesh,spriteMesh.sprite);
            }
            // 'false' keeps the mesh readable so it can be rebuilt again later.
            m_CurrentMesh.UploadMeshData(false);
        }
        UpdateRenderers();
#if UNITY_EDITOR
        UpdateTimestamp();
#endif
    }

    // Copies the sprite's (possibly atlas-remapped) UVs onto the mesh when the
    // vertex counts agree.
    void SetSpriteUVs(Mesh mesh, Sprite sprite)
    {
        Vector2[] spriteUVs = sprite.uv;
        if(mesh.vertexCount == spriteUVs.Length)
        {
            mesh.uv = sprite.uv;
        }
    }

    // Points whichever renderer is present (skinned preferred) at the current mesh.
    void UpdateRenderers()
    {
        Mesh l_mesh = null;
        if(m_InitialMesh)
        {
            l_mesh = m_CurrentMesh;
        }
        if(cachedSkinnedRenderer)
        {
            cachedSkinnedRenderer.sharedMesh = l_mesh;
        }else if(cachedMeshFilter)
        {
            cachedMeshFilter.sharedMesh = l_mesh;
        }
    }

    void LateUpdate()
    {
        // Re-sync when the asset was swapped or its shared mesh was replaced.
        if(!spriteMesh || (spriteMesh && spriteMesh.sharedMesh != m_InitialMesh))
        {
            UpdateCurrentMesh();
        }
#if UNITY_EDITOR
        // In edit mode, also re-sync when the asset was re-imported.
        if(!Application.isPlaying && spriteMesh)
        {
            ulong l_AssetTimeStamp = AssetImporter.GetAtPath(AssetDatabase.GetAssetPath(spriteMesh)).assetTimeStamp;
            if(m_AssetTimeStamp != l_AssetTimeStamp)
            {
                UpdateCurrentMesh();
            }
        }
#endif
    }

    // Pushes per-instance render state (sorting, materials, texture, tint) right
    // before any camera renders this object.
    void OnWillRenderObject()
    {
        UpdateRenderers();
        if(cachedRenderer)
        {
            cachedRenderer.sortingLayerID = sortingLayerID;
            cachedRenderer.sortingOrder = sortingOrder;
            // Explicit materials win; otherwise fall back to the asset's materials.
            if(m_Materials != null && m_Materials.Length > 0)
            {
                cachedRenderer.sharedMaterials = m_Materials;
            }else if(spriteMesh && spriteMesh.sharedMaterials != null)
            {
                cachedRenderer.sharedMaterials = spriteMesh.sharedMaterials;
            }
            if(materialPropertyBlock != null)
            {
                if(spriteTexture)
                {
                    materialPropertyBlock.SetTexture("_MainTex", spriteTexture);
                }
                materialPropertyBlock.SetColor("_Color",color);
                cachedRenderer.SetPropertyBlock(materialPropertyBlock);
            }
        }
    }

#if UNITY_EDITOR
    void OnRenderObject()
    {
        //Restore materials to preserve previews
        if(cachedRenderer && spriteMesh && spriteMesh.sharedMaterials != null)
        {
            cachedRenderer.sharedMaterials = spriteMesh.sharedMaterials;
        }
    }
#endif
}
}
| |
/*
* @author Valentin Simonov / http://va.lent.in/
*/
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using TouchScript.Hit;
using TouchScript.Layers;
using TouchScript.Utils;
using TouchScript.Utils.Attributes;
using UnityEngine;
namespace TouchScript.Gestures
{
/// <summary>
/// Base class for all gestures
/// </summary>
public abstract class Gesture : MonoBehaviour
{
#region Constants
/// <summary>
/// Message sent when gesture changes state if SendMessage is used.
/// </summary>
public const string STATE_CHANGE_MESSAGE = "OnGestureStateChange";
/// <summary>
/// Possible states of a gesture.
/// </summary>
public enum GestureState
{
    /// <summary>
    /// Gesture is possible.
    /// </summary>
    Possible,
    /// <summary>
    /// Continuous gesture has just begun.
    /// </summary>
    Began,
    /// <summary>
    /// Started continuous gesture is updated.
    /// </summary>
    Changed,
    /// <summary>
    /// Continuous gesture is ended.
    /// </summary>
    Ended,
    /// <summary>
    /// Gesture is cancelled.
    /// </summary>
    Cancelled,
    /// <summary>
    /// Gesture is failed by itself or by another recognized gesture.
    /// </summary>
    Failed,
    /// <summary>
    /// Gesture is recognized. Alias of <see cref="Ended"/>: discrete gestures use
    /// Recognized, continuous ones Ended — a switch case on one handles both.
    /// </summary>
    Recognized = Ended
}
#endregion
#region Events
/// <summary>
/// Occurs when gesture changes state.
/// </summary>
public event EventHandler<GestureStateChangeEventArgs> StateChanged
{
    // Explicit add/remove route subscriptions into a plain delegate field.
    add { stateChangedInvoker += value; }
    remove { stateChangedInvoker -= value; }
}

// Needed to overcome iOS AOT limitations
private EventHandler<GestureStateChangeEventArgs> stateChangedInvoker;
#endregion
#region Public properties
/// <summary>
/// Gets or sets another gesture which must fail before this gesture can be recognized.
/// </summary>
/// <value>
/// The gesture which must fail before this gesture can be recognized;
/// </value>
public Gesture RequireGestureToFail
{
    get { return requireGestureToFail; }
    set
    {
        // Move the state-change subscription from the old required gesture to the new one.
        Gesture previous = requireGestureToFail;
        if (previous != null) previous.StateChanged -= requiredToFailGestureStateChangedHandler;
        requireGestureToFail = value;
        if (value != null) value.StateChanged += requiredToFailGestureStateChangedHandler;
    }
}
/// <summary>
/// Gets or sets the flag if touches should be treated as a cluster.
/// </summary>
/// <value><c>true</c> if touches should be treated as a cluster; otherwise, <c>false</c>.</value>
/// <remarks>
/// At the end of a gesture when touches are lifted off due to the fact that computers are faster than humans the very last touch's position will be gesture's <see cref="ScreenPosition"/> after that. This flag is used to combine several touches which from the point of a user were lifted off simultaneously and set their centroid as gesture's <see cref="ScreenPosition"/>.
/// </remarks>
public bool CombineTouches
{
    // Backed by the serialized field 'combineTouches' (editor-tweakable).
    get { return combineTouches; }
    set { combineTouches = value; }
}
/// <summary>
/// Gets or sets time interval before gesture is recognized to combine all lifted touch points into a cluster to use its center as <see cref="ScreenPosition"/>.
/// </summary>
/// <value>Time in seconds to treat touches lifted off during this interval as a single gesture.</value>
public float CombineTouchesInterval
{
    // Backed by the serialized field 'combineTouchesInterval' (defaults to 0.3 s).
    get { return combineTouchesInterval; }
    set { combineTouchesInterval = value; }
}
/// <summary>
/// Gets or sets whether gesture should use Unity's SendMessage in addition to C# events.
/// </summary>
/// <value><c>true</c> if gesture uses SendMessage; otherwise, <c>false</c>.</value>
public bool UseSendMessage
{
    // Backed by the serialized field 'useSendMessage'; consulted in the State setter.
    get { return useSendMessage; }
    set { useSendMessage = value; }
}
/// <summary>
/// Gets or sets a value indicating whether state change events are broadcasted if <see cref="UseSendMessage"/> is true..
/// </summary>
/// <value><c>true</c> if state change events should be broadcaster; otherwise, <c>false</c>.</value>
public bool SendStateChangeMessages
{
    // Backed by the serialized field 'sendStateChangeMessages'; only effective
    // together with UseSendMessage (see the State setter).
    get { return sendStateChangeMessages; }
    set { sendStateChangeMessages = value; }
}
/// <summary>
/// Gets or sets the target of Unity messages sent from this gesture.
/// </summary>
/// <value>The target of Unity messages; setting null targets the gesture's own GameObject.</value>
public GameObject SendMessageTarget
{
    get { return sendMessageTarget; }
    set
    {
        // Null means "send messages to my own GameObject".
        sendMessageTarget = (value != null) ? value : gameObject;
    }
}
/// <summary>
/// Gets current gesture state.
/// </summary>
/// <value>Current state of the gesture.</value>
public GestureState State
{
    get { return state; }
    private set
    {
        // Remember where we came from so listeners can inspect the transition.
        PreviousState = state;
        state = value;
        // Run the per-state hook first, then notify listeners.
        // Note: Recognized == Ended, so this case also fires for Ended.
        switch (value)
        {
            case GestureState.Possible:
                onPossible();
                break;
            case GestureState.Began:
                onBegan();
                break;
            case GestureState.Changed:
                onChanged();
                break;
            case GestureState.Recognized:
                onRecognized();
                break;
            case GestureState.Failed:
                onFailed();
                break;
            case GestureState.Cancelled:
                onCancelled();
                break;
        }
        // NOTE(review): invoked even with no subscribers — presumably
        // InvokeHandleExceptions is null-safe; confirm in its implementation.
        stateChangedInvoker.InvokeHandleExceptions(this, new GestureStateChangeEventArgs(state, PreviousState));
        // Optionally mirror the notification through Unity's SendMessage pipeline.
        if (useSendMessage && sendStateChangeMessages && SendMessageTarget != null) sendMessageTarget.SendMessage(STATE_CHANGE_MESSAGE, this, SendMessageOptions.DontRequireReceiver);
    }
}
/// <summary>
/// Gets previous gesture state.
/// </summary>
/// <value>Previous state of the gesture; updated by the <see cref="State"/> setter on every transition.</value>
public GestureState PreviousState { get; private set; }
/// <summary>
/// Gets current screen position.
/// </summary>
/// <value>Gesture's position in screen coordinates: the centroid of active touches,
/// the cached position after all touches lifted off, or INVALID_POSITION.</value>
public virtual Vector2 ScreenPosition
{
    get
    {
        // With live touches the position is their 2D centroid.
        if (activeTouches.Count > 0) return ClusterUtils.Get2DCenterPosition(activeTouches);
        // No touches left: fall back to the position cached at lift-off, if valid.
        if (TouchManager.IsInvalidPosition(cachedScreenPosition)) return TouchManager.INVALID_POSITION;
        return cachedScreenPosition;
    }
}
/// <summary>
/// Gets previous screen position.
/// </summary>
/// <value>Gesture's previous position in screen coordinates: the previous centroid of
/// active touches, the cached previous position after lift-off, or INVALID_POSITION.</value>
public virtual Vector2 PreviousScreenPosition
{
    get
    {
        // With live touches, use the centroid of their previous positions.
        if (activeTouches.Count > 0) return ClusterUtils.GetPrevious2DCenterPosition(activeTouches);
        // No touches left: fall back to the cached previous position, if valid.
        if (TouchManager.IsInvalidPosition(cachedPreviousScreenPosition)) return TouchManager.INVALID_POSITION;
        return cachedPreviousScreenPosition;
    }
}
/// <summary>
/// Gets normalized screen position.
/// </summary>
/// <value>Gesture's position in normalized (0..1) screen coordinates, or INVALID_POSITION.</value>
public Vector2 NormalizedScreenPosition
{
    get
    {
        var screenPoint = ScreenPosition;
        // Propagate the invalid marker untouched; otherwise scale by the screen size.
        return TouchManager.IsInvalidPosition(screenPoint)
            ? TouchManager.INVALID_POSITION
            : new Vector2(screenPoint.x / Screen.width, screenPoint.y / Screen.height);
    }
}
/// <summary>
/// Gets previous screen position.
/// </summary>
/// <value>Gesture's previous position in normalized (0..1) screen coordinates, or INVALID_POSITION.</value>
public Vector2 PreviousNormalizedScreenPosition
{
    get
    {
        var screenPoint = PreviousScreenPosition;
        // Propagate the invalid marker untouched; otherwise scale by the screen size.
        return TouchManager.IsInvalidPosition(screenPoint)
            ? TouchManager.INVALID_POSITION
            : new Vector2(screenPoint.x / Screen.width, screenPoint.y / Screen.height);
    }
}
/// <summary>
/// Gets list of gesture's active touch points.
/// </summary>
/// <value>A read-only view over the touches owned by this gesture.</value>
public IList<ITouch> ActiveTouches
{
    get
    {
        // Read-only wrapper so callers cannot mutate the gesture's touch list.
        return activeTouches.AsReadOnly();
    }
}
/// <summary>
/// An object implementing <see cref="IGestureDelegate"/> to be asked for gesture specific actions.
/// Optional; may be null.
/// </summary>
public IGestureDelegate Delegate { get; set; }
#endregion
#region Private variables
/// <summary>
/// Reference to global GestureManager.
/// </summary>
protected IGestureManager gestureManager
{
    // implemented as a property because it returns IGestureManager but we need to reference GestureManagerInstance to access internal methods
    get { return gestureManagerInstance; }
}
/// <summary>
/// Reference to global TouchManager. Set internally; read-only for subclasses.
/// </summary>
protected ITouchManager touchManager { get; private set; }
/// <summary>
/// Touch points the gesture currently owns and works with.
/// Exposed read-only to callers through <see cref="ActiveTouches"/>.
/// </summary>
protected List<ITouch> activeTouches = new List<ITouch>();

/// <summary>
/// Cached transform of the parent object.
/// </summary>
protected Transform cachedTransform;
#pragma warning disable 0169

[SerializeField]
private bool advancedProps; // is used to save if advanced properties are opened or closed

#pragma warning restore 0169

// Backing fields for the public properties above; serialized for the Unity inspector.
[SerializeField]
[ToggleLeft]
private bool combineTouches = false;

[SerializeField]
private float combineTouchesInterval = .3f;

[SerializeField]
[ToggleLeft]
private bool useSendMessage = false;

[SerializeField]
[ToggleLeft]
private bool sendStateChangeMessages = false;

[SerializeField]
private GameObject sendMessageTarget;

[SerializeField]
[NullToggle]
private Gesture requireGestureToFail;

[SerializeField]
// Serialized list of gestures for Unity IDE.
private List<Gesture> friendlyGestures = new List<Gesture>();

// List of gestures for realtime.
// Stored as Unity instance ids; queried by IsFriendly.
private List<int> friendlyGestureIds = new List<int>();

private TimedSequence<ITouch> touchSequence = new TimedSequence<ITouch>();
private GestureManagerInstance gestureManagerInstance;
// State transition postponed while waiting on a required-to-fail gesture.
private GestureState delayedStateChange = GestureState.Possible;
private bool requiredGestureFailed = false;
private GestureState state = GestureState.Possible;

/// <summary>
/// Cached screen position.
/// Used to keep tap's position which can't be calculated from touch points when the gesture is recognized since all touch points are gone.
/// </summary>
private Vector2 cachedScreenPosition;

/// <summary>
/// Cached previous screen position.
/// Used to keep tap's position which can't be calculated from touch points when the gesture is recognized since all touch points are gone.
/// </summary>
private Vector2 cachedPreviousScreenPosition;
#endregion
#region Public methods
/// <summary>
/// Registers another gesture as friendly, so both can be recognized simultaneously.
/// The relation is registered on both gestures.
/// </summary>
/// <param name="gesture">The gesture to befriend.</param>
public virtual void AddFriendlyGesture(Gesture gesture)
{
    if (gesture == null) return;
    if (gesture == this) return;
    registerFriendlyGesture(gesture);
    gesture.registerFriendlyGesture(this);
}
/// <summary>
/// Checks whether another gesture has been registered as friendly with this one.
/// </summary>
/// <param name="gesture">A gesture to check.</param>
/// <returns>True if gestures are friendly; false otherwise.</returns>
public bool IsFriendly(Gesture gesture)
{
    var id = gesture.GetInstanceID();
    return friendlyGestureIds.Contains(id);
}
/// <summary>
/// Casts a ray from the gesture's touch points' centroid screen position at its target.
/// </summary>
/// <returns>true if the ray hits the gesture's target; otherwise, false.</returns>
public virtual bool GetTargetHitResult()
{
    ITouchHit dummy;
    return GetTargetHitResult(ScreenPosition, out dummy);
}
/// <summary>
/// Casts a ray from the gesture's touch points' centroid screen position at its target.
/// </summary>
/// <param name="hit">Raycast result</param>
/// <returns>true if the ray hits the gesture's target; otherwise, false.</returns>
public virtual bool GetTargetHitResult(out ITouchHit hit)
{
    return GetTargetHitResult(ScreenPosition, out hit);
}
/// <summary>
/// Casts a ray from a specific screen position at the gesture's target.
/// </summary>
/// <param name="position">The position.</param>
/// <returns>true if the ray hits the gesture's target; otherwise, false.</returns>
public virtual bool GetTargetHitResult(Vector2 position)
{
    ITouchHit dummy;
    return GetTargetHitResult(position, out dummy);
}
/// <summary>
/// Casts a ray from a specific screen position at the gesture's target.
/// </summary>
/// <param name="position">The position.</param>
/// <param name="hit">Raycast result.</param>
/// <returns>true if the ray hits the gesture's target; otherwise, false.</returns>
public virtual bool GetTargetHitResult(Vector2 position, out ITouchHit hit)
{
    TouchLayer layer = null;
    // No hit at all — the target was certainly not touched.
    if (!touchManager.GetHitTarget(position, out hit, out layer)) return false;
    // The hit counts when it lands on this transform or any of its children.
    var hitTransform = hit.Transform;
    return hitTransform == cachedTransform || hitTransform.IsChildOf(cachedTransform);
}
/// <summary>
/// Determines whether gesture controls a touch point.
/// </summary>
/// <param name="touch">The touch.</param>
/// <returns>
/// <c>true</c> if gesture controls the touch point; otherwise, <c>false</c>.
/// </returns>
public bool HasTouch(ITouch touch)
{
    return activeTouches.IndexOf(touch) != -1;
}
/// <summary>
/// Determines whether this instance can prevent the specified gesture.
/// </summary>
/// <param name="gesture">The gesture.</param>
/// <returns>
/// <c>true</c> if this instance can prevent the specified gesture; otherwise, <c>false</c>.
/// </returns>
public virtual bool CanPreventGesture(Gesture gesture)
{
    // A delegate, when present, has the final say.
    if (Delegate != null) return !Delegate.ShouldRecognizeSimultaneously(this, gesture);
    // Otherwise only non-friendly gestures which allow prevention can be prevented.
    if (!gesture.CanBePreventedByGesture(this)) return false;
    return !IsFriendly(gesture);
}
/// <summary>
/// Determines whether this instance can be prevented by specified gesture.
/// </summary>
/// <param name="gesture">The gesture.</param>
/// <returns>
/// <c>true</c> if this instance can be prevented by specified gesture; otherwise, <c>false</c>.
/// </returns>
public virtual bool CanBePreventedByGesture(Gesture gesture)
{
    // A delegate, when present, has the final say; otherwise friendly gestures can't prevent us.
    if (Delegate != null) return !Delegate.ShouldRecognizeSimultaneously(this, gesture);
    return !IsFriendly(gesture);
}
/// <summary>
/// Specifies if gesture can receive this specific touch point.
/// </summary>
/// <param name="touch">The touch.</param>
/// <returns><c>true</c> if this touch should be received by the gesture; otherwise, <c>false</c>.</returns>
public virtual bool ShouldReceiveTouch(ITouch touch)
{
    // Without a delegate every touch is accepted.
    return Delegate == null || Delegate.ShouldReceiveTouch(this, touch);
}
/// <summary>
/// Specifies if gesture can begin or recognize.
/// </summary>
/// <returns><c>true</c> if gesture should begin; otherwise, <c>false</c>.</returns>
public virtual bool ShouldBegin()
{
    // Without a delegate the gesture may always begin.
    return Delegate == null || Delegate.ShouldBegin(this);
}
#endregion
#region Unity methods
/// <summary>
/// Unity Awake handler.
/// </summary>
protected virtual void Awake()
{
    cachedTransform = GetComponent<Transform>();
    // Wire up the friendship relations configured in the inspector.
    for (var i = 0; i < friendlyGestures.Count; i++)
    {
        AddFriendlyGesture(friendlyGestures[i]);
    }
    RequireGestureToFail = requireGestureToFail;
}
/// <summary>
/// Unity OnEnable handler. Re-resolves manager singletons and resets the gesture.
/// </summary>
protected virtual void OnEnable()
{
    // TouchManager might be different in another scene
    touchManager = TouchManager.Instance;
    gestureManagerInstance = GestureManager.Instance as GestureManagerInstance;
    if (touchManager == null) Debug.LogError("No TouchManager found! Please add an instance of TouchManager to the scene!");
    // Fixed typo in the user-facing error message ("GesturehManager" -> "GestureManager").
    if (gestureManagerInstance == null) Debug.LogError("No GestureManager found! Please add an instance of GestureManager to the scene!");
    // Messages go to this game object unless a target was explicitly assigned.
    if (sendMessageTarget == null) sendMessageTarget = gameObject;
    Reset();
}
/// <summary>
/// Unity3d OnDisable handler.
/// </summary>
protected virtual void OnDisable()
{
// Force the gesture to fail so it releases its touches and resets while disabled.
setState(GestureState.Failed);
}
/// <summary>
/// Unity OnDestroy handler.
/// </summary>
protected virtual void OnDestroy()
{
    // Iterate a snapshot because RemoveFriendlyGesture mutates friendlyGestures.
    var snapshot = new List<Gesture>(friendlyGestures);
    for (var i = 0; i < snapshot.Count; i++)
    {
        RemoveFriendlyGesture(snapshot[i]);
    }
    RequireGestureToFail = null;
}
#endregion
#region Internal functions
/// <summary>
/// Changes gesture state through <see cref="setState"/>.
/// </summary>
/// <param name="value">New state.</param>
internal void SetState(GestureState value)
{
setState(value);
}
/// <summary>
/// Resets the gesture: clears owned touches, drops any delayed state change and invokes <see cref="reset"/>.
/// </summary>
internal void Reset()
{
activeTouches.Clear();
delayedStateChange = GestureState.Possible;
requiredGestureFailed = false;
reset();
}
/// <summary>
/// Takes ownership of new touches and forwards them to the <see cref="touchesBegan"/> callback.
/// </summary>
/// <param name="touches">The touches.</param>
internal void TouchesBegan(IList<ITouch> touches)
{
activeTouches.AddRange(touches);
touchesBegan(touches);
}
/// <summary>
/// Forwards moved touches to the <see cref="touchesMoved"/> callback.
/// </summary>
/// <param name="touches">The touches.</param>
internal void TouchesMoved(IList<ITouch> touches)
{
touchesMoved(touches);
}
/// <summary>
/// Releases ended touches and forwards them to the <see cref="touchesEnded"/> callback.
/// </summary>
/// <param name="touches">The touches.</param>
internal void TouchesEnded(IList<ITouch> touches)
{
for (var i = 0; i < touches.Count; i++) activeTouches.Remove(touches[i]);
touchesEnded(touches);
}
/// <summary>
/// Releases cancelled touches and forwards them to the <see cref="touchesCancelled"/> callback.
/// </summary>
/// <param name="touches">The touches.</param>
internal void TouchesCancelled(IList<ITouch> touches)
{
for (var i = 0; i < touches.Count; i++) activeTouches.Remove(touches[i]);
touchesCancelled(touches);
}
/// <summary>
/// Removes a friendly gesture relation in both directions.
/// </summary>
/// <param name="gesture">The gesture.</param>
internal virtual void RemoveFriendlyGesture(Gesture gesture)
{
if (gesture == null || gesture == this) return;
unregisterFriendlyGesture(gesture);
gesture.unregisterFriendlyGesture(this);
}
#endregion
#region Protected methods
/// <summary>
/// Should the gesture cache this touch to use it later in calculation of <see cref="ScreenPosition"/>.
/// </summary>
/// <param name="value">Touch to cache.</param>
/// <returns><c>true</c> if touch should be cached; <c>false</c> otherwise.</returns>
protected virtual bool shouldCacheTouchPosition(ITouch value)
{
// Base implementation caches every touch; subclasses override to filter.
return true;
}
/// <summary>
/// Tries to change gesture state.
/// </summary>
/// <param name="value">New state.</param>
/// <returns><c>true</c> if state was changed; otherwise, <c>false</c>.</returns>
protected bool setState(GestureState value)
{
if (gestureManagerInstance == null) return false;
if (requireGestureToFail != null)
{
switch (value)
{
case GestureState.Recognized:
case GestureState.Began:
// Can't begin/recognize until the required gesture fails; remember the
// request and replay it from requiredToFailGestureStateChangedHandler.
if (!requiredGestureFailed)
{
delayedStateChange = value;
return false;
}
break;
case GestureState.Possible:
case GestureState.Failed:
case GestureState.Cancelled:
// The gesture resets/fails on its own — drop any postponed transition.
delayedStateChange = GestureState.Possible;
break;
}
}
// The manager arbitrates the transition and may return a different state
// (presumably when another gesture prevents this one — confirm in GestureManagerInstance).
var newState = gestureManagerInstance.GestureChangeState(this, value);
State = newState;
return value == newState;
}
#endregion
#region Callbacks
// Subclass hooks: the base implementations are intentionally empty.
/// <summary>
/// Called when new touches appear. Base implementation does nothing.
/// </summary>
/// <param name="touches">The touches.</param>
protected virtual void touchesBegan(IList<ITouch> touches) {}
/// <summary>
/// Called for moved touches. Base implementation does nothing.
/// </summary>
/// <param name="touches">The touches.</param>
protected virtual void touchesMoved(IList<ITouch> touches) {}
/// <summary>
/// Called if touches are removed. Caches the gesture's screen position so it is
/// still available after every touch point is gone.
/// </summary>
/// <param name="touches">The touches.</param>
protected virtual void touchesEnded(IList<ITouch> touches)
{
    if (combineTouches)
    {
        // Remember every released touch so recently-ended touches can be clustered.
        for (var i = 0; i < touches.Count; i++)
        {
            touchSequence.Add(touches[i]);
        }
        if (activeTouches.Count > 0) return;
        // All touches are gone: cache the centroid of the touches which were
        // released within the last combineTouchesInterval seconds.
        var cluster = touchSequence.FindElementsLaterThan(Time.time - combineTouchesInterval, shouldCacheTouchPosition);
        cachedScreenPosition = ClusterUtils.Get2DCenterPosition(cluster);
        cachedPreviousScreenPosition = ClusterUtils.GetPrevious2DCenterPosition(cluster);
    }
    else
    {
        if (activeTouches.Count > 0) return;
        // All touches are gone: cache the last released touch's position, if allowed.
        var lastTouch = touches[touches.Count - 1];
        if (shouldCacheTouchPosition(lastTouch))
        {
            cachedScreenPosition = lastTouch.Position;
            cachedPreviousScreenPosition = lastTouch.PreviousPosition;
        }
        else
        {
            cachedScreenPosition = TouchManager.INVALID_POSITION;
            cachedPreviousScreenPosition = TouchManager.INVALID_POSITION;
        }
    }
}
/// <summary>
/// Called when touches are cancelled. Base implementation does nothing.
/// </summary>
/// <param name="touches">The touches.</param>
protected virtual void touchesCancelled(IList<ITouch> touches) {}
/// <summary>
/// Called to reset gesture state after it fails or recognizes.
/// </summary>
protected virtual void reset()
{
// Invalidate cached positions; they are recomputed the next time touches end.
cachedScreenPosition = TouchManager.INVALID_POSITION;
cachedPreviousScreenPosition = TouchManager.INVALID_POSITION;
}
// State-transition hooks: the base implementations are intentionally empty.
/// <summary>
/// Called when state is changed to Possible. Base implementation does nothing.
/// </summary>
protected virtual void onPossible() {}
/// <summary>
/// Called when state is changed to Began. Base implementation does nothing.
/// </summary>
protected virtual void onBegan() {}
/// <summary>
/// Called when state is changed to Changed. Base implementation does nothing.
/// </summary>
protected virtual void onChanged() {}
/// <summary>
/// Called when state is changed to Recognized. Base implementation does nothing.
/// </summary>
protected virtual void onRecognized() {}
/// <summary>
/// Called when state is changed to Failed. Base implementation does nothing.
/// </summary>
protected virtual void onFailed() {}
/// <summary>
/// Called when state is changed to Cancelled. Base implementation does nothing.
/// </summary>
protected virtual void onCancelled() {}
#endregion
#region Private functions
// Records the friendship on this side only (id list + serialized list).
private void registerFriendlyGesture(Gesture gesture)
{
    if (gesture == null) return;
    if (gesture == this) return;
    addFriendlyGestureId(gesture);
    if (!friendlyGestures.Contains(gesture)) friendlyGestures.Add(gesture);
}
// Also invoked by the custom inspector.
private void addFriendlyGestureId(Gesture gesture)
{
    var id = gesture.GetInstanceID();
    if (friendlyGestureIds.IndexOf(id) == -1) friendlyGestureIds.Add(id);
}
// Removes the friendship on this side only.
private void unregisterFriendlyGesture(Gesture gesture)
{
    if (gesture == null) return;
    if (gesture == this) return;
    friendlyGestures.Remove(gesture);
    removeFriendlyGestureId(gesture);
}
// Also invoked by the custom inspector.
private void removeFriendlyGestureId(Gesture gesture)
{
    var id = gesture.GetInstanceID();
    friendlyGestureIds.Remove(id);
}
#endregion
#endregion
#region Event handlers
// Watches the required-to-fail gesture and releases or cancels this gesture accordingly.
private void requiredToFailGestureStateChangedHandler(object sender, GestureStateChangeEventArgs e)
{
    var gesture = sender as Gesture;
    if (gesture != requireGestureToFail) return;
    if (e.State == GestureState.Failed)
    {
        // The required gesture failed: replay any postponed state change.
        requiredGestureFailed = true;
        if (delayedStateChange != GestureState.Possible)
        {
            setState(delayedStateChange);
        }
    }
    else if (e.State == GestureState.Began
             || e.State == GestureState.Recognized
             || e.State == GestureState.Cancelled)
    {
        // The required gesture succeeded (or was cancelled) — this gesture must fail.
        if (state != GestureState.Failed) setState(GestureState.Failed);
    }
}
#endregion
}
/// <summary>
/// Event arguments passed with gesture state change events.
/// </summary>
public class GestureStateChangeEventArgs : EventArgs
{
    /// <summary>
    /// The state the gesture was in before the change.
    /// </summary>
    public Gesture.GestureState PreviousState { get; private set; }

    /// <summary>
    /// The state the gesture changed to.
    /// </summary>
    public Gesture.GestureState State { get; private set; }

    /// <summary>
    /// Initializes a new instance of the <see cref="GestureStateChangeEventArgs"/> class.
    /// </summary>
    /// <param name="state">Current gesture state.</param>
    /// <param name="previousState">Previous gesture state.</param>
    public GestureStateChangeEventArgs(Gesture.GestureState state, Gesture.GestureState previousState)
    {
        PreviousState = previousState;
        State = state;
    }
}
}
| |
using System.Collections.Generic;
using System.Threading.Tasks;
using EmsApi.Dto.V2;
namespace EmsApi.Client.V2.Access
{
/// <summary>
/// Provides access to EMS API "analytics" routes.
/// </summary>
public class AnalyticsAccess : RouteAccess
{
/// <summary>
/// Searches for analytics by name.
/// </summary>
/// <param name="text">
/// The search terms used to find a list of analytics by name.
/// </param>
/// <param name="groupId">
/// An optional group ID to specify where to limit the search. If not specified, all groups are searched.
/// </param>
/// <param name="maxResults">
/// The optional maximum number of matching results to return. If not specified, a default value of 200
/// is used. Use 0 to return all results.
/// </param>
/// <param name="category">
/// The category of analytics to search, including "Full", "Physical" or "Logical". Defaults to
/// <see cref="Category.Full"/>, which represents the full set of values exposed by the backing EMS system.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual Task<IEnumerable<AnalyticInfo>> SearchAsync( string text, string groupId = null, int? maxResults = null, Category category = Category.Full, CallContext context = null )
{
return CallApiTask( api => api.GetAnalytics( text, groupId, maxResults, category, context ) );
}
/// <summary>
/// Searches for analytics by name.
/// </summary>
/// <param name="text">
/// The search terms used to find a list of analytics by name.
/// </param>
/// <param name="groupId">
/// An optional group ID to specify where to limit the search. If not specified, all groups are searched.
/// </param>
/// <param name="maxResults">
/// The optional maximum number of matching results to return. If not specified, a default value of 200
/// is used. Use 0 to return all results.
/// </param>
/// <param name="category">
/// The category of analytics to search, including "Full", "Physical" or "Logical". Defaults to
/// <see cref="Category.Full"/>, which represents the full set of values exposed by the backing EMS system.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual IEnumerable<AnalyticInfo> Search( string text, string groupId = null, int? maxResults = null, Category category = Category.Full, CallContext context = null )
{
return SafeAccessEnumerableTask( SearchAsync( text, groupId, maxResults, category, context ) );
}
/// <summary>
/// Searches for analytics by name for a specific flight.
/// </summary>
/// <param name="flightId">
/// The integer ID of the flight record to use when searching analytics.
/// </param>
/// <param name="text">
/// The search terms used to find a list of analytics by name.
/// </param>
/// <param name="groupId">
/// An optional group ID to specify where to limit the search. If not specified, all groups are searched.
/// </param>
/// <param name="maxResults">
/// The optional maximum number of matching results to return. If not specified, a default value of 200
/// is used. Use 0 to return all results.
/// </param>
/// <param name="category">
/// The category of analytics to search, including "Full", "Physical" or "Logical". Defaults to
/// <see cref="Category.Full"/>, which represents the full set of values exposed by the backing EMS system.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual Task<IEnumerable<AnalyticInfo>> SearchAsync( int flightId, string text, string groupId = null, int? maxResults = null, Category category = Category.Full, CallContext context = null )
{
return CallApiTask( api => api.GetAnalyticsWithFlight( flightId, text, groupId, maxResults, category, context ) );
}
/// <summary>
/// Searches for analytics by name for a specific flight.
/// </summary>
/// <param name="flightId">
/// The integer ID of the flight record to use when searching analytics.
/// </param>
/// <param name="text">
/// The search terms used to find a list of analytics by name.
/// </param>
/// <param name="groupId">
/// An optional group ID to specify where to limit the search. If not specified, all groups are searched.
/// </param>
/// <param name="maxResults">
/// The optional maximum number of matching results to return. If not specified, a default value of 200
/// is used. Use 0 to return all results.
/// </param>
/// <param name="category">
/// The category of analytics to search, including "Full", "Physical" or "Logical". Defaults to
/// <see cref="Category.Full"/>, which represents the full set of values exposed by the backing EMS system.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual IEnumerable<AnalyticInfo> Search( int flightId, string text, string groupId = null, int? maxResults = null, Category category = Category.Full, CallContext context = null )
{
return SafeAccessEnumerableTask( SearchAsync( flightId, text, groupId, maxResults, category, context ) );
}
/// <summary>
/// Retrieves metadata information associated with an analytic such as a description or units.
/// </summary>
/// <param name="analyticId">
/// The analytic ID for which data is retrieved. These identifiers are typically obtained from nodes in an analytic group tree.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual Task<AnalyticInfo> GetInfoAsync( string analyticId, CallContext context = null )
{
var analyticIdObj = new AnalyticId { Id = analyticId };
return CallApiTask( api => api.GetAnalyticInfo( analyticIdObj, context ) );
}
/// <summary>
/// Retrieves metadata information associated with an analytic such as a description or units.
/// </summary>
/// <param name="analyticId">
/// The analytic ID for which data is retrieved. These identifiers are typically obtained from nodes in an analytic group tree.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual AnalyticInfo GetInfo( string analyticId, CallContext context = null )
{
return AccessTaskResult( GetInfoAsync( analyticId, context ) );
}
/// <summary>
/// Retrieves metadata information associated with an analytic such as a description or units.
/// </summary>
/// <param name="flightId">
/// The integer ID of the flight record to use when retrieving the analytic information.
/// </param>
/// <param name="analyticId">
/// The analytic ID for which data is retrieved. These identifiers are typically obtained from nodes in an analytic group tree.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual Task<AnalyticInfo> GetInfoAsync( int flightId, string analyticId, CallContext context = null )
{
var analyticIdObj = new AnalyticId { Id = analyticId };
return CallApiTask( api => api.GetAnalyticInfoWithFlight( flightId, analyticIdObj, context ) );
}
/// <summary>
/// Retrieves metadata information associated with an analytic such as a description or units.
/// </summary>
/// <param name="flightId">
/// The integer ID of the flight record to use when retrieving the analytic information.
/// </param>
/// <param name="analyticId">
/// The analytic ID for which data is retrieved. These identifiers are typically obtained from nodes in an analytic group tree.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual AnalyticInfo GetInfo( int flightId, string analyticId, CallContext context = null )
{
return AccessTaskResult( GetInfoAsync( flightId, analyticId, context ) );
}
/// <summary>
/// Retrieves the contents of an analytic group, which is a hierarchical tree structure used to organize analytics.
/// </summary>
/// <param name="analyticGroupId">
/// The ID of the group whose contents to retrieve. If not specified, the contents of the root group will be returned.
/// </param>
/// <param name="category">
/// The category of analytics we are interested in: "Full", "Physical" or "Logical". Defaults to
/// <see cref="Category.Full"/>, which represents the full set of values exposed by the backing EMS system.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual Task<AnalyticGroupContents> GetGroupAsync( string analyticGroupId = null, Category category = Category.Full, CallContext context = null )
{
return CallApiTask( api => api.GetAnalyticGroup( analyticGroupId, category, context ) );
}
/// <summary>
/// Retrieves the contents of an analytic group, which is a hierarchical tree structure used to organize analytics.
/// </summary>
/// <param name="analyticGroupId">
/// The ID of the group whose contents to retrieve. If not specified, the contents of the root group will be returned.
/// </param>
/// <param name="category">
/// The category of analytics we are interested in: "Full", "Physical" or "Logical". Defaults to
/// <see cref="Category.Full"/>, which represents the full set of values exposed by the backing EMS system.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual AnalyticGroupContents GetGroup( string analyticGroupId = null, Category category = Category.Full, CallContext context = null )
{
return AccessTaskResult( GetGroupAsync( analyticGroupId, category, context ) );
}
/// <summary>
/// Retrieves the contents of an analytic group, which is a hierarchical tree structure used to organize analytics.
/// </summary>
/// <param name="flightId">
/// The integer ID of the flight record to use when retrieving the analytic information.
/// </param>
/// <param name="analyticGroupId">
/// The ID of the group whose contents to retrieve. If not specified, the contents of the root group will be returned.
/// </param>
/// <param name="category">
/// The category of analytics we are interested in: "Full", "Physical" or "Logical". Defaults to
/// <see cref="Category.Full"/>, which represents the full set of values exposed by the backing EMS system.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual Task<AnalyticGroupContents> GetGroupAsync( int flightId, string analyticGroupId = null, Category category = Category.Full, CallContext context = null )
{
return CallApiTask( api => api.GetAnalyticGroupWithFlight( flightId, analyticGroupId, category, context ) );
}
/// <summary>
/// Retrieves the contents of an analytic group, which is a hierarchical tree structure used to organize analytics.
/// </summary>
/// <param name="flightId">
/// The integer ID of the flight record to use when retrieving the analytic information.
/// </param>
/// <param name="analyticGroupId">
/// The ID of the group whose contents to retrieve. If not specified, the contents of the root group will be returned.
/// </param>
/// <param name="category">
/// The category of analytics we are interested in: "Full", "Physical" or "Logical". Defaults to
/// <see cref="Category.Full"/>, which represents the full set of values exposed by the backing EMS system.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual AnalyticGroupContents GetGroup( int flightId, string analyticGroupId = null, Category category = Category.Full, CallContext context = null )
{
return AccessTaskResult( GetGroupAsync( flightId, analyticGroupId, category, context ) );
}
/// <summary>
/// Queries offsets and values in time-series data for a specified flight and analytic.
/// </summary>
/// <param name="flightId">
/// The integer ID of the flight record for which to query data.
/// </param>
/// <param name="query">
/// The information used to construct a query for which results are returned.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual Task<QueryResult> QueryResultsAsync( int flightId, AnalyticQuery query, CallContext context = null )
{
return CallApiTask( api => api.GetAnalyticResults( flightId, query.Raw, context ) );
}
/// <summary>
/// Queries offsets and values in time-series data for a specified flight and analytic.
/// </summary>
/// <param name="flightId">
/// The integer ID of the flight record for which to query data.
/// </param>
/// <param name="query">
/// The information used to construct a query for which results are returned.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual QueryResult QueryResults( int flightId, AnalyticQuery query, CallContext context = null )
{
return AccessTaskResult( QueryResultsAsync( flightId, query, context ) );
}
/// <summary>
/// Returns the analytic metadata for a flight.
/// </summary>
/// <param name="flightId">
/// The integer ID of the flight record for which to retrieve data.
/// </param>
/// <param name="analyticId">
/// The analytic ID (wrapped in double quotes) for which metadata is retrieved.
/// These identifiers are typically obtained from nodes in an analytic group tree.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual Task<Metadata> GetMetadataAsync( int flightId, string analyticId, CallContext context = null )
{
return CallApiTask( api => api.GetAnalyticMetadata( flightId, new AnalyticId { Id = analyticId }, context ) );
}
/// <summary>
/// Returns the analytic metadata for a flight.
/// </summary>
/// <param name="flightId">
/// The integer ID of the flight record for which to retrieve data.
/// </param>
/// <param name="analyticId">
/// The analytic ID (wrapped in double quotes) for which metadata is retrieved.
/// These identifiers are typically obtained from nodes in an analytic group tree.
/// </param>
/// <param name="context">
/// The optional call context to include.
/// </param>
public virtual Metadata GetMetadata( int flightId, string analyticId, CallContext context = null )
{
return AccessTaskResult( GetMetadataAsync( flightId, analyticId, context ) );
}
}
}
| |
//
// Copyright (c) 2004-2020 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
using NLog.Config;
namespace NLog.UnitTests.LayoutRenderers
{
using Xunit;
public class NDLCTests : NLogTestBase
{
[Fact]
// ${ndlc} renders every pushed frame, outermost first, separated by spaces.
public void NDLCTest()
{
LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${ndlc} ${message}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
// Start from a clean logical context so frames from other tests don't leak in.
NestedDiagnosticsLogicalContext.Clear();
LogManager.GetLogger("A").Debug("0");
// Empty context renders as empty string; the layout's literal space remains.
AssertDebugLastMessage("debug", " 0");
using (NestedDiagnosticsLogicalContext.Push("ala"))
{
LogManager.GetLogger("A").Debug("a");
AssertDebugLastMessage("debug", "ala a");
using (NestedDiagnosticsLogicalContext.Push("ma"))
{
LogManager.GetLogger("A").Debug("b");
AssertDebugLastMessage("debug", "ala ma b");
using (NestedDiagnosticsLogicalContext.Push("kota"))
{
LogManager.GetLogger("A").Debug("c");
AssertDebugLastMessage("debug", "ala ma kota c");
using (NestedDiagnosticsLogicalContext.Push("kopytko"))
{
LogManager.GetLogger("A").Debug("d");
AssertDebugLastMessage("debug", "ala ma kota kopytko d");
}
// Disposing a scope pops its frame again.
LogManager.GetLogger("A").Debug("c");
AssertDebugLastMessage("debug", "ala ma kota c");
}
LogManager.GetLogger("A").Debug("b");
AssertDebugLastMessage("debug", "ala ma b");
}
LogManager.GetLogger("A").Debug("a");
AssertDebugLastMessage("debug", "ala a");
}
LogManager.GetLogger("A").Debug("0");
AssertDebugLastMessage("debug", " 0");
}
[Fact]
// ${ndlc:topframes=2} renders only the two innermost (most recently pushed) frames.
public void NDLCTopTestTest()
{
LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${ndlc:topframes=2} ${message}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
// Start from a clean logical context so frames from other tests don't leak in.
NestedDiagnosticsLogicalContext.Clear();
LogManager.GetLogger("A").Debug("0");
AssertDebugLastMessage("debug", " 0");
using (NestedDiagnosticsLogicalContext.Push("ala"))
{
LogManager.GetLogger("A").Debug("a");
AssertDebugLastMessage("debug", "ala a");
using (NestedDiagnosticsLogicalContext.Push("ma"))
{
LogManager.GetLogger("A").Debug("b");
AssertDebugLastMessage("debug", "ala ma b");
using (NestedDiagnosticsLogicalContext.Push("kota"))
{
// Three frames pushed — the outermost one ("ala") is trimmed away.
LogManager.GetLogger("A").Debug("c");
AssertDebugLastMessage("debug", "ma kota c");
using (NestedDiagnosticsLogicalContext.Push("kopytko"))
{
LogManager.GetLogger("A").Debug("d");
AssertDebugLastMessage("debug", "kota kopytko d");
}
LogManager.GetLogger("A").Debug("c");
AssertDebugLastMessage("debug", "ma kota c");
}
LogManager.GetLogger("A").Debug("b");
AssertDebugLastMessage("debug", "ala ma b");
}
LogManager.GetLogger("A").Debug("a");
AssertDebugLastMessage("debug", "ala a");
}
LogManager.GetLogger("A").Debug("0");
AssertDebugLastMessage("debug", " 0");
}
[Fact]
// ${ndlc:topframes=1} renders only the innermost frame; also exercises manual Push/Pop.
public void NDLCTop1TestTest()
{
LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${ndlc:topframes=1} ${message}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
// Start from a clean logical context so frames from other tests don't leak in.
NestedDiagnosticsLogicalContext.Clear();
LogManager.GetLogger("A").Debug("0");
AssertDebugLastMessage("debug", " 0");
using (NestedDiagnosticsLogicalContext.Push("ala"))
{
LogManager.GetLogger("A").Debug("a");
AssertDebugLastMessage("debug", "ala a");
using (NestedDiagnosticsLogicalContext.Push("ma"))
{
LogManager.GetLogger("A").Debug("b");
AssertDebugLastMessage("debug", "ma b");
using (NestedDiagnosticsLogicalContext.Push("kota"))
{
LogManager.GetLogger("A").Debug("c");
AssertDebugLastMessage("debug", "kota c");
// Manual push (no using-scope) must be balanced by the manual PopObject below.
NestedDiagnosticsLogicalContext.Push("kopytko");
LogManager.GetLogger("A").Debug("d");
AssertDebugLastMessage("debug", "kopytko d");
Assert.Equal("kopytko", NestedDiagnosticsLogicalContext.PopObject()); // manual pop
LogManager.GetLogger("A").Debug("c");
AssertDebugLastMessage("debug", "kota c");
}
LogManager.GetLogger("A").Debug("b");
AssertDebugLastMessage("debug", "ma b");
}
LogManager.GetLogger("A").Debug("a");
AssertDebugLastMessage("debug", "ala a");
}
LogManager.GetLogger("A").Debug("0");
AssertDebugLastMessage("debug", " 0");
Assert.Null(NestedDiagnosticsLogicalContext.Pop()); //inconsistent with NDC - should be string.empty, but for backwardsscomp. Fix in NLog 5
// Pop returns frames in LIFO order.
NestedDiagnosticsLogicalContext.Push("zzz");
NestedDiagnosticsLogicalContext.Push("yyy");
Assert.Equal("yyy", NestedDiagnosticsLogicalContext.Pop());
NestedDiagnosticsLogicalContext.Clear();
Assert.Null(NestedDiagnosticsLogicalContext.Pop()); //inconsistent with NDC - should be string.empty, but for backwardsscomp. Fix in NLog 5
}
[Fact]
// ${ndlc:bottomframes=2} renders only the two outermost (earliest pushed) frames.
public void NDLCBottomTest()
{
LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${ndlc:bottomframes=2} ${message}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
// Start from a clean logical context so frames from other tests don't leak in.
NestedDiagnosticsLogicalContext.Clear();
LogManager.GetLogger("A").Debug("0");
AssertDebugLastMessage("debug", " 0");
using (NestedDiagnosticsLogicalContext.Push("ala"))
{
LogManager.GetLogger("A").Debug("a");
AssertDebugLastMessage("debug", "ala a");
using (NestedDiagnosticsLogicalContext.Push("ma"))
{
LogManager.GetLogger("A").Debug("b");
AssertDebugLastMessage("debug", "ala ma b");
using (NestedDiagnosticsLogicalContext.Push("kota"))
{
// Three frames pushed — the innermost one ("kota") is trimmed away.
LogManager.GetLogger("A").Debug("c");
AssertDebugLastMessage("debug", "ala ma c");
using (NestedDiagnosticsLogicalContext.Push("kopytko"))
{
LogManager.GetLogger("A").Debug("d");
AssertDebugLastMessage("debug", "ala ma d");
}
LogManager.GetLogger("A").Debug("c");
AssertDebugLastMessage("debug", "ala ma c");
}
LogManager.GetLogger("A").Debug("b");
AssertDebugLastMessage("debug", "ala ma b");
}
LogManager.GetLogger("A").Debug("a");
AssertDebugLastMessage("debug", "ala a");
}
LogManager.GetLogger("A").Debug("0");
AssertDebugLastMessage("debug", " 0");
}
[Fact]
public void NDLCSeparatorTest()
{
    // Frames are joined with a custom ':' separator instead of the default space.
    LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${ndlc:separator=\:} ${message}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
    NestedDiagnosticsLogicalContext.Clear();
    var logger = LogManager.GetLogger("A");
    logger.Debug("0");
    AssertDebugLastMessage("debug", " 0");
    using (NestedDiagnosticsLogicalContext.Push("ala"))
    {
        logger.Debug("a");
        AssertDebugLastMessage("debug", "ala a");
        using (NestedDiagnosticsLogicalContext.Push("ma"))
        {
            logger.Debug("b");
            AssertDebugLastMessage("debug", "ala:ma b");
            using (NestedDiagnosticsLogicalContext.Push("kota"))
            {
                logger.Debug("c");
                AssertDebugLastMessage("debug", "ala:ma:kota c");
                using (NestedDiagnosticsLogicalContext.Push("kopytko"))
                {
                    logger.Debug("d");
                    AssertDebugLastMessage("debug", "ala:ma:kota:kopytko d");
                }
                logger.Debug("c");
                AssertDebugLastMessage("debug", "ala:ma:kota c");
            }
            logger.Debug("b");
            AssertDebugLastMessage("debug", "ala:ma b");
        }
        logger.Debug("a");
        AssertDebugLastMessage("debug", "ala a");
    }
    logger.Debug("0");
    AssertDebugLastMessage("debug", " 0");
}
[Fact]
public void NDLCDeepTest()
{
    // Only the single top frame is rendered, even for a very deep stack.
    LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${ndlc:topframes=1} ${message}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
    NestedDiagnosticsLogicalContext.Clear();
    for (int depth = 1; depth <= 100; ++depth)
        NestedDiagnosticsLogicalContext.Push(depth);
    var logger = LogManager.GetLogger("A");
    logger.Debug("0");
    AssertDebugLastMessage("debug", "100 0");
    // A single pop reveals the next frame down.
    NestedDiagnosticsLogicalContext.PopObject();
    logger.Debug("1");
    AssertDebugLastMessage("debug", "99 1");
    // Clear discards all remaining frames at once.
    NestedDiagnosticsLogicalContext.Clear();
    logger.Debug("2");
    AssertDebugLastMessage("debug", " 2");
}
[Fact]
public void NDLCTimingTest()
{
// Layout fields (pipe-separated): full NDLC stack | outer-scope begin time |
// outer-scope elapsed (ms) | current-scope begin time | current-scope elapsed
// (100ns-tick fraction) | message.
LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${ndlc}|${ndlctiming:CurrentScope=false:ScopeBeginTime=true:Format=yyyy-MM-dd HH\:mm\:ss}|${ndlctiming:CurrentScope=false:ScopeBeginTime=false:Format=fff}|${ndlctiming:CurrentScope=true:ScopeBeginTime=true:Format=HH\:mm\:ss.fff}|${ndlctiming:CurrentScope=true:ScopeBeginTime=false:Format=fffffff}|${message}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
NestedDiagnosticsLogicalContext.Clear();
// With no scope active, every timing field renders empty.
LogManager.GetLogger("A").Debug("0");
AssertDebugLastMessage("debug", "|||||0");
using (NestedDiagnosticsLogicalContext.Push("ala"))
{
LogManager.GetLogger("A").Debug("a");
// RemoveEmptyEntries collapses empty fields, so 6 segments means all fields rendered.
var measurements = GetDebugLastMessage("debug").Split(new[] { '|' }, System.StringSplitOptions.RemoveEmptyEntries);
Assert.Equal(6, measurements.Length);
Assert.Equal("ala", measurements[0]);
#if !NET3_5
// Numeric elapsed checks are skipped on .NET 3.5.
Assert.InRange(int.Parse(measurements[2]), 0, 999);
Assert.InRange(int.Parse(measurements[4]), 0, 9999999);
#endif
Assert.Equal("a", measurements[measurements.Length-1]);
// Let measurable time pass so the elapsed fields must grow.
System.Threading.Thread.Sleep(10);
LogManager.GetLogger("A").Debug("b");
measurements = GetDebugLastMessage("debug").Split(new[] { '|' }, System.StringSplitOptions.RemoveEmptyEntries);
Assert.Equal("ala", measurements[0]);
#if !NET3_5
// After ~10ms, elapsed must be at least 10ms / 100000 ticks.
Assert.InRange(int.Parse(measurements[2]), 10, 999);
Assert.InRange(int.Parse(measurements[4]), 100000, 9999999);
#endif
Assert.Equal("b", measurements[measurements.Length - 1]);
using (NestedDiagnosticsLogicalContext.Push("ma"))
{
LogManager.GetLogger("A").Debug("a");
measurements = GetDebugLastMessage("debug").Split(new[] { '|' }, System.StringSplitOptions.RemoveEmptyEntries);
Assert.Equal(6, measurements.Length);
Assert.Equal("ala ma", measurements[0]);
#if !NET3_5
// Outer scope keeps accumulating; the freshly pushed scope starts from zero.
Assert.InRange(int.Parse(measurements[2]), 10, 999);
Assert.InRange(int.Parse(measurements[4]), 0, 9999999);
#endif
Assert.Equal("a", measurements[measurements.Length - 1]);
System.Threading.Thread.Sleep(10);
LogManager.GetLogger("A").Debug("b");
measurements = GetDebugLastMessage("debug").Split(new[] { '|' }, System.StringSplitOptions.RemoveEmptyEntries);
Assert.Equal(6, measurements.Length);
Assert.Equal("ala ma", measurements[0]);
#if !NET3_5
Assert.InRange(int.Parse(measurements[2]), 20, 999);
Assert.InRange(int.Parse(measurements[4]), 100000, 9999999);
#endif
Assert.Equal("b", measurements[measurements.Length - 1]);
}
// Back in the outer scope: its elapsed time covers both sleeps.
LogManager.GetLogger("A").Debug("c");
measurements = GetDebugLastMessage("debug").Split(new[] { '|' }, System.StringSplitOptions.RemoveEmptyEntries);
Assert.Equal("ala", measurements[0]);
#if !NET3_5
Assert.InRange(int.Parse(measurements[2]), 20, 999);
Assert.InRange(int.Parse(measurements[4]), 200000, 9999999);
#endif
Assert.Equal("c", measurements[measurements.Length - 1]);
}
// All scopes disposed: timing fields render empty again.
LogManager.GetLogger("A").Debug("0");
AssertDebugLastMessage("debug", "|||||0");
}
[Fact]
public void NDLCAsyncLogging()
{
    // The logical context flows into async continuations: the frame pushed before
    // the task was started is still visible when the task logs after the scope
    // has been disposed on the originating thread.
    LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog>
<targets><target name='debug' type='Debug' layout='${ndlc:separator=\:} ${message}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
    System.Threading.Tasks.Task backgroundLogging;
    using (NestedDiagnosticsLogicalContext.Push("ala"))
    {
        LogManager.GetLogger("A").Debug("a");
        AssertDebugLastMessage("debug", "ala a");
        backgroundLogging = System.Threading.Tasks.Task.Run(async () =>
        {
            await System.Threading.Tasks.Task.Delay(50);
            LogManager.GetLogger("B").Debug("b");
        });
    }
    backgroundLogging.Wait();
    AssertDebugLastMessage("debug", "ala b");
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices;
using System.Threading;
using Xunit;
public class FileSystemWatcherTests
{
/// <summary>
/// Asserts that <paramref name="watcher"/> carries the default property values:
/// events disabled, no subdirectory recursion, 8192-byte internal buffer, the
/// default notify filter, and the given path/filter.
/// </summary>
private static void ValidateDefaults(FileSystemWatcher watcher, string path, string filter)
{
    // Use the boolean-specific asserts instead of Assert.Equal(false, ...) (xUnit2004).
    Assert.False(watcher.EnableRaisingEvents);
    Assert.Equal(filter, watcher.Filter);
    Assert.False(watcher.IncludeSubdirectories);
    Assert.Equal(8192, watcher.InternalBufferSize); // documented default buffer size
    Assert.Equal(NotifyFilters.LastWrite | NotifyFilters.FileName | NotifyFilters.DirectoryName, watcher.NotifyFilter);
    Assert.Equal(path, watcher.Path);
}
[Fact]
public static void FileSystemWatcher_ctor()
{
    // A parameterless watcher starts with an empty path and the "*.*" filter.
    const string expectedPath = "";
    const string expectedPattern = "*.*";
    using (var watcher = new FileSystemWatcher())
    {
        ValidateDefaults(watcher, expectedPath, expectedPattern);
    }
}
[Fact]
public static void FileSystemWatcher_ctor_path()
{
    // The path-only constructor keeps the default "*.*" filter.
    const string expectedPath = ".";
    const string expectedPattern = "*.*";
    using (var watcher = new FileSystemWatcher(expectedPath))
    {
        ValidateDefaults(watcher, expectedPath, expectedPattern);
    }
}
[Fact]
public static void FileSystemWatcher_ctor_path_pattern()
{
    // Both path and filter supplied explicitly; other properties stay at defaults.
    const string expectedPath = ".";
    const string expectedPattern = "honey.jar";
    using (var watcher = new FileSystemWatcher(expectedPath, expectedPattern))
    {
        ValidateDefaults(watcher, expectedPath, expectedPattern);
    }
}
[Fact]
public static void FileSystemWatcher_ctor_InvalidStrings()
{
    // Any random GUID string is a directory that does not exist.
    string missingDir = Guid.NewGuid().ToString();

    // Null path
    Assert.Throws<ArgumentNullException>("path", () => new FileSystemWatcher(null));
    Assert.Throws<ArgumentNullException>("path", () => new FileSystemWatcher(null, "*"));

    // Empty path
    Assert.Throws<ArgumentException>("path", () => new FileSystemWatcher(string.Empty));
    Assert.Throws<ArgumentException>("path", () => new FileSystemWatcher(string.Empty, "*"));

    // Non-existent directory
    Assert.Throws<ArgumentException>("path", () => new FileSystemWatcher(missingDir));
    Assert.Throws<ArgumentException>("path", () => new FileSystemWatcher(missingDir, "*"));

    // Null filter
    Assert.Throws<ArgumentNullException>("filter", () => new FileSystemWatcher(".", null));
}
[Fact]
public static void FileSystemWatcher_Changed()
{
    // Subscribing, unsubscribing, and redundantly unsubscribing must all be safe.
    using (var watcher = new FileSystemWatcher())
    {
        FileSystemEventHandler handler = (o, e) => { };
        watcher.Changed += handler;
        watcher.Changed -= handler;
        watcher.Changed -= handler; // removing an absent handler shouldn't throw
    }
}
[Fact]
public static void FileSystemWatcher_Created()
{
    // Subscribing, unsubscribing, and redundantly unsubscribing must all be safe.
    using (var watcher = new FileSystemWatcher())
    {
        FileSystemEventHandler handler = (o, e) => { };
        watcher.Created += handler;
        watcher.Created -= handler;
        watcher.Created -= handler; // removing an absent handler shouldn't throw
    }
}
[Fact]
public static void FileSystemWatcher_Deleted()
{
    // Subscribing, unsubscribing, and redundantly unsubscribing must all be safe.
    using (var watcher = new FileSystemWatcher())
    {
        FileSystemEventHandler handler = (o, e) => { };
        watcher.Deleted += handler;
        watcher.Deleted -= handler;
        watcher.Deleted -= handler; // removing an absent handler shouldn't throw
    }
}
[Fact]
public static void FileSystemWatcher_Disposed()
{
    var watcher = new FileSystemWatcher();
    watcher.Dispose();
    // Double-dispose must be a no-op.
    watcher.Dispose();
    // But mutating a disposed instance must fail.
    Assert.Throws<ObjectDisposedException>(() => watcher.EnableRaisingEvents = true);
}
[Fact]
public static void FileSystemWatcher_EnableRaisingEvents()
{
    // Dispose deterministically via using (the original leaked the watcher,
    // unlike the sibling tests) and use boolean-specific asserts (xUnit2004).
    using (FileSystemWatcher watcher = new FileSystemWatcher("."))
    {
        Assert.False(watcher.EnableRaisingEvents);
        watcher.EnableRaisingEvents = true;
        Assert.True(watcher.EnableRaisingEvents);
        watcher.EnableRaisingEvents = false;
        Assert.False(watcher.EnableRaisingEvents);
    }
}
[Fact]
public static void FileSystemWatcher_Error()
{
    // Subscribing, unsubscribing, and redundantly unsubscribing must all be safe.
    using (var watcher = new FileSystemWatcher())
    {
        ErrorEventHandler handler = (o, e) => { };
        watcher.Error += handler;
        watcher.Error -= handler;
        watcher.Error -= handler; // removing an absent handler shouldn't throw
    }
}
[Fact]
public static void FileSystemWatcher_Filter()
{
    // Dispose deterministically via using (the original leaked the watcher).
    using (FileSystemWatcher watcher = new FileSystemWatcher())
    {
        Assert.Equal("*.*", watcher.Filter);

        // Null and empty should be mapped to "*.*"
        watcher.Filter = null;
        Assert.Equal("*.*", watcher.Filter);
        watcher.Filter = String.Empty;
        Assert.Equal("*.*", watcher.Filter);

        // Whitespace and control characters are stored verbatim.
        watcher.Filter = " ";
        Assert.Equal(" ", watcher.Filter);
        watcher.Filter = "\0";
        Assert.Equal("\0", watcher.Filter);
        watcher.Filter = "\n";
        Assert.Equal("\n", watcher.Filter);

        watcher.Filter = "abc.dll";
        Assert.Equal("abc.dll", watcher.Filter);

        if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ||
            RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
        {
            // expect no change for OrdinalIgnoreCase-equal strings
            // it's unclear why desktop does this but preserve it for compat
            watcher.Filter = "ABC.DLL";
            Assert.Equal("abc.dll", watcher.Filter);
        }

        // We can make this setting by first changing to another value then back.
        watcher.Filter = null;
        watcher.Filter = "ABC.DLL";
        Assert.Equal("ABC.DLL", watcher.Filter);
    }
}
[Fact]
public static void FileSystemWatcher_IncludeSubdirectories()
{
    // Dispose deterministically via using (the original leaked the watcher)
    // and use boolean-specific asserts (xUnit2004).
    using (FileSystemWatcher watcher = new FileSystemWatcher())
    {
        Assert.False(watcher.IncludeSubdirectories);
        watcher.IncludeSubdirectories = true;
        Assert.True(watcher.IncludeSubdirectories);
        watcher.IncludeSubdirectories = false;
        Assert.False(watcher.IncludeSubdirectories);
    }
}
[Fact]
public static void FileSystemWatcher_InternalBufferSize()
{
    // Dispose deterministically via using (the original leaked the watcher).
    using (FileSystemWatcher watcher = new FileSystemWatcher())
    {
        Assert.Equal(8192, watcher.InternalBufferSize); // default

        // Larger values are accepted as-is.
        watcher.InternalBufferSize = 20000;
        Assert.Equal(20000, watcher.InternalBufferSize);
        watcher.InternalBufferSize = int.MaxValue;
        Assert.Equal(int.MaxValue, watcher.InternalBufferSize);

        // FSW enforces a minimum value of 4096
        watcher.InternalBufferSize = 0;
        Assert.Equal(4096, watcher.InternalBufferSize);
        watcher.InternalBufferSize = -1;
        Assert.Equal(4096, watcher.InternalBufferSize);
        watcher.InternalBufferSize = int.MinValue;
        Assert.Equal(4096, watcher.InternalBufferSize);
        watcher.InternalBufferSize = 4095;
        Assert.Equal(4096, watcher.InternalBufferSize);
    }
}
[Fact]
public static void FileSystemWatcher_NotifyFilter()
{
    // Dispose deterministically via using (the original leaked the watcher).
    using (FileSystemWatcher watcher = new FileSystemWatcher())
    {
        Assert.Equal(NotifyFilters.LastWrite | NotifyFilters.FileName | NotifyFilters.DirectoryName, watcher.NotifyFilter);

        // Materialize once: the original deferred LINQ query was re-enumerated
        // four times (foreach, Aggregate, Max, and the lambda below).
        NotifyFilters[] notifyFilters = Enum.GetValues(typeof(NotifyFilters)).Cast<NotifyFilters>().ToArray();
        foreach (NotifyFilters filterValue in notifyFilters)
        {
            watcher.NotifyFilter = filterValue;
            Assert.Equal(filterValue, watcher.NotifyFilter);
        }

        NotifyFilters allFilters = notifyFilters.Aggregate((mask, flag) => mask | flag);
        watcher.NotifyFilter = allFilters;
        Assert.Equal(allFilters, watcher.NotifyFilter);

        // This doesn't make sense, but it is permitted.
        watcher.NotifyFilter = 0;
        Assert.Equal((NotifyFilters)0, watcher.NotifyFilter);

        // These throw InvalidEnumException on desktop, but ArgumentException on K
        Assert.Throws<ArgumentException>(() => watcher.NotifyFilter = (NotifyFilters)(-1));
        Assert.Throws<ArgumentException>(() => watcher.NotifyFilter = (NotifyFilters)int.MinValue);
        Assert.Throws<ArgumentException>(() => watcher.NotifyFilter = (NotifyFilters)int.MaxValue);
        Assert.Throws<ArgumentException>(() => watcher.NotifyFilter = allFilters + 1);

        // Simulate a bit added to the flags
        Assert.Throws<ArgumentException>(() => watcher.NotifyFilter = allFilters | (NotifyFilters)((int)notifyFilters.Max() << 1));
    }
}
[Fact]
public static void FileSystemWatcher_OnChanged()
{
    using (TestFileSystemWatcher watcher = new TestFileSystemWatcher())
    {
        var expectedArgs = new FileSystemEventArgs(WatcherChangeTypes.Changed, "directory", "file");
        bool raised = false;
        object sender = null;
        FileSystemEventArgs receivedArgs = null;
        watcher.Changed += (o, e) =>
        {
            raised = true;
            sender = o;
            receivedArgs = e;
        };
        // Raise the event directly and verify sender and args flow through untouched.
        watcher.CallOnChanged(expectedArgs);
        Assert.True(raised, "Event should be invoked");
        Assert.Equal(watcher, sender);
        Assert.Equal(expectedArgs, receivedArgs);
    }
}
[Fact]
public static void FileSystemWatcher_OnChangedGivesExpectedFullPath()
{
    using (var dir = Utility.CreateTestDirectory())
    using (var fsw = new FileSystemWatcher(dir.Path))
    {
        AutoResetEvent are = new AutoResetEvent(false);
        string fullPath = Path.Combine(dir.Path, "Foo.txt");
        // NOTE(review): the handler is attached to Created although the test name
        // says Changed — confirm this is intentional.
        fsw.Created += (o, e) =>
        {
            // xunit convention is (expected, actual): fixes swapped arguments.
            Assert.Equal(fullPath, e.FullPath);
            are.Set();
        };
        fsw.EnableRaisingEvents = true;
        using (var file = Utility.CreateTestFile(fullPath))
        {
            // Bump the timestamp so a change notification is also generated.
            File.SetLastWriteTime(file.Path, DateTime.Now + TimeSpan.FromSeconds(10));
            are.WaitOne(Utility.WaitForExpectedEventTimeout);
        }
    }
}
[Fact]
public static void FileSystemWatcher_OnCreated()
{
    using (TestFileSystemWatcher watcher = new TestFileSystemWatcher())
    {
        var expectedArgs = new FileSystemEventArgs(WatcherChangeTypes.Created, "directory", "file");
        bool raised = false;
        object sender = null;
        FileSystemEventArgs receivedArgs = null;
        watcher.Created += (o, e) =>
        {
            raised = true;
            sender = o;
            receivedArgs = e;
        };
        // Raise the event directly and verify sender and args flow through untouched.
        watcher.CallOnCreated(expectedArgs);
        Assert.True(raised, "Event should be invoked");
        Assert.Equal(watcher, sender);
        Assert.Equal(expectedArgs, receivedArgs);
    }
}
[Fact]
public static void FileSystemWatcher_OnCreatedGivesExpectedFullPath()
{
    using (var dir = Utility.CreateTestDirectory())
    using (var fsw = new FileSystemWatcher(dir.Path))
    {
        AutoResetEvent are = new AutoResetEvent(false);
        string fullPath = Path.Combine(dir.Path, "Foo.txt");
        fsw.Created += (o, e) =>
        {
            // xunit convention is (expected, actual): fixes swapped arguments.
            Assert.Equal(fullPath, e.FullPath);
            are.Set();
        };
        fsw.EnableRaisingEvents = true;
        using (var file = Utility.CreateTestFile(fullPath))
        {
            are.WaitOne(Utility.WaitForExpectedEventTimeout);
        }
    }
}
[Fact]
[PlatformSpecific(PlatformID.OSX | PlatformID.Windows)]
public static void FileSystemWatcher_OnCreatedWithMismatchedCasingGivesExpectedFullPath()
{
    using (var dir = Utility.CreateTestDirectory())
    using (var fsw = new FileSystemWatcher(dir.Path))
    {
        var signal = new AutoResetEvent(false);
        // Deliberately uppercase the directory portion: on case-insensitive file
        // systems the reported path must still match ignoring case.
        string mismatchedPath = Path.Combine(dir.Path.ToUpper(), "Foo.txt");
        fsw.Created += (o, e) =>
        {
            Assert.True(mismatchedPath.Equals(e.FullPath, StringComparison.OrdinalIgnoreCase));
            signal.Set();
        };
        fsw.EnableRaisingEvents = true;
        using (Utility.CreateTestFile(mismatchedPath))
        {
            signal.WaitOne(Utility.WaitForExpectedEventTimeout);
        }
    }
}
[Fact]
public static void FileSystemWatcher_OnDeleted()
{
    using (TestFileSystemWatcher watcher = new TestFileSystemWatcher())
    {
        var expectedArgs = new FileSystemEventArgs(WatcherChangeTypes.Deleted, "directory", "file");
        bool raised = false;
        object sender = null;
        FileSystemEventArgs receivedArgs = null;
        watcher.Deleted += (o, e) =>
        {
            raised = true;
            sender = o;
            receivedArgs = e;
        };
        // Raise the event directly and verify sender and args flow through untouched.
        watcher.CallOnDeleted(expectedArgs);
        Assert.True(raised, "Event should be invoked");
        Assert.Equal(watcher, sender);
        Assert.Equal(expectedArgs, receivedArgs);
    }
}
[Fact]
public static void FileSystemWatcher_OnDeletedGivesExpectedFullPath()
{
    using (var dir = Utility.CreateTestDirectory())
    using (var fsw = new FileSystemWatcher(dir.Path))
    {
        AutoResetEvent are = new AutoResetEvent(false);
        string fullPath = Path.Combine(dir.Path, "Foo.txt");
        fsw.Deleted += (o, e) =>
        {
            // xunit convention is (expected, actual): fixes swapped arguments.
            Assert.Equal(fullPath, e.FullPath);
            are.Set();
        };
        fsw.EnableRaisingEvents = true;
        // Creating and immediately disposing the file deletes it.
        using (var file = Utility.CreateTestFile(fullPath)) { }
        are.WaitOne(Utility.WaitForExpectedEventTimeout);
    }
}
[Fact]
public static void FileSystemWatcher_OnError()
{
    using (TestFileSystemWatcher watcher = new TestFileSystemWatcher())
    {
        var expectedArgs = new ErrorEventArgs(new Exception());
        bool raised = false;
        object sender = null;
        ErrorEventArgs receivedArgs = null;
        watcher.Error += (o, e) =>
        {
            raised = true;
            sender = o;
            receivedArgs = e;
        };
        // Raise the event directly and verify sender and args flow through untouched.
        watcher.CallOnError(expectedArgs);
        Assert.True(raised, "Event should be invoked");
        Assert.Equal(watcher, sender);
        Assert.Equal(expectedArgs, receivedArgs);
    }
}
[Fact]
public static void FileSystemWatcher_OnRenamed()
{
    using (TestFileSystemWatcher watcher = new TestFileSystemWatcher())
    {
        var expectedArgs = new RenamedEventArgs(WatcherChangeTypes.Renamed, "directory", "file", "oldFile");
        bool raised = false;
        object sender = null;
        RenamedEventArgs receivedArgs = null;
        watcher.Renamed += (o, e) =>
        {
            raised = true;
            sender = o;
            receivedArgs = e;
        };
        // Raise the event directly and verify sender and args flow through untouched.
        watcher.CallOnRenamed(expectedArgs);
        Assert.True(raised, "Event should be invoked");
        Assert.Equal(watcher, sender);
        Assert.Equal(expectedArgs, receivedArgs);
    }
}
[Fact]
public static void FileSystemWatcher_OnRenameGivesExpectedFullPath()
{
    using (var dir = Utility.CreateTestDirectory())
    using (var fsw = new FileSystemWatcher(dir.Path))
    {
        AutoResetEvent are = new AutoResetEvent(false);
        string fullOriginalPath = Path.Combine(dir.Path, "Foo.txt");
        string fullNewPath = Path.Combine(dir.Path, "Foo2.txt");
        fsw.Renamed += (o, e) =>
        {
            // xunit convention is (expected, actual): fixes swapped arguments.
            Assert.Equal(fullOriginalPath, e.OldFullPath);
            Assert.Equal(fullNewPath, e.FullPath);
            are.Set();
        };
        fsw.EnableRaisingEvents = true;
        using (var file = Utility.CreateTestFile(fullOriginalPath))
        {
            file.Move(fullNewPath);
            are.WaitOne(Utility.WaitForExpectedEventTimeout);
        }
    }
}
[Fact]
public static void FileSystemWatcher_Path()
{
    // Dispose deterministically via using (the original leaked the watcher).
    using (FileSystemWatcher watcher = new FileSystemWatcher())
    {
        Assert.Equal(String.Empty, watcher.Path);

        // Null is mapped back to the empty string.
        watcher.Path = null;
        Assert.Equal(String.Empty, watcher.Path);

        // Relative paths are stored verbatim.
        watcher.Path = ".";
        Assert.Equal(".", watcher.Path);
        watcher.Path = "..";
        Assert.Equal("..", watcher.Path);

        string currentDir = Path.GetFullPath(".").TrimEnd('.', Path.DirectorySeparatorChar);
        watcher.Path = currentDir;
        Assert.Equal(currentDir, watcher.Path);

        if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows) || // expect no change for OrdinalIgnoreCase-equal strings
            RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
        {
            watcher.Path = currentDir.ToUpperInvariant();
            Assert.Equal(currentDir, watcher.Path);
            watcher.Path = currentDir.ToLowerInvariant();
            Assert.Equal(currentDir, watcher.Path);
        }

        // expect a change for same "full-path" but different string path, FSW does not normalize
        string currentDirRelative = currentDir +
            Path.DirectorySeparatorChar + "." +
            Path.DirectorySeparatorChar + "." +
            Path.DirectorySeparatorChar + "." +
            Path.DirectorySeparatorChar + ".";
        watcher.Path = currentDirRelative;
        Assert.Equal(currentDirRelative, watcher.Path);

        // FSW starts with String.Empty and will ignore setting this if it is already set,
        // but if you set it after some other valid string has been set it will throw.
        Assert.Throws<ArgumentException>(() => watcher.Path = String.Empty);
        // Non-existent path
        Assert.Throws<ArgumentException>(() => watcher.Path = Guid.NewGuid().ToString());
        // Web path
        Assert.Throws<ArgumentException>(() => watcher.Path = "http://localhost");
        // File protocol
        Assert.Throws<ArgumentException>(() => watcher.Path = "file:///" + currentDir.Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar));
    }
}
[Fact]
public static void FileSystemWatcher_Renamed()
{
    // Subscribing, unsubscribing, and redundantly unsubscribing must all be safe.
    using (var watcher = new FileSystemWatcher())
    {
        RenamedEventHandler handler = (o, e) => { };
        watcher.Renamed += handler;
        watcher.Renamed -= handler;
        watcher.Renamed -= handler; // removing an absent handler shouldn't throw
    }
}
[PlatformSpecific(PlatformID.Linux)]
[Fact]
public static void FileSystemWatcher_CreateManyConcurrentInstances()
{
// Each enabled watcher presumably consumes one inotify instance; the per-user
// limit is published by the kernel under /proc — TODO confirm.
int maxUserInstances = int.Parse(File.ReadAllText("/proc/sys/fs/inotify/max_user_instances"));
var watchers = new List<FileSystemWatcher>();
using (var dir = Utility.CreateTestDirectory())
{
try
{
Assert.Throws<IOException>(() =>
{
// Create enough inotify instances to exceed the number of allowed watches
for (int i = 0; i <= maxUserInstances; i++)
{
watchers.Add(new FileSystemWatcher(dir.Path) { EnableRaisingEvents = true });
}
});
}
finally
{
// Dispose everything that was created so later tests aren't starved of instances.
foreach (FileSystemWatcher watcher in watchers)
{
watcher.Dispose();
}
}
}
}
[PlatformSpecific(PlatformID.Linux)]
[Theory]
[InlineData(true)]
[InlineData(false)]
public static void FileSystemWatcher_CreateManyConcurrentWatches(bool enableBeforeCreatingWatches)
{
// Kernel-published per-user limit on inotify watches.
int maxUserWatches = int.Parse(File.ReadAllText("/proc/sys/fs/inotify/max_user_watches"));
using (var dir = Utility.CreateTestDirectory())
using (var watcher = new FileSystemWatcher(dir.Path) { IncludeSubdirectories = true, NotifyFilter = NotifyFilters.FileName })
{
// Capture the first error raised by the watcher.
Exception exc = null;
ManualResetEventSlim mres = new ManualResetEventSlim();
watcher.Error += (s, e) =>
{
exc = e.GetException();
mres.Set();
};
// The theory parameter controls whether the limit is hit while enumerating
// pre-existing directories or while reacting to newly created ones.
if (enableBeforeCreatingWatches)
watcher.EnableRaisingEvents = true;
// Create enough directories to exceed the number of allowed watches
for (int i = 0; i <= maxUserWatches; i++)
{
Directory.CreateDirectory(Path.Combine(dir.Path, i.ToString()));
}
if (!enableBeforeCreatingWatches)
watcher.EnableRaisingEvents = true;
// Exceeding the limit must surface as an IOException via the Error event.
Assert.True(mres.Wait(Utility.WaitForExpectedEventTimeout));
Assert.IsType<IOException>(exc);
// Make sure existing watches still work even after we've had one or more failures
AutoResetEvent are = Utility.WatchForEvents(watcher, WatcherChangeTypes.Created);
Utility.CreateTestFile(Path.Combine(dir.Path, Path.GetRandomFileName())).Dispose();
Utility.ExpectEvent(are, "file created");
}
}
[Fact]
public static void FileSystemWatcher_StopCalledOnBackgroundThreadDoesNotDeadlock()
{
// Check the case where Stop or Dispose (they do the same thing) is called from
// a FSW event callback and make sure we don't Thread.Join to deadlock
using (var dir = Utility.CreateTestDirectory())
{
FileSystemWatcher watcher = new FileSystemWatcher();
AutoResetEvent are = new AutoResetEvent(false);
// Disposing from inside the event handler is the exact scenario under test.
FileSystemEventHandler callback = (sender, arg) => {
watcher.Dispose();
are.Set();
};
// Attach the FSW to the existing structure
watcher.Path = Path.GetFullPath(dir.Path);
watcher.Filter = "*";
watcher.NotifyFilter = NotifyFilters.FileName | NotifyFilters.Size;
watcher.Changed += callback;
using (var file = File.Create(Path.Combine(dir.Path, "testfile.txt")))
{
watcher.EnableRaisingEvents = true;
// Change the nested file and verify we get the changed event
byte[] bt = new byte[4096];
file.Write(bt, 0, bt.Length);
file.Flush();
}
// NOTE(review): if the Changed event never fires, the watcher is never
// disposed — the callback is its only disposal site.
are.WaitOne(Utility.WaitForExpectedEventTimeout);
}
}
[Fact]
public static void FileSystemWatcher_WatchingAliasedFolderResolvesToRealPathWhenWatching()
{
    // Watch a directory created under the temp path and confirm a Created event
    // is still delivered for a child directory.
    using (var dir = Utility.CreateTestDirectory(Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString())))
    using (var fsw = new FileSystemWatcher(dir.Path))
    {
        var createdSignal = Utility.WatchForEvents(fsw, WatcherChangeTypes.Created);
        fsw.Filter = "*";
        fsw.EnableRaisingEvents = true;
        using (Utility.CreateTestDirectory(Path.Combine(dir.Path, "foo")))
        {
            Utility.ExpectEvent(createdSignal, "created");
        }
    }
}
}
| |
#region S# License
/******************************************************************************************
NOTICE!!! This program and source code is owned and licensed by
StockSharp, LLC, www.stocksharp.com
Viewing or use of this code requires your acceptance of the license
agreement found at https://github.com/StockSharp/StockSharp/blob/master/LICENSE
Removal of this comment is a violation of the license agreement.
Project: StockSharp.Algo.Candles.Compression.Algo
File: RealTimeCandleBuilderSource.cs
Created: 2015, 11, 11, 2:32 PM
Copyright 2010 by StockSharp, LLC
*******************************************************************************************/
#endregion S# License
namespace StockSharp.Algo.Candles.Compression
{
using System;
using System.Collections.Generic;
using System.Linq;
using Ecng.Collections;
using Ecng.ComponentModel;
using StockSharp.BusinessEntities;
/// <summary>
/// The base data source for <see cref="ICandleBuilder"/> which receives data from <see cref="IConnector"/>.
/// </summary>
/// <typeparam name="T">The source data type (for example, <see cref="Trade"/>).</typeparam>
public abstract class RealTimeCandleBuilderSource<T> : BaseCandleBuilderSource
{
// Active series grouped by instrument; synchronized because data arrives from connector callbacks.
private readonly SynchronizedDictionary<Security, CachedSynchronizedList<CandleSeries>> _registeredSeries = new SynchronizedDictionary<Security, CachedSynchronizedList<CandleSeries>>();
// Series ordered by their requested end date so expired ones can be stopped from the head.
private readonly OrderedPriorityQueue<DateTimeOffset, CandleSeries> _seriesByDates = new OrderedPriorityQueue<DateTimeOffset, CandleSeries>();
/// <summary>
/// Initializes a new instance of the <see cref="RealTimeCandleBuilderSource{T}"/>.
/// </summary>
/// <param name="connector">The connection through which new data will be received.</param>
protected RealTimeCandleBuilderSource(IConnector connector)
{
if (connector == null)
throw new ArgumentNullException(nameof(connector));
Connector = connector;
// Market-time ticks drive expiration of series whose end date has passed.
Connector.MarketTimeChanged += OnConnectorMarketTimeChanged;
}
/// <summary>
/// The source priority by speed (0 - the best).
/// </summary>
public override int SpeedPriority => 1;
/// <summary>
/// The connection through which new data will be received.
/// </summary>
public IConnector Connector { get; }
/// <summary>
/// To send data request.
/// </summary>
/// <param name="series">The candles series for which data receiving should be started.</param>
/// <param name="from">The initial date from which you need to get data.</param>
/// <param name="to">The final date by which you need to get data.</param>
public override void Start(CandleSeries series, DateTimeOffset from, DateTimeOffset to)
{
if (series == null)
throw new ArgumentNullException(nameof(series));
bool registerSecurity;
// Mark the series so its first processing pass replays accumulated history (see AddNewValues).
series.IsNew = true;
_registeredSeries.SafeAdd(series.Security, out registerSecurity).Add(series);
// Subscribe to the instrument only when its first series is added.
if (registerSecurity)
RegisterSecurity(series.Security);
// Track the end date so OnConnectorMarketTimeChanged can stop the series when it expires.
_seriesByDates.Add(new KeyValuePair<DateTimeOffset, CandleSeries>(to, series));
}
/// <summary>
/// To stop data receiving starting through <see cref="Start"/>.
/// </summary>
/// <param name="series">Candles series.</param>
public override void Stop(CandleSeries series)
{
if (series == null)
throw new ArgumentNullException(nameof(series));
var registeredSeries = _registeredSeries.TryGetValue(series.Security);
if (registeredSeries == null)
return;
registeredSeries.Remove(series);
// Unsubscribe from the instrument once its last series is removed.
if (registeredSeries.Count == 0)
{
UnRegisterSecurity(series.Security);
_registeredSeries.Remove(series.Security);
}
_seriesByDates.RemoveWhere(i => i.Value == series);
RaiseStopped(series);
}
/// <summary>
/// To register the getting data for the instrument.
/// </summary>
/// <param name="security">Security.</param>
protected abstract void RegisterSecurity(Security security);
/// <summary>
/// To stop the getting data for the instrument.
/// </summary>
/// <param name="security">Security.</param>
protected abstract void UnRegisterSecurity(Security security);
/// <summary>
/// To get previously accumulated values.
/// </summary>
/// <param name="security">Security.</param>
/// <returns>Accumulated values.</returns>
protected abstract IEnumerable<T> GetSecurityValues(Security security);
/// <summary>
/// To convert <typeparam ref="T"/> to <see cref="ICandleBuilderSourceValue"/>.
/// </summary>
/// <param name="value">New source data.</param>
/// <returns>Data in format <see cref="ICandleBuilder"/>.</returns>
protected abstract ICandleBuilderSourceValue Convert(T value);
/// <summary>
/// Synchronously to add new data received from <see cref="Connector"/>.
/// </summary>
/// <param name="values">New data.</param>
protected void AddNewValues(IEnumerable<T> values)
{
if (_registeredSeries.Count == 0)
return;
foreach (var group in values.Select(Convert).GroupBy(v => v.Security))
{
var security = group.Key;
var registeredSeries = _registeredSeries.TryGetValue(security);
if (registeredSeries == null)
continue;
// Cache gives a snapshot so series added/removed concurrently don't break the loop.
var seriesCache = registeredSeries.Cache;
// Candle building expects chronologically ordered values.
var securityValues = group.OrderBy(v => v.Time).ToArray();
foreach (var series in seriesCache)
{
if (series.IsNew)
{
// First pass for this series: replay the full accumulated history instead
// of only the current batch.
RaiseProcessing(series, GetSecurityValues(security).Select(Convert).OrderBy(v => v.Time));
series.IsNew = false;
}
else
{
RaiseProcessing(series, securityValues);
}
}
}
}
// Stops every series whose requested end date has been reached by the market clock.
private void OnConnectorMarketTimeChanged(TimeSpan value)
{
if (_seriesByDates.Count == 0)
return;
var pair = _seriesByDates.Peek();
while (pair.Key <= Connector.CurrentTime)
{
// Stop() also removes the entry via RemoveWhere; Dequeue here drops the head first.
_seriesByDates.Dequeue();
Stop(pair.Value);
if (_seriesByDates.Count == 0)
break;
pair = _seriesByDates.Peek();
}
}
}
/// <summary>
/// The data source for <see cref="CandleBuilder{T}"/> which creates <see cref="ICandleBuilderSourceValue"/> from tick trades <see cref="Trade"/>.
/// </summary>
public class TradeCandleBuilderSource : RealTimeCandleBuilderSource<Trade>
{
    /// <summary>
    /// Initializes a new instance of the <see cref="TradeCandleBuilderSource"/>.
    /// </summary>
    /// <param name="connector">The connection through which new trades will be received using the event <see cref="IConnector.NewTrades"/>.</param>
    public TradeCandleBuilderSource(IConnector connector)
        : base(connector)
    {
        Connector.NewTrades += AddNewValues;
    }

    /// <summary>
    /// To get time ranges for which this source of passed candles series has data.
    /// </summary>
    /// <param name="series">Candles series.</param>
    /// <returns>Time ranges.</returns>
    public override IEnumerable<Range<DateTimeOffset>> GetSupportedRanges(CandleSeries series)
    {
        if (series == null)
            throw new ArgumentNullException(nameof(series));

        // Materialize once: the original enumerated the (potentially lazy) sequence
        // twice — IsEmpty() and then Min() — which could race with newly arriving trades.
        var trades = GetSecurityValues(series.Security).ToArray();
        // With no accumulated trades, data is available from "now" onwards.
        yield return new Range<DateTimeOffset>(trades.Length == 0 ? Connector.CurrentTime : trades.Min(v => v.Time), DateTimeOffset.MaxValue);
    }

    /// <summary>
    /// To register the getting data for the instrument.
    /// </summary>
    /// <param name="security">Security.</param>
    protected override void RegisterSecurity(Security security)
    {
        Connector.RegisterTrades(security);
    }

    /// <summary>
    /// To stop the getting data for the instrument.
    /// </summary>
    /// <param name="security">Security.</param>
    protected override void UnRegisterSecurity(Security security)
    {
        Connector.UnRegisterTrades(security);
    }

    /// <summary>
    /// To get previously accumulated values.
    /// </summary>
    /// <param name="security">Security.</param>
    /// <returns>Accumulated values.</returns>
    protected override IEnumerable<Trade> GetSecurityValues(Security security)
    {
        return Connector.Trades.Filter(security);
    }

    /// <summary>
    /// To convert <see cref="Trade"/> to <see cref="ICandleBuilderSourceValue"/>.
    /// </summary>
    /// <param name="value">New source data.</param>
    /// <returns>Data in format <see cref="ICandleBuilder"/>.</returns>
    protected override ICandleBuilderSourceValue Convert(Trade value)
    {
        return new TradeCandleBuilderSourceValue(value);
    }

    /// <summary>
    /// Release resources.
    /// </summary>
    protected override void DisposeManaged()
    {
        // Unsubscribe so the connector no longer pushes trades into a disposed source.
        Connector.NewTrades -= AddNewValues;
        base.DisposeManaged();
    }
}
/// <summary>
/// The data source for <see cref="CandleBuilder{T}"/> which creates <see cref="ICandleBuilderSourceValue"/> from the order book <see cref="MarketDepth"/>.
/// </summary>
public class MarketDepthCandleBuilderSource : RealTimeCandleBuilderSource<MarketDepth>
{
    // What part of the depth (e.g. best bid/ask) is turned into candle data.
    private readonly DepthCandleSourceTypes _type;

    /// <summary>
    /// Initializes a new instance of the <see cref="MarketDepthCandleBuilderSource"/>.
    /// </summary>
    /// <param name="connector">The connection through which changed order books will be received using the event <see cref="IConnector.MarketDepthsChanged"/>.</param>
    /// <param name="type">Type of candle depth based data.</param>
    public MarketDepthCandleBuilderSource(IConnector connector, DepthCandleSourceTypes type)
        : base(connector)
    {
        _type = type;
        Connector.MarketDepthsChanged += OnMarketDepthsChanged;
    }

    /// <summary>
    /// To get time ranges for which this source of passed candles series has data.
    /// </summary>
    /// <param name="series">Candles series.</param>
    /// <returns>Time ranges.</returns>
    public override IEnumerable<Range<DateTimeOffset>> GetSupportedRanges(CandleSeries series)
    {
        if (series == null)
            throw new ArgumentNullException(nameof(series));

        // No depth history is accumulated, so data only exists from "now" on.
        yield return new Range<DateTimeOffset>(Connector.CurrentTime, DateTimeOffset.MaxValue);
    }

    /// <summary>
    /// To register the getting data for the instrument.
    /// </summary>
    /// <param name="security">Security.</param>
    protected override void RegisterSecurity(Security security)
    {
        Connector.RegisterMarketDepth(security);
    }

    /// <summary>
    /// To stop the getting data for the instrument.
    /// </summary>
    /// <param name="security">Security.</param>
    protected override void UnRegisterSecurity(Security security)
    {
        Connector.UnRegisterMarketDepth(security);
    }

    /// <summary>
    /// To get previously accumulated values.
    /// </summary>
    /// <param name="security">Security.</param>
    /// <returns>Accumulated values (always empty: depths are not stored).</returns>
    protected override IEnumerable<MarketDepth> GetSecurityValues(Security security)
    {
        return Enumerable.Empty<MarketDepth>();
    }

    /// <summary>
    /// To convert <typeparam ref="TSourceValue"/> to <see cref="ICandleBuilderSourceValue"/>.
    /// </summary>
    /// <param name="value">New source data.</param>
    /// <returns>Data in format <see cref="ICandleBuilder"/>.</returns>
    protected override ICandleBuilderSourceValue Convert(MarketDepth value)
    {
        return new DepthCandleBuilderSourceValue(value, _type);
    }

    private void OnMarketDepthsChanged(IEnumerable<MarketDepth> depths)
    {
        // Snapshot each depth via Clone before handing it to the pipeline.
        AddNewValues(from depth in depths select depth.Clone());
    }

    /// <summary>
    /// Release resources.
    /// </summary>
    protected override void DisposeManaged()
    {
        Connector.MarketDepthsChanged -= OnMarketDepthsChanged;
        base.DisposeManaged();
    }
}
}
| |
using System;
using OpenQA.Selenium;
namespace OpenQA.Selenite
{
/// <summary>
/// Represents the key on the keyboard. Instances wrap the raw character
/// sequence sent to WebDriver (<see cref="Value"/>) together with a
/// human-readable visualization used by <see cref="ToString"/>.
/// Keys can be combined with <c>+</c> or <c>|</c>; both concatenate the
/// raw sequences and join the visualizations with "+".
/// </summary>
public class Key : IEquatable<Key>
{
    public static readonly Key D0 = new Key("0", "0");
    public static readonly Key D1 = new Key("1", "1");
    public static readonly Key D2 = new Key("2", "2");
    public static readonly Key D3 = new Key("3", "3");
    public static readonly Key D4 = new Key("4", "4");
    public static readonly Key D5 = new Key("5", "5");
    public static readonly Key D6 = new Key("6", "6");
    public static readonly Key D7 = new Key("7", "7");
    public static readonly Key D8 = new Key("8", "8");
    public static readonly Key D9 = new Key("9", "9");
    public static readonly Key A = new Key("a", "A");
    public static readonly Key B = new Key("b", "B");
    public static readonly Key C = new Key("c", "C");
    public static readonly Key D = new Key("d", "D");
    public static readonly Key E = new Key("e", "E");
    public static readonly Key F = new Key("f", "F");
    public static readonly Key G = new Key("g", "G");
    public static readonly Key H = new Key("h", "H");
    public static readonly Key I = new Key("i", "I");
    public static readonly Key J = new Key("j", "J");
    public static readonly Key K = new Key("k", "K");
    public static readonly Key L = new Key("l", "L");
    public static readonly Key M = new Key("m", "M");
    public static readonly Key N = new Key("n", "N");
    public static readonly Key O = new Key("o", "O");
    public static readonly Key P = new Key("p", "P");
    public static readonly Key Q = new Key("q", "Q");
    public static readonly Key R = new Key("r", "R");
    public static readonly Key S = new Key("s", "S");
    public static readonly Key T = new Key("t", "T");
    public static readonly Key U = new Key("u", "U");
    public static readonly Key V = new Key("v", "V");
    public static readonly Key W = new Key("w", "W");
    public static readonly Key X = new Key("x", "X");
    public static readonly Key Y = new Key("y", "Y");
    public static readonly Key Z = new Key("z", "Z");
    public static readonly Key Alt = new Key(Keys.Alt, "Alt");
    public static readonly Key Apostrophe = new Key("'", "'");
    public static readonly Key Backslash = new Key(@"\", @"\");
    public static readonly Key Backspace = new Key(Keys.Backspace, "Backspace");
    public static readonly Key Comma = new Key(",", ",");
    public static readonly Key Command = new Key(Keys.Command, "Command");
    public static readonly Key Control = new Key(Keys.Control, "Control");
    public static readonly Key Enter = new Key(Keys.Enter, "Enter");
    public static readonly Key Equal = new Key(Keys.Equal, "Equal");
    public static readonly Key Escape = new Key(Keys.Escape, "Escape");
    public static readonly Key Grave = new Key("`", "`");
    public static readonly Key LeftBracket = new Key("[", "[");
    public static readonly Key Period = new Key(".", ".");
    public static readonly Key RightBracket = new Key("]", "]");
    public static readonly Key Semicolon = new Key(Keys.Semicolon, ";");
    public static readonly Key Shift = new Key(Keys.Shift, "Shift");
    public static readonly Key Slash = new Key("/", "/");
    public static readonly Key Space = new Key(Keys.Space, "Space");
    public static readonly Key Tab = new Key(Keys.Tab, "Tab");

    // Display text returned by ToString(); never sent to the driver.
    private readonly string _visualization;

    private Key(string key, string visualization)
    {
        Value = key;
        _visualization = visualization;
    }

    /// <summary>Raw key sequence that is sent to WebDriver.</summary>
    internal string Value { get; }

    /// <summary>
    /// Two keys are equal when their raw <see cref="Value"/> sequences match;
    /// the visualization does not participate in equality.
    /// </summary>
    public bool Equals(Key other)
    {
        if (ReferenceEquals(null, other))
            return false;
        if (ReferenceEquals(this, other))
            return true;
        return string.Equals(Value, other.Value);
    }

    // TODO: provide a constructor (or implicit assignment operator?) for System.ConsoleKey and System.Windows.Forms.Keys???

    /// <summary>Combines two keys into a sequence/chord (e.g. Control + C).</summary>
    public static Key operator +(Key left, Key right)
    {
        return Combine(left, right);
    }

    /// <summary>Same behavior as <see cref="op_Addition"/>; kept for convenience.</summary>
    public static Key operator |(Key left, Key right)
    {
        return Combine(left, right);
    }

    // Single implementation shared by both combination operators:
    // raw values are concatenated, visualizations are joined with '+'.
    private static Key Combine(Key left, Key right)
    {
        return new Key(string.Format("{0}{1}", left.Value, right.Value), string.Format("{0}+{1}", left, right));
    }

    public override bool Equals(object obj)
    {
        if (ReferenceEquals(null, obj))
            return false;
        if (ReferenceEquals(this, obj))
            return true;
        if (obj.GetType() != GetType())
            return false;
        return Equals((Key)obj);
    }

    public override int GetHashCode()
    {
        return Value != null ? Value.GetHashCode() : 0;
    }

    public override string ToString()
    {
        return _visualization;
    }

    /// <summary>
    /// Represents the function keys found at the top of a standard keyboard.
    /// </summary>
    public static class Function
    {
        public static readonly Key F1 = new Key(Keys.F1, "F1");
        public static readonly Key F2 = new Key(Keys.F2, "F2");
        public static readonly Key F3 = new Key(Keys.F3, "F3");
        public static readonly Key F4 = new Key(Keys.F4, "F4");
        public static readonly Key F5 = new Key(Keys.F5, "F5");
        public static readonly Key F6 = new Key(Keys.F6, "F6");
        public static readonly Key F7 = new Key(Keys.F7, "F7");
        public static readonly Key F8 = new Key(Keys.F8, "F8");
        public static readonly Key F9 = new Key(Keys.F9, "F9");
        public static readonly Key F10 = new Key(Keys.F10, "F10");
        public static readonly Key F11 = new Key(Keys.F11, "F11");
        public static readonly Key F12 = new Key(Keys.F12, "F12");
    }

    /// <summary>
    /// Represents the arrow keys found to the right on a standard keyboard.
    /// </summary>
    public static class Arrows
    {
        public static readonly Key Up = new Key(Keys.Up, "Up");
        public static readonly Key Down = new Key(Keys.Down, "Down");
        public static readonly Key Left = new Key(Keys.Left, "Left");
        public static readonly Key Right = new Key(Keys.Right, "Right");
    }

    /// <summary>
    /// Represents the keys that are part of the standard 10-key on the far right of a standard keyboard.
    /// </summary>
    public static class Numpad
    {
        public static readonly Key NumberPad0 = new Key(Keys.NumberPad0, "NumberPad0");
        public static readonly Key NumberPad1 = new Key(Keys.NumberPad1, "NumberPad1");
        public static readonly Key NumberPad2 = new Key(Keys.NumberPad2, "NumberPad2");
        public static readonly Key NumberPad3 = new Key(Keys.NumberPad3, "NumberPad3");
        public static readonly Key NumberPad4 = new Key(Keys.NumberPad4, "NumberPad4");
        public static readonly Key NumberPad5 = new Key(Keys.NumberPad5, "NumberPad5");
        public static readonly Key NumberPad6 = new Key(Keys.NumberPad6, "NumberPad6");
        public static readonly Key NumberPad7 = new Key(Keys.NumberPad7, "NumberPad7");
        public static readonly Key NumberPad8 = new Key(Keys.NumberPad8, "NumberPad8");
        public static readonly Key NumberPad9 = new Key(Keys.NumberPad9, "NumberPad9");
        public static readonly Key Add = new Key(Keys.Add, "Add");
        public static readonly Key Subtract = new Key(Keys.Subtract, "Subtract");
        public static readonly Key Multiply = new Key(Keys.Multiply, "Multiply");
        public static readonly Key Divide = new Key(Keys.Divide, "Divide");
        public static readonly Key Decimal = new Key(Keys.Decimal, "Decimal");
    }

    /// <summary>
    /// Represents the keys not found in any of the other regions on the keyboard.
    /// </summary>
    public static class Other
    {
        public static readonly Key Insert = new Key(Keys.Insert, "Insert");
        public static readonly Key Delete = new Key(Keys.Delete, "Delete");
        public static readonly Key Home = new Key(Keys.Home, "Home");
        public static readonly Key End = new Key(Keys.End, "End");
        public static readonly Key PageUp = new Key(Keys.PageUp, "PageUp");
        public static readonly Key PageDown = new Key(Keys.PageDown, "PageDown");
        public static readonly Key Pause = new Key(Keys.Pause, "Pause");
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Globalization;
using Xunit;
namespace System.Text.RegularExpressions
{
public class RegexUnicodeCharTests
{
    // Exclusive upper bound of the BMP code points exercised below (2 << 15 == 0x10000).
    private const int MaxUnicodeRange = 2 << 15;

    [Fact]
    public static void RegexUnicodeChar()
    {
        // Regex engine is Unicode aware now for the \w and \d character classes
        // \s is not - i.e. it still only recognizes the ASCII space separators, not Unicode ones
        // The Unicode categories matched by \w (mirrored by the switch below):
        // [\p{Ll}\p{Lu}\p{Lt}\p{Lm}\p{Lo}\p{Nd}\p{Mn}\p{Pc}]
        List<char> validChars = new List<char>();
        List<char> invalidChars = new List<char>();
        for (int i = 0; i < MaxUnicodeRange; i++)
        {
            char c = (char)i;
            switch (CharUnicodeInfo.GetUnicodeCategory(c))
            {
                case UnicodeCategory.UppercaseLetter: // Lu
                case UnicodeCategory.LowercaseLetter: // Ll
                case UnicodeCategory.TitlecaseLetter: // Lt
                case UnicodeCategory.ModifierLetter: // Lm
                case UnicodeCategory.OtherLetter: // Lo
                case UnicodeCategory.DecimalDigitNumber: // Nd
                // case UnicodeCategory.LetterNumber: // Nl - not matched by \w
                // case UnicodeCategory.OtherNumber: // No - not matched by \w
                case UnicodeCategory.NonSpacingMark: // Mn
                // case UnicodeCategory.SpacingCombiningMark: // Mc - not matched by \w
                case UnicodeCategory.ConnectorPunctuation: // Pc
                    validChars.Add(c);
                    break;
                default:
                    invalidChars.Add(c);
                    break;
            }
        }

        // \w - we will create strings from valid characters that form \w and make sure that the regex engine catches this.
        // Build a random string with valid characters followed by invalid characters
        // (fixed seed keeps the test deterministic).
        Random random = new Random(-55);
        Regex regex = new Regex(@"\w*");

        int validCharLength = 10;
        int invalidCharLength = 15;

        for (int i = 0; i < 100; i++)
        {
            // builder1 holds the full input; builder2 only the expected match.
            StringBuilder builder1 = new StringBuilder();
            StringBuilder builder2 = new StringBuilder();

            for (int j = 0; j < validCharLength; j++)
            {
                char c = validChars[random.Next(validChars.Count)];
                builder1.Append(c);
                builder2.Append(c);
            }

            for (int j = 0; j < invalidCharLength; j++)
                builder1.Append(invalidChars[random.Next(invalidChars.Count)]);

            string input = builder1.ToString();
            Match match = regex.Match(input);
            Assert.True(match.Success);

            Assert.Equal(builder2.ToString(), match.Value);
            Assert.Equal(0, match.Index);
            Assert.Equal(validCharLength, match.Length);

            match = match.NextMatch();
            do
            {
                // We get empty matches for each of the non-matching characters of input to match
                // the * wildcard in regex pattern.
                Assert.Equal(string.Empty, match.Value);
                Assert.Equal(0, match.Length);
                match = match.NextMatch();
            } while (match.Success);
        }

        // Build a random string with invalid characters followed by valid characters and then again invalid
        random = new Random(-55);
        regex = new Regex(@"\w+");

        validCharLength = 10;
        invalidCharLength = 15;

        for (int i = 0; i < 500; i++)
        {
            StringBuilder builder1 = new StringBuilder();
            StringBuilder builder2 = new StringBuilder();

            for (int j = 0; j < invalidCharLength; j++)
                builder1.Append(invalidChars[random.Next(invalidChars.Count)]);

            for (int j = 0; j < validCharLength; j++)
            {
                char c = validChars[random.Next(validChars.Count)];
                builder1.Append(c);
                builder2.Append(c);
            }

            for (int j = 0; j < invalidCharLength; j++)
                builder1.Append(invalidChars[random.Next(invalidChars.Count)]);

            string input = builder1.ToString();
            Match match = regex.Match(input);
            Assert.True(match.Success);

            Assert.Equal(builder2.ToString(), match.Value);
            Assert.Equal(invalidCharLength, match.Index);
            Assert.Equal(validCharLength, match.Length);

            // \w+ requires at least one word character, so there is no second match.
            match = match.NextMatch();
            Assert.False(match.Success);
        }

        // Re-partition the characters: only Nd counts as valid for \d.
        validChars = new List<char>();
        invalidChars = new List<char>();
        for (int i = 0; i < MaxUnicodeRange; i++)
        {
            char c = (char)i;
            if (CharUnicodeInfo.GetUnicodeCategory(c) == UnicodeCategory.DecimalDigitNumber)
            {
                validChars.Add(c);
            }
            else
            {
                invalidChars.Add(c);
            }
        }

        // \d - we will create strings from valid characters that form \d and make sure that the regex engine catches this.
        // Build a random string with valid characters and then again invalid
        regex = new Regex(@"\d+");

        validCharLength = 10;
        invalidCharLength = 15;

        for (int i = 0; i < 100; i++)
        {
            StringBuilder builder1 = new StringBuilder();
            StringBuilder builder2 = new StringBuilder();

            for (int j = 0; j < validCharLength; j++)
            {
                char c = validChars[random.Next(validChars.Count)];
                builder1.Append(c);
                builder2.Append(c);
            }

            for (int j = 0; j < invalidCharLength; j++)
                builder1.Append(invalidChars[random.Next(invalidChars.Count)]);

            string input = builder1.ToString();
            Match match = regex.Match(input);

            Assert.Equal(builder2.ToString(), match.Value);
            Assert.Equal(0, match.Index);
            Assert.Equal(validCharLength, match.Length);

            match = match.NextMatch();
            Assert.False(match.Success);
        }

        // Build a random string with invalid characters, valid and then again invalid
        regex = new Regex(@"\d+");

        validCharLength = 10;
        invalidCharLength = 15;

        for (int i = 0; i < 100; i++)
        {
            StringBuilder builder1 = new StringBuilder();
            StringBuilder builder2 = new StringBuilder();

            for (int j = 0; j < invalidCharLength; j++)
                builder1.Append(invalidChars[random.Next(invalidChars.Count)]);

            for (int j = 0; j < validCharLength; j++)
            {
                char c = validChars[random.Next(validChars.Count)];
                builder1.Append(c);
                builder2.Append(c);
            }

            for (int j = 0; j < invalidCharLength; j++)
                builder1.Append(invalidChars[random.Next(invalidChars.Count)]);

            string input = builder1.ToString();
            Match match = regex.Match(input);
            Assert.True(match.Success);

            Assert.Equal(builder2.ToString(), match.Value);
            Assert.Equal(invalidCharLength, match.Index);
            Assert.Equal(validCharLength, match.Length);

            match = match.NextMatch();
            Assert.False(match.Success);
        }
    }
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="FramingSpec.cs" company="Akka.NET Project">
// Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Akka.IO;
using Akka.Streams.Dsl;
using Akka.Streams.Stage;
using Akka.Streams.TestKit.Tests;
using Akka.Util;
using FluentAssertions;
using Xunit;
using Xunit.Abstractions;
namespace Akka.Streams.Tests.Dsl
{
// Tests for Framing: delimiter-based and length-field based frame decoding,
// exercised across randomized re-chunking of the byte stream.
public class FramingSpec : AkkaSpec
{
    private readonly ITestOutputHelper _helper;

    private ActorMaterializer Materializer { get; }

    public FramingSpec(ITestOutputHelper helper) : base(helper)
    {
        _helper = helper;
        var settings = ActorMaterializerSettings.Create(Sys);
        Materializer = ActorMaterializer.Create(Sys, settings);
    }

    // Re-slices the incoming ByteString stream into random-sized chunks so the
    // framing stages are exercised across arbitrary chunk boundaries.
    private sealed class Rechunker : PushPullStage<ByteString, ByteString>
    {
        // Bytes received but not yet re-emitted.
        private ByteString _rechunkBuffer = ByteString.Empty;

        public override ISyncDirective OnPush(ByteString element, IContext<ByteString> context)
        {
            _rechunkBuffer += element;
            return Rechunk(context);
        }

        public override ISyncDirective OnPull(IContext<ByteString> context) => Rechunk(context);

        // Keep the stage alive until the buffer has been fully drained.
        public override ITerminationDirective OnUpstreamFinish(IContext<ByteString> context)
            => _rechunkBuffer.IsEmpty ? context.Finish() : context.AbsorbTermination();

        private ISyncDirective Rechunk(IContext<ByteString> context)
        {
            // Next(1, 3) yields 1 or 2, so roughly half the time we pull more
            // data instead of emitting, varying chunk timing as well as size.
            if (!context.IsFinishing && ThreadLocalRandom.Current.Next(1, 3) == 2)
                return context.Pull();

            // Emit a random-sized (possibly empty) prefix of the buffer.
            var nextChunkSize = _rechunkBuffer.IsEmpty
                ? 0
                : ThreadLocalRandom.Current.Next(0, _rechunkBuffer.Count + 1);
            var newChunk = _rechunkBuffer.Take(nextChunkSize);
            _rechunkBuffer = _rechunkBuffer.Drop(nextChunkSize);

            return context.IsFinishing && _rechunkBuffer.IsEmpty
                ? context.PushAndFinish(newChunk)
                : context.Push(newChunk);
        }
    }

    // Flow wrapper around Rechunker; a fresh stage instance per materialization.
    private Flow<ByteString, ByteString, NotUsed> Rechunk
        => Flow.Create<ByteString>().Transform(() => new Rechunker()).Named("rechunker");

    private static readonly List<ByteString> DelimiterBytes =
        new List<string> { "\n", "\r\n", "FOO" }.Select(ByteString.FromString).ToList();

    private static readonly List<ByteString> BaseTestSequences =
        new List<string> { "", "foo", "hello world" }.Select(ByteString.FromString).ToList();

    // Delimiter framing followed by UTF-8 decoding of each frame.
    private static Flow<ByteString, string, NotUsed> SimpleLines(string delimiter, int maximumBytes, bool allowTruncation = true)
    {
        return Framing.Delimiter(ByteString.FromString(delimiter), maximumBytes, allowTruncation)
            .Select(x => x.DecodeString(Encoding.UTF8)).Named("LineFraming");
    }

    // Prefixes each base sequence with every proper prefix of the delimiter,
    // to stress partial-delimiter handling inside frames.
    private static IEnumerable<ByteString> CompleteTestSequence(ByteString delimiter)
    {
        for (var i = 0; i < delimiter.Count; i++)
            foreach (var sequence in BaseTestSequences)
                yield return delimiter.Take(i) + sequence;
    }

    [Fact]
    public void Delimiter_bytes_based_framing_must_work_with_various_delimiters_and_test_sequences()
    {
        // Repeated runs give the random Rechunker many different chunkings.
        for (var i = 1; i <= 100; i++)
        {
            foreach (var delimiter in DelimiterBytes)
            {
                var task = Source.From(CompleteTestSequence(delimiter))
                    .Select(x => x + delimiter)
                    .Via(Rechunk)
                    .Via(Framing.Delimiter(delimiter, 256))
                    .Grouped(1000)
                    .RunWith(Sink.First<IEnumerable<ByteString>>(), Materializer);

                task.Wait(TimeSpan.FromDays(3)).Should().BeTrue();
                // Every frame must round-trip unchanged.
                task.Result.ShouldAllBeEquivalentTo(CompleteTestSequence(delimiter));
            }
        }
    }

    [Fact]
    public void Delimiter_bytes_based_framing_must_respect_maximum_line_settings()
    {
        // Single-character lines fit the 1-byte maximum.
        var task1 = Source.Single(ByteString.FromString("a\nb\nc\nd\n"))
            .Via(SimpleLines("\n", 1))
            .Limit(100)
            .RunWith(Sink.Seq<string>(), Materializer);

        task1.Wait(TimeSpan.FromDays(3)).Should().BeTrue();
        task1.Result.ShouldAllBeEquivalentTo(new[] { "a", "b", "c", "d" });

        // A two-character line must overflow the 1-byte maximum and fail.
        var task2 =
            Source.Single(ByteString.FromString("ab\n"))
                .Via(SimpleLines("\n", 1))
                .Limit(100)
                .RunWith(Sink.Seq<string>(), Materializer);

        task2.Invoking(t => t.Wait(TimeSpan.FromSeconds(3))).ShouldThrow<Framing.FramingException>();
    }

    [Fact]
    public void Delimiter_bytes_based_framing_must_work_with_empty_streams()
    {
        var task = Source.Empty<ByteString>().Via(SimpleLines("\n", 256)).RunAggregate(new List<string>(), (list, s) =>
        {
            list.Add(s);
            return list;
        }, Materializer);

        task.Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
        task.Result.Should().BeEmpty();
    }

    [Fact]
    public void Delimiter_bytes_based_framing_must_report_truncated_frames()
    {
        // Input deliberately lacks the trailing delimiter; the string content
        // itself (typo included) is irrelevant to the assertion.
        var task =
            Source.Single(ByteString.FromString("I habe no end"))
                .Via(SimpleLines("\n", 256, false))
                .Grouped(1000)
                .RunWith(Sink.First<IEnumerable<string>>(), Materializer);

        task.Invoking(t => t.Wait(TimeSpan.FromSeconds(3))).ShouldThrow<Framing.FramingException>();
    }

    [Fact]
    public void Delimiter_bytes_based_framing_must_allow_truncated_frames_if_configured_so()
    {
        // Same missing-delimiter input, but allowTruncation defaults to true here.
        var task =
            Source.Single(ByteString.FromString("I have no end"))
                .Via(SimpleLines("\n", 256))
                .Grouped(1000)
                .RunWith(Sink.First<IEnumerable<string>>(), Materializer);

        task.Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
        task.Result.Should().ContainSingle(s => s.Equals("I have no end"));
    }

    // Random alphanumeric test payload of the given length.
    private static string RandomString(int length)
    {
        const string chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
        var random = new Random();
        return new string(Enumerable.Repeat(chars, length)
            .Select(s => s[random.Next(s.Length)]).ToArray());
    }

    // Shared payload pool; large enough for the biggest frame length below.
    private static readonly ByteString ReferenceChunk = ByteString.FromString(RandomString(0x100001));

    private static readonly List<ByteOrder> ByteOrders = new List<ByteOrder>
    {
        ByteOrder.BigEndian,
        ByteOrder.LittleEndian
    };

    // Frame sizes around the interesting power-of-two boundaries.
    private static readonly List<int> FrameLengths = new List<int>
    {
        0,
        1,
        2,
        3,
        0xFF,
        0x100,
        0x101,
        0xFFF,
        0x1000,
        0x1001,
        0xFFFF,
        0x10000,
        0x10001
    };

    private static readonly List<int> FieldLengths = new List<int> { 1, 2, 3, 4 };

    private static readonly List<int> FieldOffsets = new List<int> { 0, 1, 2, 3, 15, 16, 31, 32, 44, 107 };

    // Builds: fieldOffset zero bytes + length header (fieldLength bytes in the
    // given byte order) + payload.
    private static ByteString Encode(ByteString payload, int fieldOffset, int fieldLength, ByteOrder byteOrder)
    {
        var h = new ByteStringBuilder().PutInt(payload.Count, byteOrder).Result();
        // Keep the significant bytes of the 4-byte header for the field width.
        var header = byteOrder == ByteOrder.LittleEndian ? h.Take(fieldLength) : h.Drop(4 - fieldLength);
        return ByteString.Create(new byte[fieldOffset]) + header + payload;
    }

    [Fact]
    public void Length_field_based_framing_must_work_with_various_byte_orders_frame_lengths_and_offsets()
    {
        var counter = 1;
        foreach (var byteOrder in ByteOrders)
        {
            foreach (var fieldOffset in FieldOffsets)
            {
                foreach (var fieldLength in FieldLengths)
                {
                    // Only lengths representable in fieldLength bytes are valid.
                    var encodedFrames = FrameLengths.Where(x => x < 1L << (fieldLength * 8)).Select(length =>
                    {
                        var payload = ReferenceChunk.Take(length);
                        return Encode(payload, fieldOffset, fieldLength, byteOrder);
                    }).ToList();

                    var task = Source.From(encodedFrames)
                        .Via(Rechunk)
                        .Via(Framing.LengthField(fieldLength, int.MaxValue, fieldOffset, byteOrder))
                        .Grouped(10000)
                        .RunWith(Sink.First<IEnumerable<ByteString>>(), Materializer);

                    task.Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
                    task.Result.ShouldAllBeEquivalentTo(encodedFrames);
                    _helper.WriteLine($"{counter++} from 80 passed");
                }
            }
        }
    }

    [Fact]
    public void Length_field_based_framing_must_work_with_empty_streams()
    {
        var task = Source.Empty<ByteString>()
            .Via(Framing.LengthField(4, int.MaxValue, 0, ByteOrder.BigEndian))
            .RunAggregate(new List<ByteString>(), (list, s) =>
            {
                list.Add(s);
                return list;
            }, Materializer);

        task.Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
        task.Result.Should().BeEmpty();
    }

    [Fact]
    public void Length_field_based_framing_must_report_oversized_frames()
    {
        // Frame length (100) exceeds the configured maximum (99).
        var task1 = Source.Single(Encode(ReferenceChunk.Take(100), 0, 1, ByteOrder.BigEndian))
            .Via(Framing.LengthField(1, 99, 0, ByteOrder.BigEndian))
            .RunAggregate(new List<ByteString>(), (list, s) =>
            {
                list.Add(s);
                return list;
            }, Materializer);

        task1.Invoking(t => t.Wait(TimeSpan.FromSeconds(3))).ShouldThrow<Framing.FramingException>();

        // Offset mismatch (encoded with 49, decoded with 0) also fails.
        var task2 = Source.Single(Encode(ReferenceChunk.Take(100), 49, 1, ByteOrder.BigEndian))
            .Via(Framing.LengthField(1, 100, 0, ByteOrder.BigEndian))
            .RunAggregate(new List<ByteString>(), (list, s) =>
            {
                list.Add(s);
                return list;
            }, Materializer);

        task2.Invoking(t => t.Wait(TimeSpan.FromSeconds(3))).ShouldThrow<Framing.FramingException>();
    }

    [Fact]
    public void Length_field_based_framing_must_report_truncated_frames()
    {
        foreach (var byteOrder in ByteOrders)
        {
            foreach (var fieldOffset in FieldOffsets)
            {
                foreach (var fieldLength in FieldLengths)
                {
                    foreach (var frameLength in FrameLengths.Where(f => f < 1 << (fieldLength * 8) && f != 0))
                    {
                        // A frame missing its final byte must be reported.
                        var fullFrame = Encode(ReferenceChunk.Take(frameLength), fieldOffset, fieldLength, byteOrder);
                        var partialFrame = fullFrame.DropRight(1);

                        Action action = () =>
                        {
                            Source.From(new[] { fullFrame, partialFrame })
                                .Via(Rechunk)
                                .Via(Framing.LengthField(fieldLength, int.MaxValue, fieldOffset, byteOrder))
                                .Grouped(10000)
                                .RunWith(Sink.First<IEnumerable<ByteString>>(), Materializer)
                                .Wait(TimeSpan.FromSeconds(5));
                        };

                        action.ShouldThrow<Framing.FramingException>();
                    }
                }
            }
        }
    }

    [Fact]
    public void Length_field_based_framing_must_support_simple_framing_adapter()
    {
        // Encode -> rechunk -> decode, joined back on itself as a loopback.
        var rechunkBidi = BidiFlow.FromFlowsMat(Rechunk, Rechunk, Keep.Left);
        var codecFlow = Framing.SimpleFramingProtocol(1024)
            .Atop(rechunkBidi)
            .Atop(Framing.SimpleFramingProtocol(1024).Reversed())
            .Join(Flow.Create<ByteString>()); // Loopback

        var random = new Random();
        var testMessages = Enumerable.Range(1, 100).Select(_ => ReferenceChunk.Take(random.Next(1024))).ToList();

        var task = Source.From(testMessages)
            .Via(codecFlow)
            .Limit(1000)
            .RunWith(Sink.Seq<ByteString>(), Materializer);

        task.Wait(TimeSpan.FromSeconds(3)).Should().BeTrue();
        task.Result.ShouldAllBeEquivalentTo(testMessages);
    }
}
}
| |
using J2N.Numerics;
using Lucene.Net.Support;
using System;
using System.Diagnostics;
namespace Lucene.Net.Util
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// BitSet of fixed length (<see cref="numBits"/>), backed by accessible (<see cref="GetBits()"/>)
/// <see cref="T:long[]"/>, accessed with a <see cref="long"/> index. Use it only if you intend to store more
/// than 2.1B bits, otherwise you should use <see cref="FixedBitSet"/>.
/// <para/>
/// NOTE: This was LongBitSet in Lucene
/// <para/>
/// @lucene.internal
/// </summary>
#if FEATURE_SERIALIZABLE
[Serializable]
#endif
public sealed class Int64BitSet
{
private readonly long[] bits;
private readonly long numBits;
private readonly int numWords;
/// <summary>
/// If the given <see cref="Int64BitSet"/> is large enough to hold
/// <paramref name="numBits"/>, returns the given <paramref name="bits"/>, otherwise returns a new
/// <see cref="Int64BitSet"/> which can hold the requested number of bits.
///
/// <para/>
/// <b>NOTE:</b> the returned bitset reuses the underlying <see cref="T:long[]"/> of
/// the given <paramref name="bits"/> if possible. Also, reading <see cref="Length"/> on the
/// returned bits may return a value greater than <paramref name="numBits"/>.
/// </summary>
public static Int64BitSet EnsureCapacity(Int64BitSet bits, long numBits)
{
    if (numBits < bits.Length)
    {
        return bits;
    }
    else
    {
        int numWords = Bits2words(numBits);
        long[] arr = bits.GetBits();
        if (numWords >= arr.Length)
        {
            // Grow to at least numWords + 1 so the new Length stays > numBits.
            arr = ArrayUtil.Grow(arr, numWords + 1);
        }
        // Cast before shifting: "arr.Length << 6" is evaluated in 32-bit
        // arithmetic and overflows once the array reaches 2^25 words, which is
        // exactly the >2.1B-bit range this class exists for (cf. LUCENE-7705).
        return new Int64BitSet(arr, (long)arr.Length << 6);
    }
}
/// <summary>
/// Returns the number of 64 bit words it would take to hold <paramref name="numBits"/>. </summary>
public static int Bits2words(long numBits)
{
    // Unsigned shift: one word per full 64 bits, plus one for any remainder.
    int words = (int)((long)((ulong)numBits >> 6));
    if ((numBits & 0x3f) != 0)
    {
        words++;
    }
    return words;
}
/// <summary>
/// Creates a bit set able to hold <paramref name="numBits"/> bits, all clear.
/// </summary>
public Int64BitSet(long numBits)
{
    this.numBits = numBits;
    this.bits = new long[Bits2words(numBits)];
    this.numWords = this.bits.Length;
}
/// <summary>
/// Wraps an existing <paramref name="storedBits"/> array as a bit set of
/// <paramref name="numBits"/> bits; the array is used directly, not copied.
/// </summary>
/// <exception cref="ArgumentException">If the array cannot hold <paramref name="numBits"/> bits.</exception>
public Int64BitSet(long[] storedBits, long numBits)
{
    int requiredWords = Bits2words(numBits);
    if (requiredWords > storedBits.Length)
    {
        throw new ArgumentException("The given long array is too small to hold " + numBits + " bits");
    }
    this.numWords = requiredWords;
    this.numBits = numBits;
    this.bits = storedBits;
}
/// <summary>
/// Returns the number of bits stored in this bitset (the capacity, not the
/// number of set bits - see <see cref="Cardinality"/> for that). </summary>
public long Length => numBits;
/// <summary>
/// Expert. Exposes the backing <see cref="T:long[]"/> directly (no copy);
/// mutations through it are visible to this bit set. </summary>
[WritableArray]
public long[] GetBits()
{
    return bits;
}
/// <summary>
/// Returns number of set bits. NOTE: this visits every
/// long in the backing bits array, and the result is not
/// internally cached!
/// </summary>
public long Cardinality()
{
    // Population count over the whole backing array.
    return BitUtil.Pop_Array(bits, 0, bits.Length);
}
/// <summary>
/// Returns <c>true</c> if the bit at <paramref name="index"/> is set.
/// </summary>
public bool Get(long index)
{
    Debug.Assert(index >= 0 && index < numBits, "index=" + index);
    // A negative word index would trigger IndexOutOfRangeException on the
    // array access, so no explicit range check is needed here.
    int wordNum = (int)(index >> 6);            // div 64
    long bitmask = 1L << (int)(index & 0x3f);   // mod 64
    return (bits[wordNum] & bitmask) != 0;
}
/// <summary>
/// Sets the bit at <paramref name="index"/>.
/// </summary>
public void Set(long index)
{
    Debug.Assert(index >= 0 && index < numBits, "index=" + index + " numBits=" + numBits);
    int wordNum = (int)(index >> 6);                 // div 64
    bits[wordNum] |= 1L << (int)(index & 0x3f);      // mod 64
}
/// <summary>
/// Sets the bit at <paramref name="index"/> and returns its previous value.
/// </summary>
public bool GetAndSet(long index)
{
    Debug.Assert(index >= 0 && index < numBits);
    int wordNum = (int)(index >> 6);            // div 64
    long bitmask = 1L << (int)(index & 0x3f);   // mod 64
    bool previous = (bits[wordNum] & bitmask) != 0;
    bits[wordNum] |= bitmask;
    return previous;
}
/// <summary>
/// Clears the bit at <paramref name="index"/>.
/// </summary>
public void Clear(long index)
{
    Debug.Assert(index >= 0 && index < numBits);
    int wordNum = (int)(index >> 6);            // div 64
    long bitmask = 1L << (int)(index & 0x3f);   // mod 64
    bits[wordNum] &= ~bitmask;
}
/// <summary>
/// Clears the bit at <paramref name="index"/> and returns its previous value.
/// </summary>
public bool GetAndClear(long index)
{
    Debug.Assert(index >= 0 && index < numBits);
    int wordNum = (int)(index >> 6);            // div 64
    long bitmask = 1L << (int)(index & 0x3f);   // mod 64
    bool previous = (bits[wordNum] & bitmask) != 0;
    bits[wordNum] &= ~bitmask;
    return previous;
}
/// <summary>
/// Returns the index of the first set bit starting at the <paramref name="index"/> specified.
/// -1 is returned if there are no more set bits.
/// </summary>
public long NextSetBit(long index)
{
    Debug.Assert(index >= 0 && index < numBits);
    int i = (int)(index >> 6);
    int subIndex = (int)(index & 0x3f); // index within the word
    long word = bits[i] >> subIndex; // skip all the bits to the right of index
    if (word != 0)
    {
        // Sign-fill from the arithmetic shift is harmless: only the lowest
        // set bit matters and the low bits are shifted in correctly.
        return index + word.TrailingZeroCount();
    }
    while (++i < numWords)
    {
        word = bits[i];
        if (word != 0)
        {
            // Cast to long before shifting: for word indices >= 2^25 the
            // 32-bit expression (i << 6) overflows, returning garbage for
            // exactly the >2.1B-bit sets this class targets (LUCENE-7705).
            return ((long)i << 6) + word.TrailingZeroCount();
        }
    }
    return -1;
}
/// <summary>
/// Returns the index of the last set bit before or on the <paramref name="index"/> specified.
/// -1 is returned if there are no more set bits.
/// </summary>
public long PrevSetBit(long index)
{
    Debug.Assert(index >= 0 && index < numBits, "index=" + index + " numBits=" + numBits);
    int i = (int)(index >> 6); // word containing index
    int subIndex = (int)(index & 0x3f); // bit position within that word
    long word = (bits[i] << (63 - subIndex)); // skip all the bits to the left of index
    if (word != 0)
    {
        // BUGFIX: widen i before shifting. "i << 6" is 32-bit arithmetic and
        // overflows for word indices >= (1 << 25), i.e. for sets larger than
        // 2^31 bits. See also LUCENE-3197 for the subIndex arithmetic.
        return ((long)i << 6) + subIndex - word.LeadingZeroCount();
    }
    while (--i >= 0)
    {
        word = bits[i];
        if (word != 0)
        {
            return ((long)i << 6) + 63 - word.LeadingZeroCount();
        }
    }
    return -1;
}
/// <summary>
/// this = this OR other </summary>
public void Or(Int64BitSet other)
{
    Debug.Assert(other.numWords <= numWords, "numWords=" + numWords + ", other.numWords=" + other.numWords);
    // only the words both sets share need to be combined
    for (int pos = Math.Min(numWords, other.numWords) - 1; pos >= 0; pos--)
    {
        bits[pos] |= other.bits[pos];
    }
}
/// <summary>
/// this = this XOR other </summary>
public void Xor(Int64BitSet other)
{
    Debug.Assert(other.numWords <= numWords, "numWords=" + numWords + ", other.numWords=" + other.numWords);
    // only the words both sets share need to be combined
    for (int pos = Math.Min(numWords, other.numWords) - 1; pos >= 0; pos--)
    {
        bits[pos] ^= other.bits[pos];
    }
}
/// <summary>
/// Returns <c>true</c> if the sets have any elements in common </summary>
public bool Intersects(Int64BitSet other)
{
    // scan only the overlapping words; any shared 1-bit is an intersection
    for (int pos = Math.Min(numWords, other.numWords) - 1; pos >= 0; pos--)
    {
        if ((bits[pos] & other.bits[pos]) != 0)
        {
            return true;
        }
    }
    return false;
}
/// <summary>
/// this = this AND other </summary>
public void And(Int64BitSet other)
{
    int common = Math.Min(numWords, other.numWords);
    for (int pos = common - 1; pos >= 0; pos--)
    {
        bits[pos] &= other.bits[pos];
    }
    // words past the other set's end AND with implicit zeros, so clear them
    if (numWords > other.numWords)
    {
        Arrays.Fill(bits, other.numWords, numWords, 0L);
    }
}
/// <summary>
/// this = this AND NOT other </summary>
public void AndNot(Int64BitSet other)
{
    // note: bounded by other's full backing array length, not other.numWords
    for (int pos = Math.Min(numWords, other.bits.Length) - 1; pos >= 0; pos--)
    {
        bits[pos] &= ~other.bits[pos];
    }
}
// NOTE: no .isEmpty() here because that's trappy (ie,
// typically isEmpty is low cost, but this one wouldn't
// be)

/// <summary>
/// Flips a range of bits
/// </summary>
/// <param name="startIndex"> Lower index </param>
/// <param name="endIndex"> One-past the last bit to flip </param>
public void Flip(long startIndex, long endIndex)
{
    Debug.Assert(startIndex >= 0 && startIndex < numBits);
    Debug.Assert(endIndex >= 0 && endIndex <= numBits);
    // empty range: nothing to do
    if (endIndex <= startIndex)
    {
        return;
    }

    // first and last words touched by the range
    int startWord = (int)(startIndex >> 6);
    int endWord = (int)((endIndex - 1) >> 6);
    /*
    ///* Grrr, java shifting wraps around so -1L>>>64 == -1
    /// for that reason, make sure not to use endmask if the bits to flip will
    /// be zero in the last word (redefine endWord to be the last changed...)
    /// long startmask = -1L << (startIndex & 0x3f); // example: 11111...111000
    /// long endmask = -1L >>> (64-(endIndex & 0x3f)); // example: 00111...111111
    /// **
    */
    // C# likewise masks 64-bit shift counts to their low 6 bits, so
    // (int)startIndex behaves as (startIndex % 64) and (int)-endIndex as
    // (64 - endIndex % 64).
    long startmask = -1L << (int)startIndex;
    long endmask = (long)(unchecked(((ulong)-1L)) >> (int)-endIndex); // 64-(endIndex&0x3f) is the same as -endIndex due to wrap

    if (startWord == endWord)
    {
        // range fits in one word: flip only the overlap of both masks
        bits[startWord] ^= (startmask & endmask);
        return;
    }

    bits[startWord] ^= startmask;

    // interior words are inverted whole
    for (int i = startWord + 1; i < endWord; i++)
    {
        bits[i] = ~bits[i];
    }

    bits[endWord] ^= endmask;
}
/// <summary>
/// Sets a range of bits
/// </summary>
/// <param name="startIndex"> Lower index </param>
/// <param name="endIndex"> One-past the last bit to set </param>
public void Set(long startIndex, long endIndex)
{
    Debug.Assert(startIndex >= 0 && startIndex < numBits);
    Debug.Assert(endIndex >= 0 && endIndex <= numBits);
    // empty range: nothing to do
    if (endIndex <= startIndex)
    {
        return;
    }

    // first and last words touched by the range
    int startWord = (int)(startIndex >> 6);
    int endWord = (int)((endIndex - 1) >> 6);

    // shift counts wrap mod 64, so (int)startIndex == startIndex % 64 and
    // (int)-endIndex == 64 - (endIndex % 64)
    long startmask = -1L << (int)startIndex;
    long endmask = (long)(0xffffffffffffffffUL >> (int)-endIndex);//-(int)((uint)1L >> (int)-endIndex); // 64-(endIndex&0x3f) is the same as -endIndex due to wrap

    if (startWord == endWord)
    {
        // range fits in one word: set only the overlap of both masks
        bits[startWord] |= (startmask & endmask);
        return;
    }

    bits[startWord] |= startmask;
    // fill the interior words with all-ones
    Arrays.Fill(bits, startWord + 1, endWord, -1L);
    bits[endWord] |= endmask;
}
/// <summary>
/// Clears a range of bits.
/// </summary>
/// <param name="startIndex"> Lower index </param>
/// <param name="endIndex"> One-past the last bit to clear </param>
public void Clear(long startIndex, long endIndex)
{
    Debug.Assert(startIndex >= 0 && startIndex < numBits);
    Debug.Assert(endIndex >= 0 && endIndex <= numBits);
    // empty range: nothing to do
    if (endIndex <= startIndex)
    {
        return;
    }

    // first and last words touched by the range
    int startWord = (int)(startIndex >> 6);
    int endWord = (int)((endIndex - 1) >> 6);

    // Casting long to int discards MSBs, so it is no problem because we are taking mod 64.
    long startmask = (-1L) << (int)startIndex; // -1 << (startIndex mod 64)
    long endmask = (-1L) << (int)endIndex; // -1 << (endIndex mod 64)
    // a word-aligned end means the last word is cleared entirely
    if ((endIndex & 0x3f) == 0)
    {
        endmask = 0;
    }

    // invert so the masks KEEP the bits outside the cleared range
    startmask = ~startmask;

    if (startWord == endWord)
    {
        // range fits in one word: keep bits outside either mask
        bits[startWord] &= (startmask | endmask);
        return;
    }

    bits[startWord] &= startmask;
    // zero the interior words outright
    Arrays.Fill(bits, startWord + 1, endWord, 0L);
    bits[endWord] &= endmask;
}
/// <summary>
/// Returns a copy of this set backed by its own copy of the bits array,
/// with the same logical size.
/// </summary>
public Int64BitSet Clone()
{
    var copy = (long[])this.bits.Clone();
    return new Int64BitSet(copy, numBits);
}
/// <summary>
/// Returns <c>true</c> if both sets have the same bits set </summary>
public override bool Equals(object o)
{
    // same instance is trivially equal
    if (ReferenceEquals(this, o))
    {
        return true;
    }
    var other = o as Int64BitSet; // null for non-Int64BitSet (including null o)
    if (ReferenceEquals(other, null))
    {
        return false;
    }
    // sets of different logical size are never equal
    if (numBits != other.Length)
    {
        return false;
    }
    return Arrays.Equals(bits, other.bits);
}
/// <summary>
/// Hash derived by XOR-and-rotating every backing word.
/// </summary>
public override int GetHashCode()
{
    long h = 0;
    for (int i = numWords - 1; i >= 0; i--)
    {
        h ^= bits[i];
        h = (h << 1) | ((long)((ulong)h >> 63)); // rotate left by one bit
    }
    // fold leftmost bits into right and add a constant to prevent
    // empty sets from returning 0, which is too common.
    return (int)((h >> 32) ^ h) + unchecked((int)0x98761234);
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Concurrent;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.Options;
using Microsoft.Isam.Esent;
using Microsoft.Isam.Esent.Interop;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Esent
{
/// <summary>
/// <see cref="AbstractPersistentStorage"/> implementation backed by an Esent
/// database. Named streams are stored per solution, per project, or per
/// document; names and file paths are interned to integer ids via the
/// storage's name table and cached locally.
/// </summary>
internal partial class EsentPersistentStorage : AbstractPersistentStorage
{
    private const string StorageExtension = "vbcs.cache";
    private const string PersistentStorageFileName = "storage.ide";

    // cache delegates so that we don't re-create it every times
    private readonly Func<int, object, object, object, CancellationToken, Stream> _readStreamSolution;
    private readonly Func<EsentStorage.Key, int, object, object, CancellationToken, Stream> _readStream;
    private readonly Func<int, Stream, object, object, CancellationToken, bool> _writeStreamSolution;
    private readonly Func<EsentStorage.Key, int, Stream, object, CancellationToken, bool> _writeStream;

    // case-insensitive cache of string -> unique integer id lookups
    private readonly ConcurrentDictionary<string, int> _nameTableCache;
    private readonly EsentStorage _esentStorage;

    public EsentPersistentStorage(
        IOptionService optionService, string workingFolderPath, string solutionFilePath, Action<AbstractPersistentStorage> disposer) :
        base(optionService, workingFolderPath, solutionFilePath, disposer)
    {
        // cache delegates
        _readStreamSolution = ReadStreamSolution;
        _readStream = ReadStream;
        _writeStreamSolution = WriteStreamSolution;
        _writeStream = WriteStream;

        // solution must exist in disk. otherwise, we shouldn't be here at all.
        Contract.ThrowIfTrue(string.IsNullOrWhiteSpace(solutionFilePath));

        var databaseFile = GetDatabaseFile(workingFolderPath);

        // make sure the directory that will hold the database exists
        this.EsentDirectory = Path.GetDirectoryName(databaseFile);
        if (!Directory.Exists(this.EsentDirectory))
        {
            Directory.CreateDirectory(this.EsentDirectory);
        }

        _nameTableCache = new ConcurrentDictionary<string, int>(StringComparer.OrdinalIgnoreCase);

        var enablePerformanceMonitor = optionService.GetOption(PersistentStorageOptions.EsentPerformanceMonitor);
        _esentStorage = new EsentStorage(databaseFile, enablePerformanceMonitor);
    }

    /// <summary>
    /// Computes the database file path for a working folder:
    /// combines the folder, the storage extension subfolder, and the file name.
    /// </summary>
    public static string GetDatabaseFile(string workingFolderPath)
    {
        Contract.ThrowIfTrue(string.IsNullOrWhiteSpace(workingFolderPath));

        return Path.Combine(workingFolderPath, StorageExtension, PersistentStorageFileName);
    }

    /// <summary>
    /// Initializes the underlying Esent storage; must be called before use.
    /// </summary>
    public void Initialize()
    {
        _esentStorage.Initialize();
    }

    // directory that holds the Esent database file
    public string EsentDirectory { get; }

    /// <summary>
    /// Reads the stream stored under <paramref name="name"/> for the given
    /// document, or a default (null) task if persistence is disabled or the
    /// keys can't be resolved.
    /// </summary>
    public override Task<Stream> ReadStreamAsync(Document document, string name, CancellationToken cancellationToken = default(CancellationToken))
    {
        Contract.ThrowIfTrue(string.IsNullOrWhiteSpace(name));

        if (!PersistenceEnabled)
        {
            return SpecializedTasks.Default<Stream>();
        }

        if (!TryGetProjectAndDocumentKey(document, out var key) ||
            !TryGetUniqueNameId(name, out var nameId))
        {
            return SpecializedTasks.Default<Stream>();
        }

        var stream = EsentExceptionWrapper(key, nameId, _readStream, cancellationToken);
        return SpecializedTasks.DefaultOrResult(stream);
    }

    /// <summary>
    /// Reads the stream stored under <paramref name="name"/> for the given
    /// project, or a default (null) task on failure.
    /// </summary>
    public override Task<Stream> ReadStreamAsync(Project project, string name, CancellationToken cancellationToken = default(CancellationToken))
    {
        Contract.ThrowIfTrue(string.IsNullOrWhiteSpace(name));

        if (!PersistenceEnabled)
        {
            return SpecializedTasks.Default<Stream>();
        }

        if (!TryGetProjectKey(project, out var key) ||
            !TryGetUniqueNameId(name, out var nameId))
        {
            return SpecializedTasks.Default<Stream>();
        }

        var stream = EsentExceptionWrapper(key, nameId, _readStream, cancellationToken);
        return SpecializedTasks.DefaultOrResult(stream);
    }

    /// <summary>
    /// Reads the solution-level stream stored under <paramref name="name"/>,
    /// or a default (null) task on failure.
    /// </summary>
    public override Task<Stream> ReadStreamAsync(string name, CancellationToken cancellationToken = default(CancellationToken))
    {
        Contract.ThrowIfTrue(string.IsNullOrWhiteSpace(name));

        if (!PersistenceEnabled)
        {
            return SpecializedTasks.Default<Stream>();
        }

        if (!TryGetUniqueNameId(name, out var nameId))
        {
            return SpecializedTasks.Default<Stream>();
        }

        var stream = EsentExceptionWrapper(nameId, _readStreamSolution, cancellationToken);
        return SpecializedTasks.DefaultOrResult(stream);
    }

    // reads a project/document table row and copies it into a detached stream
    private Stream ReadStream(EsentStorage.Key key, int nameId, object unused1, object unused2, CancellationToken cancellationToken)
    {
        using (var accessor = GetAccessor(key))
        using (var esentStream = accessor.GetReadStream(key, nameId))
        {
            if (esentStream == null)
            {
                return null;
            }

            // this will copy over esent stream and let it go.
            return SerializableBytes.CreateReadableStream(esentStream, cancellationToken);
        }
    }

    // reads a solution table row and copies it into a detached stream
    private Stream ReadStreamSolution(int nameId, object unused1, object unused2, object unused3, CancellationToken cancellationToken)
    {
        using (var accessor = _esentStorage.GetSolutionTableAccessor())
        using (var esentStream = accessor.GetReadStream(nameId))
        {
            if (esentStream == null)
            {
                return null;
            }

            // this will copy over esent stream and let it go.
            return SerializableBytes.CreateReadableStream(esentStream, cancellationToken);
        }
    }

    /// <summary>
    /// Writes <paramref name="stream"/> under <paramref name="name"/> for the
    /// given document. Returns false if persistence is disabled, keys can't be
    /// resolved, or the write failed.
    /// </summary>
    public override Task<bool> WriteStreamAsync(Document document, string name, Stream stream, CancellationToken cancellationToken = default(CancellationToken))
    {
        Contract.ThrowIfTrue(string.IsNullOrWhiteSpace(name));
        Contract.ThrowIfNull(stream);

        if (!PersistenceEnabled)
        {
            return SpecializedTasks.False;
        }

        if (!TryGetProjectAndDocumentKey(document, out var key) ||
            !TryGetUniqueNameId(name, out var nameId))
        {
            return SpecializedTasks.False;
        }

        var success = EsentExceptionWrapper(key, nameId, stream, _writeStream, cancellationToken);
        return success ? SpecializedTasks.True : SpecializedTasks.False;
    }

    /// <summary>
    /// Writes <paramref name="stream"/> under <paramref name="name"/> for the
    /// given project. Returns false on failure.
    /// </summary>
    public override Task<bool> WriteStreamAsync(Project project, string name, Stream stream, CancellationToken cancellationToken = default(CancellationToken))
    {
        Contract.ThrowIfTrue(string.IsNullOrWhiteSpace(name));
        Contract.ThrowIfNull(stream);

        if (!PersistenceEnabled)
        {
            return SpecializedTasks.False;
        }

        if (!TryGetProjectKey(project, out var key) ||
            !TryGetUniqueNameId(name, out var nameId))
        {
            return SpecializedTasks.False;
        }

        var success = EsentExceptionWrapper(key, nameId, stream, _writeStream, cancellationToken);
        return success ? SpecializedTasks.True : SpecializedTasks.False;
    }

    /// <summary>
    /// Writes <paramref name="stream"/> under <paramref name="name"/> at the
    /// solution level. Returns false on failure.
    /// </summary>
    public override Task<bool> WriteStreamAsync(string name, Stream stream, CancellationToken cancellationToken = default(CancellationToken))
    {
        Contract.ThrowIfTrue(string.IsNullOrWhiteSpace(name));
        Contract.ThrowIfNull(stream);

        if (!PersistenceEnabled)
        {
            return SpecializedTasks.False;
        }

        if (!TryGetUniqueNameId(name, out var nameId))
        {
            return SpecializedTasks.False;
        }

        var success = EsentExceptionWrapper(nameId, stream, _writeStreamSolution, cancellationToken);
        return success ? SpecializedTasks.True : SpecializedTasks.False;
    }

    // copies the input stream into a project/document row, then commits
    private bool WriteStream(EsentStorage.Key key, int nameId, Stream stream, object unused1, CancellationToken cancellationToken)
    {
        using (var accessor = GetAccessor(key))
        using (var esentStream = accessor.GetWriteStream(key, nameId))
        {
            WriteToStream(stream, esentStream, cancellationToken);
            return accessor.ApplyChanges();
        }
    }

    // copies the input stream into a solution row, then commits
    private bool WriteStreamSolution(int nameId, Stream stream, object unused1, object unused2, CancellationToken cancellationToken)
    {
        using (var accessor = _esentStorage.GetSolutionTableAccessor())
        using (var esentStream = accessor.GetWriteStream(nameId))
        {
            WriteToStream(stream, esentStream, cancellationToken);
            return accessor.ApplyChanges();
        }
    }

    /// <summary>
    /// Closes the underlying Esent storage.
    /// </summary>
    public override void Close()
    {
        _esentStorage.Close();
    }

    // resolves the unique id for an arbitrary name (no file-existence check)
    private bool TryGetUniqueNameId(string name, out int id)
    {
        return TryGetUniqueId(name, false, out id);
    }

    // resolves the unique id for a file path (checks the file exists)
    private bool TryGetUniqueFileId(string path, out int id)
    {
        return TryGetUniqueId(path, true, out id);
    }

    /// <summary>
    /// Resolves (and caches) the unique integer id for a name or file path.
    /// File paths are stored relative to the solution directory. Returns false
    /// for blank input, a missing file, or any storage failure.
    /// </summary>
    private bool TryGetUniqueId(string value, bool fileCheck, out int id)
    {
        id = default(int);

        // quick bail out
        if (string.IsNullOrWhiteSpace(value))
        {
            return false;
        }

        // if we already know, get the id
        if (_nameTableCache.TryGetValue(value, out id))
        {
            return true;
        }

        // we only persist for things that actually exist
        if (fileCheck && !File.Exists(value))
        {
            return false;
        }

        try
        {
            // NOTE: GetUniqueId is evaluated eagerly here (argument to
            // GetOrAdd), even when another thread wins the cache race.
            var uniqueIdValue = fileCheck ? FilePathUtilities.GetRelativePath(Path.GetDirectoryName(SolutionFilePath), value) : value;
            id = _nameTableCache.GetOrAdd(value, _esentStorage.GetUniqueId(uniqueIdValue));
            return true;
        }
        catch (Exception ex)
        {
            // if we get fatal errors from esent such as disk out of space or log file corrupted by other process and etc
            // don't crash VS, but let VS know it can't use esent. we will gracefully recover issue by using memory.
            EsentLogger.LogException(ex);

            return false;
        }
    }

    // pumps inputStream into esentStream through a pooled buffer, honoring cancellation
    private static void WriteToStream(Stream inputStream, Stream esentStream, CancellationToken cancellationToken)
    {
        var buffer = SharedPools.ByteArray.Allocate();
        try
        {
            int bytesRead;
            do
            {
                cancellationToken.ThrowIfCancellationRequested();

                bytesRead = inputStream.Read(buffer, 0, buffer.Length);
                if (bytesRead > 0)
                {
                    esentStream.Write(buffer, 0, bytesRead);
                }
            }
            while (bytesRead > 0);

            // flush the data and trim column size of necessary
            esentStream.Flush();
        }
        finally
        {
            SharedPools.ByteArray.Free(buffer);
        }
    }

    // picks the document table when the key carries a document id, otherwise the project table
    private EsentStorage.ProjectDocumentTableAccessor GetAccessor(EsentStorage.Key key)
    {
        return key.DocumentIdOpt.HasValue ?
            _esentStorage.GetDocumentTableAccessor() :
            (EsentStorage.ProjectDocumentTableAccessor)_esentStorage.GetProjectTableAccessor();
    }

    // builds a storage key from the document's project ids plus its file path id
    private bool TryGetProjectAndDocumentKey(Document document, out EsentStorage.Key key)
    {
        key = default(EsentStorage.Key);

        if (!TryGetProjectId(document.Project, out var projectId, out var projectNameId) ||
            !TryGetUniqueFileId(document.FilePath, out var documentId))
        {
            return false;
        }

        key = new EsentStorage.Key(projectId, projectNameId, documentId);
        return true;
    }

    // builds a storage key from the project's file-path and name ids
    private bool TryGetProjectKey(Project project, out EsentStorage.Key key)
    {
        key = default(EsentStorage.Key);

        if (!TryGetProjectId(project, out var projectId, out var projectNameId))
        {
            return false;
        }

        key = new EsentStorage.Key(projectId, projectNameId);
        return true;
    }

    // resolves both ids that identify a project: its file path and its name
    private bool TryGetProjectId(Project project, out int projectId, out int projectNameId)
    {
        projectId = default(int);
        projectNameId = default(int);

        return TryGetUniqueFileId(project.FilePath, out projectId) && TryGetUniqueNameId(project.Name, out projectNameId);
    }

    // convenience overloads that pad unused delegate slots with null
    private TResult EsentExceptionWrapper<TArg1, TResult>(TArg1 arg1, Func<TArg1, object, object, object, CancellationToken, TResult> func, CancellationToken cancellationToken)
    {
        return EsentExceptionWrapper(arg1, (object)null, func, cancellationToken);
    }

    private TResult EsentExceptionWrapper<TArg1, TArg2, TResult>(
        TArg1 arg1, TArg2 arg2, Func<TArg1, TArg2, object, object, CancellationToken, TResult> func, CancellationToken cancellationToken)
    {
        return EsentExceptionWrapper(arg1, arg2, (object)null, func, cancellationToken);
    }

    private TResult EsentExceptionWrapper<TArg1, TArg2, TArg3, TResult>(
        TArg1 arg1, TArg2 arg2, TArg3 arg3, Func<TArg1, TArg2, TArg3, object, CancellationToken, TResult> func, CancellationToken cancellationToken)
    {
        return EsentExceptionWrapper(arg1, arg2, arg3, (object)null, func, cancellationToken);
    }

    /// <summary>
    /// Runs <paramref name="func"/> and converts every failure into the default
    /// result: persistence is a best-effort cache, so Esent/other exceptions are
    /// logged (when the storage isn't already closed) rather than propagated.
    /// </summary>
    private TResult EsentExceptionWrapper<TArg1, TArg2, TArg3, TArg4, TResult>(
        TArg1 arg1, TArg2 arg2, TArg3 arg3, TArg4 arg4, Func<TArg1, TArg2, TArg3, TArg4, CancellationToken, TResult> func, CancellationToken cancellationToken)
    {
        try
        {
            return func(arg1, arg2, arg3, arg4, cancellationToken);
        }
        catch (EsentInvalidSesidException)
        {
            // operation was in-fly when Esent instance was shutdown - ignore the error
        }
        catch (OperationCanceledException)
        {
            // cancellation is expected; fall through to the default result
        }
        catch (EsentException ex)
        {
            if (!_esentStorage.IsClosed)
            {
                // ignore esent exception if underlying storage was closed
                // there is not much we can do here.
                // internally we use it as a way to cache information between sessions anyway.
                // no functionality will be affected by this except perf
                Logger.Log(FunctionId.PersistenceService_WriteAsyncFailed, "Esent Failed : " + ex.Message);
            }
        }
        catch (Exception ex)
        {
            // ignore exception
            // there is not much we can do here.
            // internally we use it as a way to cache information between sessions anyway.
            // no functionality will be affected by this except perf
            Logger.Log(FunctionId.PersistenceService_WriteAsyncFailed, "Failed : " + ex.Message);
        }

        return default(TResult);
    }
}
}
| |
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Runtime;
using System.Runtime.Serialization;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Orleans.CodeGeneration;
using Orleans.GrainDirectory;
using Orleans.MultiCluster;
using Orleans.Providers;
using Orleans.Runtime.Configuration;
using Orleans.Runtime.ConsistentRing;
using Orleans.Runtime.Counters;
using Orleans.Runtime.GrainDirectory;
using Orleans.Runtime.MembershipService;
using Orleans.Runtime.Messaging;
using Orleans.Runtime.MultiClusterNetwork;
using Orleans.Runtime.Placement;
using Orleans.Runtime.Providers;
using Orleans.Runtime.Scheduler;
using Orleans.Runtime.Startup;
using Orleans.Runtime.Storage;
using Orleans.Serialization;
using Orleans.Storage;
using Orleans.Streams;
using Orleans.Timers;
namespace Orleans.Runtime
{
/// <summary>
/// Orleans silo.
/// </summary>
public class Silo
#if !NETSTANDARD_TODO
: MarshalByRefObject // for hosting multiple silos in app domains of the same process
#endif
{
/// <summary> Standard name for Primary silo. </summary>
public const string PrimarySiloName = "Primary";

/// <summary> Silo Types. </summary>
public enum SiloType
{
    /// <summary> No silo type specified. </summary>
    None = 0,
    /// <summary> Primary silo. </summary>
    Primary,
    /// <summary> Secondary silo. </summary>
    Secondary,
}

/// <summary> Type of this silo. </summary>
public SiloType Type
{
    get { return siloType; }
}

// Configuration: globalConfig is fixed at construction; nodeConfig is
// re-resolved when the "Defaults" config section changes (see constructor).
private readonly GlobalConfiguration globalConfig;
private NodeConfiguration nodeConfig;

// Core runtime subsystems, wired up in the internal constructor.
private readonly ISiloMessageCenter messageCenter;
private readonly OrleansTaskScheduler scheduler;
private readonly LocalGrainDirectory localGrainDirectory;
private readonly ActivationDirectory activationDirectory;

// One incoming-message pump per message category (application/system/ping).
private readonly IncomingMessageAgent incomingAgent;
private readonly IncomingMessageAgent incomingSystemAgent;
private readonly IncomingMessageAgent incomingPingAgent;

private readonly Logger logger;
private readonly GrainTypeManager typeManager;
private readonly ManualResetEvent siloTerminatedEvent;
private readonly SiloType siloType;
private readonly SiloStatisticsManager siloStatistics;
private readonly MembershipFactory membershipFactory;
private readonly MultiClusterOracleFactory multiClusterFactory;

// Provider managers are created during startup, not in the constructor.
private StorageProviderManager storageProviderManager;
private StatisticsProviderManager statisticsProviderManager;
private BootstrapProviderManager bootstrapProviderManager;
private readonly LocalReminderServiceFactory reminderFactory;
private IReminderService reminderService;
private ProviderManagerSystemTarget providerManagerSystemTarget;

private IMembershipOracle membershipOracle;
private IMultiClusterOracle multiClusterOracle;
private ClientObserverRegistrar clientRegistrar;
private Watchdog platformWatchdog;

// Timeouts; both are stretched when a debugger is attached (see constructor).
private readonly TimeSpan initTimeout;
private readonly TimeSpan stopTimeout = TimeSpan.FromMinutes(1);

private readonly Catalog catalog;
private readonly List<IHealthCheckParticipant> healthCheckParticipants;
private readonly object lockable = new object();
private readonly GrainFactory grainFactory;
private readonly IGrainRuntime grainRuntime;
private readonly List<IProvider> allSiloProviders;

internal readonly string Name;
internal readonly string SiloIdentity;
internal ClusterConfiguration OrleansConfig { get; set; }
internal GlobalConfiguration GlobalConfig { get { return globalConfig; } }
internal NodeConfiguration LocalConfig { get { return nodeConfig; } }

// Internal accessors exposing the private subsystems to the rest of the runtime.
internal ISiloMessageCenter LocalMessageCenter { get { return messageCenter; } }
internal OrleansTaskScheduler LocalScheduler { get { return scheduler; } }
internal GrainTypeManager LocalTypeManager { get { return typeManager; } }
internal ILocalGrainDirectory LocalGrainDirectory { get { return localGrainDirectory; } }
internal ISiloStatusOracle LocalSiloStatusOracle { get { return membershipOracle; } }
internal IMultiClusterOracle LocalMultiClusterOracle { get { return multiClusterOracle; } }
internal IConsistentRingProvider RingProvider { get; private set; }
internal IStorageProviderManager StorageProviderManager { get { return storageProviderManager; } }
internal IProviderManager StatisticsProviderManager { get { return statisticsProviderManager; } }
internal IStreamProviderManager StreamProviderManager { get { return grainRuntime.StreamProviderManager; } }
internal IList<IBootstrapProvider> BootstrapProviders { get; private set; }
internal ISiloPerformanceMetrics Metrics { get { return siloStatistics.MetricsTable; } }

// Most recently constructed silo in this app domain (set in the constructor).
internal static Silo CurrentSilo { get; private set; }

internal IReadOnlyCollection<IProvider> AllSiloProviders
{
    get { return allSiloProviders.AsReadOnly(); }
}

// Dependency-injection container configured from the node's Startup type.
internal IServiceProvider Services { get; }

/// <summary> Get the id of the cluster this silo is part of. </summary>
public string ClusterId
{
    get { return globalConfig.HasMultiClusterNetwork ? globalConfig.ClusterId : null; }
}

/// <summary> SiloAddress for this silo. </summary>
public SiloAddress SiloAddress { get { return messageCenter.MyAddress; } }

/// <summary>
///  Silo termination event used to signal shutdown of this silo.
/// </summary>
public WaitHandle SiloTerminatedEvent { get { return siloTerminatedEvent; } } // one event for all types of termination (shutdown, stop and fast kill).

/// <summary>
/// Test hook connection for white-box testing of silo.
/// </summary>
public TestHooks TestHook;
/// <summary>
/// Creates and initializes the silo from the specified config data.
/// </summary>
/// <param name="name">Name of this silo.</param>
/// <param name="siloType">Type of this silo.</param>
/// <param name="config">Silo config data to be used for this silo.</param>
public Silo(string name, SiloType siloType, ClusterConfiguration config)
    : this(name, siloType, config, null)
{
    // passes a null keyStore: no shared local data store is attached
}
/// <summary>
/// Creates and initializes the silo from the specified config data.
/// </summary>
/// <param name="name">Name of this silo.</param>
/// <param name="siloType">Type of this silo.</param>
/// <param name="config">Silo config data to be used for this silo.</param>
/// <param name="keyStore">Local data store, mostly used for testing, shared between all silos running in same process.</param>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope",
    Justification = "Should not Dispose of messageCenter in this method because it continues to run / exist after this point.")]
internal Silo(string name, SiloType siloType, ClusterConfiguration config, ILocalDataStore keyStore)
{
    // NOTE: construction order below matters for several subsystems
    // (see the inline comments, e.g. grainRuntime after messageCenter).
    SystemStatus.Current = SystemStatus.Creating;
    CurrentSilo = this;

    var startTime = DateTime.UtcNow;

    this.siloType = siloType;
    Name = name;

    siloTerminatedEvent = new ManualResetEvent(false);

    OrleansConfig = config;
    globalConfig = config.Globals;
    // nodeConfig tracks the "Defaults" section and is refreshed on change
    config.OnConfigChange("Defaults", () => nodeConfig = config.GetOrCreateNodeConfigurationForSilo(name));

    if (!LogManager.IsInitialized)
        LogManager.Initialize(nodeConfig);

    config.OnConfigChange("Defaults/Tracing", () => LogManager.Initialize(nodeConfig, true), false);
    MultiClusterRegistrationStrategy.Initialize(config.Globals);
    ActivationData.Init(config, nodeConfig);
    StatisticsCollector.Initialize(nodeConfig);
    SerializationManager.Initialize(globalConfig.SerializationProviders);
    initTimeout = globalConfig.MaxJoinAttemptTime;
    if (Debugger.IsAttached)
    {
        // stretch timeouts while debugging so breakpoints don't trip them
        initTimeout = StandardExtensions.Max(TimeSpan.FromMinutes(10), globalConfig.MaxJoinAttemptTime);
        stopTimeout = initTimeout;
    }

    IPEndPoint here = nodeConfig.Endpoint;
    int generation = nodeConfig.Generation;
    if (generation == 0)
    {
        // no generation configured: allocate a fresh one and record it
        generation = SiloAddress.AllocateNewGeneration();
        nodeConfig.Generation = generation;
    }
    LogManager.MyIPEndPoint = here;
    logger = LogManager.GetLogger("Silo", LoggerType.Runtime);

    logger.Info(ErrorCode.SiloGcSetting, "Silo starting with GC settings: ServerGC={0} GCLatencyMode={1}", GCSettings.IsServerGC, Enum.GetName(typeof(GCLatencyMode), GCSettings.LatencyMode));
    if (!GCSettings.IsServerGC || !GCSettings.LatencyMode.Equals(GCLatencyMode.Batch))
    {
        logger.Warn(ErrorCode.SiloGcWarning, "Note: Silo not running with ServerGC turned on or with GCLatencyMode.Batch enabled - recommend checking app config : <configuration>-<runtime>-<gcServer enabled=\"true\"> and <configuration>-<runtime>-<gcConcurrent enabled=\"false\"/>");
        logger.Warn(ErrorCode.SiloGcWarning, "Note: ServerGC only kicks in on multi-core systems (settings enabling ServerGC have no effect on single-core machines).");
    }

    logger.Info(ErrorCode.SiloInitializing, "-------------- Initializing {0} silo on host {1} MachineName {2} at {3}, gen {4} --------------",
        siloType, nodeConfig.DNSHostName, Environment.MachineName, here, generation);
    logger.Info(ErrorCode.SiloInitConfig, "Starting silo {0} with the following configuration= " + Environment.NewLine + "{1}",
        name, config.ToString(name));

    if (keyStore != null)
    {
        // Re-establish reference to shared local key store in this app domain
        LocalDataStoreInstance.LocalDataStore = keyStore;
    }

    // Configure DI using Startup type
    bool usingCustomServiceProvider;
    Services = StartupBuilder.ConfigureStartup(nodeConfig.StartupTypeName, out usingCustomServiceProvider);

    healthCheckParticipants = new List<IHealthCheckParticipant>();
    allSiloProviders = new List<IProvider>();

    BufferPool.InitGlobalBufferPool(globalConfig);
    PlacementStrategy.Initialize(globalConfig);

    // route unobserved task/app-domain exceptions through the silo's handlers
    UnobservedExceptionsHandlerClass.SetUnobservedExceptionHandler(UnobservedExceptionHandler);
    AppDomain.CurrentDomain.UnhandledException +=
        (obj, ev) => DomainUnobservedExceptionHandler(obj, (Exception)ev.ExceptionObject);

    try
    {
        grainFactory = Services.GetRequiredService<GrainFactory>();
    }
    catch (InvalidOperationException exc)
    {
        logger.Error(ErrorCode.SiloStartError, "Exception during Silo.Start, GrainFactory was not registered in Dependency Injection container", exc);
        throw;
    }

    typeManager = new GrainTypeManager(
        here.Address.Equals(IPAddress.Loopback),
        grainFactory,
        new SiloAssemblyLoader(OrleansConfig.Defaults.AdditionalAssemblyDirectories));

    // Performance metrics
    siloStatistics = new SiloStatisticsManager(globalConfig, nodeConfig);
    config.OnConfigChange("Defaults/LoadShedding", () => siloStatistics.MetricsTable.NodeConfig = nodeConfig, false);

    // The scheduler
    scheduler = new OrleansTaskScheduler(globalConfig, nodeConfig);
    healthCheckParticipants.Add(scheduler);

    // Initialize the message center
    var mc = new MessageCenter(here, generation, globalConfig, siloStatistics.MetricsTable);
    if (nodeConfig.IsGatewayNode)
        mc.InstallGateway(nodeConfig.ProxyGatewayEndpoint);

    messageCenter = mc;

    SiloIdentity = SiloAddress.ToLongString();

    // GrainRuntime can be created only here, after messageCenter was created.
    grainRuntime = new GrainRuntime(
        globalConfig.ServiceId,
        SiloIdentity,
        grainFactory,
        new TimerRegistry(),
        new ReminderRegistry(),
        new StreamProviderManager(),
        Services);

    // Now the router/directory service
    // This has to come after the message center //; note that it then gets injected back into the message center.;
    localGrainDirectory = new LocalGrainDirectory(this);
    RegistrarManager.InitializeGrainDirectoryManager(localGrainDirectory, globalConfig.GlobalSingleInstanceNumberRetries);

    // Now the activation directory.
    // This needs to know which router to use so that it can keep the global directory in synch with the local one.
    activationDirectory = new ActivationDirectory();

    // Now the consistent ring provider
    RingProvider = GlobalConfig.UseVirtualBucketsConsistentRing ?
        (IConsistentRingProvider) new VirtualBucketsRingProvider(SiloAddress, GlobalConfig.NumVirtualBucketsConsistentRing)
        : new ConsistentRingProvider(SiloAddress);

    // to preserve backwards compatibility, only use the service provider to inject grain dependencies if the user supplied his own
    // service provider, meaning that he is explicitly opting into it.
    var grainCreator = new GrainCreator(grainRuntime, usingCustomServiceProvider ? Services : null);

    // Catalog and dispatcher have a circular dependency: the catalog hands back
    // a setter through which the dispatcher is injected after construction.
    Action<Dispatcher> setDispatcher;
    catalog = new Catalog(Constants.CatalogId, SiloAddress, Name, LocalGrainDirectory, typeManager, scheduler, activationDirectory, config, grainCreator, out setDispatcher);
    var dispatcher = new Dispatcher(scheduler, messageCenter, catalog, config);
    setDispatcher(dispatcher);

    RuntimeClient.Current = new InsideRuntimeClient(
        dispatcher,
        catalog,
        LocalGrainDirectory,
        SiloAddress,
        config,
        RingProvider,
        typeManager,
        grainFactory);
    messageCenter.RerouteHandler = InsideRuntimeClient.Current.RerouteMessage;
    messageCenter.SniffIncomingMessage = InsideRuntimeClient.Current.SniffIncomingMessage;

    siloStatistics.MetricsTable.Scheduler = scheduler;
    siloStatistics.MetricsTable.ActivationDirectory = activationDirectory;
    siloStatistics.MetricsTable.ActivationCollector = catalog.ActivationCollector;
    siloStatistics.MetricsTable.MessageCenter = messageCenter;

    DeploymentLoadPublisher.CreateDeploymentLoadPublisher(this, globalConfig);
    PlacementDirectorsManager.CreatePlacementDirectorsManager(globalConfig);

    // Now the incoming message agents
    incomingSystemAgent = new IncomingMessageAgent(Message.Categories.System, messageCenter, activationDirectory, scheduler, dispatcher);
    incomingPingAgent = new IncomingMessageAgent(Message.Categories.Ping, messageCenter, activationDirectory, scheduler, dispatcher);
    incomingAgent = new IncomingMessageAgent(Message.Categories.Application, messageCenter, activationDirectory, scheduler, dispatcher);

    membershipFactory = new MembershipFactory();
    multiClusterFactory = new MultiClusterOracleFactory();
    reminderFactory = new LocalReminderServiceFactory();

    SystemStatus.Current = SystemStatus.Created;

    StringValueStatistic.FindOrCreate(StatisticNames.SILO_START_TIME,
        () => LogFormatter.PrintDate(startTime)); // this will help troubleshoot production deployment when looking at MDS logs.

    TestHook = new TestHooks(this);

    logger.Info(ErrorCode.SiloInitializingFinished, "-------------- Started silo {0}, ConsistentHashCode {1:X} --------------", SiloAddress.ToLongString(), SiloAddress.GetConsistentHashCode());
}
// Creates and registers the per-silo system targets (SiloControl, stream provider
// update agent, load publisher, grain directory endpoints, client registrar,
// type manager, membership oracle and - when configured - the multi-cluster oracle).
// Called from DoStart() after the runtime components (scheduler, message center,
// directory) have been started; RegisterSystemTarget needs the scheduler running.
private void CreateSystemTargets()
{
logger.Verbose("Creating System Targets for this silo.");
logger.Verbose("Creating {0} System Target", "SiloControl");
RegisterSystemTarget(new SiloControl(this));
logger.Verbose("Creating {0} System Target", "StreamProviderUpdateAgent");
RegisterSystemTarget(new StreamProviderManagerAgent(this, allSiloProviders));
logger.Verbose("Creating {0} System Target", "DeploymentLoadPublisher");
RegisterSystemTarget(DeploymentLoadPublisher.Instance);
logger.Verbose("Creating {0} System Target", "RemoteGrainDirectory + CacheValidator");
RegisterSystemTarget(LocalGrainDirectory.RemoteGrainDirectory);
RegisterSystemTarget(LocalGrainDirectory.CacheValidator);
logger.Verbose("Creating {0} System Target", "RemoteClusterGrainDirectory");
RegisterSystemTarget(LocalGrainDirectory.RemoteClusterGrainDirectory);
logger.Verbose("Creating {0} System Target", "ClientObserverRegistrar + TypeManager");
clientRegistrar = new ClientObserverRegistrar(SiloAddress, LocalGrainDirectory, LocalScheduler, OrleansConfig);
RegisterSystemTarget(clientRegistrar);
RegisterSystemTarget(new TypeManager(SiloAddress, LocalTypeManager));
logger.Verbose("Creating {0} System Target", "MembershipOracle");
RegisterSystemTarget((SystemTarget) membershipOracle);
// The multi-cluster oracle is only present when a multi-cluster network is configured.
if (multiClusterOracle != null)
{
logger.Verbose("Creating {0} System Target", "MultiClusterOracle");
RegisterSystemTarget((SystemTarget)multiClusterOracle);
}
logger.Verbose("Finished creating System Targets for this silo.");
}
// Wires up cross-component dependencies that could not be set at construction time:
// silo-status subscriptions, catalog/directory links, the optional reminder service,
// and the provider-manager system target used as the scheduling context for provider
// init calls. Runs after CreateSystemTargets(); statement order matters here.
private void InjectDependencies()
{
healthCheckParticipants.Add(membershipOracle);
catalog.SiloStatusOracle = LocalSiloStatusOracle;
localGrainDirectory.CatalogSiloStatusListener = catalog;
LocalSiloStatusOracle.SubscribeToSiloStatusEvents(localGrainDirectory);
messageCenter.SiloDeadOracle = LocalSiloStatusOracle.IsDeadSilo;
// consistentRingProvider is not a system target per se, but it behaves like the localGrainDirectory, so it is here
LocalSiloStatusOracle.SubscribeToSiloStatusEvents((ISiloStatusListener)RingProvider);
LocalSiloStatusOracle.SubscribeToSiloStatusEvents(DeploymentLoadPublisher.Instance);
if (!globalConfig.ReminderServiceType.Equals(GlobalConfiguration.ReminderServiceProviderType.Disabled))
{
// start the reminder service system target
reminderService = reminderFactory.CreateReminderService(this, grainFactory, initTimeout);
RegisterSystemTarget((SystemTarget) reminderService);
}
RegisterSystemTarget(catalog);
// Synchronously start the catalog on its own scheduling context before proceeding.
scheduler.QueueAction(catalog.Start, catalog.SchedulingContext)
.WaitWithThrow(initTimeout);
// SystemTarget for provider init calls
providerManagerSystemTarget = new ProviderManagerSystemTarget(this);
RegisterSystemTarget(providerManagerSystemTarget);
}
// Bootstraps system grains. Only the primary silo creates the membership
// table provider; all other silo types have nothing to do here.
private async Task CreateSystemGrains()
{
    if (siloType != SiloType.Primary) return;

    await membershipFactory.CreateMembershipTableProvider(catalog, this).WithTimeout(initTimeout);
}
/// <summary>
/// Perform silo startup operations. Any exception is logged to the silo log
/// and then rethrown to the caller.
/// </summary>
public void Start()
{
    try
    {
        DoStart();
    }
    catch (Exception ex)
    {
        // Record the failure before letting the caller observe it.
        logger.Error(ErrorCode.SiloStartError, "Exception during Silo.Start", ex);
        throw;
    }
}
// The actual startup sequence. The ordering below is deliberate and fragile:
// runtime components (typeManager, runtime client, scheduler, message center,
// message agents, grain directory) must be running before system targets are
// created; system targets must exist before providers are loaded on their
// scheduling contexts; the membership oracle must become active before stream
// providers start pulling; and the gateway opens last. Transitions SystemStatus
// from Created -> Starting -> Running, or FastKill()s on any failure.
private void DoStart()
{
lock (lockable)
{
if (!SystemStatus.Current.Equals(SystemStatus.Created))
throw new InvalidOperationException(String.Format("Calling Silo.Start() on a silo which is not in the Created state. This silo is in the {0} state.", SystemStatus.Current));
SystemStatus.Current = SystemStatus.Starting;
}
logger.Info(ErrorCode.SiloStarting, "Silo Start()");
// Hook up to receive notification of process exit / Ctrl-C events
AppDomain.CurrentDomain.ProcessExit += HandleProcessExit;
Console.CancelKeyPress += HandleProcessExit;
ConfigureThreadPoolAndServicePointSettings();
// This has to start first so that the directory system target factory gets loaded before we start the router.
typeManager.Start();
InsideRuntimeClient.Current.Start();
// The order of these 4 is pretty much arbitrary.
scheduler.Start();
messageCenter.Start();
incomingPingAgent.Start();
incomingSystemAgent.Start();
incomingAgent.Start();
LocalGrainDirectory.Start();
// Set up an execution context for this thread so that the target creation steps can use asynch values.
RuntimeContext.InitializeMainThread();
SiloProviderRuntime.Initialize(GlobalConfig, SiloIdentity, grainFactory, Services);
InsideRuntimeClient.Current.CurrentStreamProviderRuntime = SiloProviderRuntime.Instance;
// Statistics providers are loaded first so later components can publish to them.
statisticsProviderManager = new StatisticsProviderManager("Statistics", SiloProviderRuntime.Instance);
string statsProviderName = statisticsProviderManager.LoadProvider(GlobalConfig.ProviderConfigurations)
.WaitForResultWithThrow(initTimeout);
if (statsProviderName != null)
LocalConfig.StatisticsProviderName = statsProviderName;
allSiloProviders.AddRange(statisticsProviderManager.GetProviders());
// can call SetSiloMetricsTableDataManager only after MessageCenter is created (dependency on this.SiloAddress).
siloStatistics.SetSiloStatsTableDataManager(this, nodeConfig).WaitWithThrow(initTimeout);
siloStatistics.SetSiloMetricsTableDataManager(this, nodeConfig).WaitWithThrow(initTimeout);
IMembershipTable membershipTable = membershipFactory.GetMembershipTable(GlobalConfig.LivenessType, GlobalConfig.MembershipTableAssembly);
membershipOracle = membershipFactory.CreateMembershipOracle(this, membershipTable);
multiClusterOracle = multiClusterFactory.CreateGossipOracle(this).WaitForResultWithThrow(initTimeout);
// This has to follow the above steps that start the runtime components
CreateSystemTargets();
InjectDependencies();
// Validate the configuration.
GlobalConfig.Application.ValidateConfiguration(logger);
// ensure this runs in the grain context, wait for it to complete
scheduler.QueueTask(CreateSystemGrains, catalog.SchedulingContext)
.WaitWithThrow(initTimeout);
if (logger.IsVerbose) { logger.Verbose("System grains created successfully."); }
// Initialize storage providers once we have a basic silo runtime environment operating
storageProviderManager = new StorageProviderManager(grainFactory, Services);
scheduler.QueueTask(
() => storageProviderManager.LoadStorageProviders(GlobalConfig.ProviderConfigurations),
providerManagerSystemTarget.SchedulingContext)
.WaitWithThrow(initTimeout);
catalog.SetStorageManager(storageProviderManager);
allSiloProviders.AddRange(storageProviderManager.GetProviders());
if (logger.IsVerbose) { logger.Verbose("Storage provider manager created successfully."); }
// Load and init stream providers before silo becomes active
var siloStreamProviderManager = (StreamProviderManager)grainRuntime.StreamProviderManager;
scheduler.QueueTask(
() => siloStreamProviderManager.LoadStreamProviders(GlobalConfig.ProviderConfigurations, SiloProviderRuntime.Instance),
providerManagerSystemTarget.SchedulingContext)
.WaitWithThrow(initTimeout);
InsideRuntimeClient.Current.CurrentStreamProviderManager = siloStreamProviderManager;
allSiloProviders.AddRange(siloStreamProviderManager.GetProviders());
if (logger.IsVerbose) { logger.Verbose("Stream provider manager created successfully."); }
ISchedulingContext statusOracleContext = ((SystemTarget)LocalSiloStatusOracle).SchedulingContext;
bool waitForPrimaryToStart = globalConfig.PrimaryNodeIsRequired && siloType != SiloType.Primary;
if (waitForPrimaryToStart) // only in MembershipTableGrain case.
{
scheduler.QueueTask(() => membershipFactory.WaitForTableToInit(membershipTable), statusOracleContext)
.WaitWithThrow(initTimeout);
}
scheduler.QueueTask(() => membershipTable.InitializeMembershipTable(GlobalConfig, true, LogManager.GetLogger(membershipTable.GetType().Name)), statusOracleContext)
.WaitWithThrow(initTimeout);
scheduler.QueueTask(() => LocalSiloStatusOracle.Start(), statusOracleContext)
.WaitWithThrow(initTimeout);
if (logger.IsVerbose) { logger.Verbose("Local silo status oracle created successfully."); }
scheduler.QueueTask(LocalSiloStatusOracle.BecomeActive, statusOracleContext)
.WaitWithThrow(initTimeout);
if (logger.IsVerbose) { logger.Verbose("Local silo status oracle became active successfully."); }
//if running in multi cluster scenario, start the MultiClusterNetwork Oracle
if (GlobalConfig.HasMultiClusterNetwork)
{
logger.Info("Creating multicluster oracle with my ServiceId={0} and ClusterId={1}.",
GlobalConfig.ServiceId, GlobalConfig.ClusterId);
ISchedulingContext clusterStatusContext = ((SystemTarget) multiClusterOracle).SchedulingContext;
scheduler.QueueTask(() => multiClusterOracle.Start(LocalSiloStatusOracle), clusterStatusContext)
.WaitWithThrow(initTimeout);
if (logger.IsVerbose) { logger.Verbose("multicluster oracle created successfully."); }
}
try
{
siloStatistics.Start(LocalConfig);
if (logger.IsVerbose) { logger.Verbose("Silo statistics manager started successfully."); }
// Finally, initialize the deployment load collector, for grains with load-based placement
scheduler.QueueTask(DeploymentLoadPublisher.Instance.Start, DeploymentLoadPublisher.Instance.SchedulingContext)
.WaitWithThrow(initTimeout);
if (logger.IsVerbose) { logger.Verbose("Silo deployment load publisher started successfully."); }
// Start background timer tick to watch for platform execution stalls, such as when GC kicks in
platformWatchdog = new Watchdog(nodeConfig.StatisticsLogWriteInterval, healthCheckParticipants);
platformWatchdog.Start();
if (logger.IsVerbose) { logger.Verbose("Silo platform watchdog started successfully."); }
if (reminderService != null)
{
// so, we have the view of the membership in the consistentRingProvider. We can start the reminder service
scheduler.QueueTask(reminderService.Start, ((SystemTarget)reminderService).SchedulingContext)
.WaitWithThrow(initTimeout);
if (logger.IsVerbose)
{
logger.Verbose("Reminder service started successfully.");
}
}
bootstrapProviderManager = new BootstrapProviderManager();
scheduler.QueueTask(
() => bootstrapProviderManager.LoadAppBootstrapProviders(GlobalConfig.ProviderConfigurations),
providerManagerSystemTarget.SchedulingContext)
.WaitWithThrow(initTimeout);
BootstrapProviders = bootstrapProviderManager.GetProviders(); // Data hook for testing & diagnostics
allSiloProviders.AddRange(BootstrapProviders);
if (logger.IsVerbose) { logger.Verbose("App bootstrap calls done successfully."); }
// Start stream providers after silo is active (so the pulling agents don't start sending messages before silo is active).
// also after bootstrap provider started so bootstrap provider can initialize everything stream before events from this silo arrive.
scheduler.QueueTask(siloStreamProviderManager.StartStreamProviders, providerManagerSystemTarget.SchedulingContext)
.WaitWithThrow(initTimeout);
if (logger.IsVerbose) { logger.Verbose("Stream providers started successfully."); }
// Now that we're active, we can start the gateway
var mc = messageCenter as MessageCenter;
if (mc != null)
{
mc.StartGateway(clientRegistrar);
}
if (logger.IsVerbose) { logger.Verbose("Message gateway service started successfully."); }
SystemStatus.Current = SystemStatus.Running;
}
catch (Exception exc)
{
SafeExecute(() => logger.Error(ErrorCode.Runtime_Error_100330, String.Format("Error starting silo {0}. Going to FastKill().", SiloAddress), exc));
FastKill(); // if failed after Membership became active, mark itself as dead in Membership table.
throw;
}
if (logger.IsVerbose) { logger.Verbose("Silo.Start complete: System status = {0}", SystemStatus.Current); }
}
/// <summary>
/// Load and initialize newly added stream providers. Remove providers that are not
/// on the list that's being passed in. The update runs on the provider-manager
/// system target's scheduling context and is bounded by the init timeout.
/// </summary>
public async Task UpdateStreamProviders(IDictionary<string, ProviderCategoryConfiguration> streamProviderConfigurations)
{
    var updateAgent = InsideRuntimeClient.Current.InternalGrainFactory.GetSystemTarget<IStreamProviderManagerAgent>(
        Constants.StreamProviderManagerAgentSystemTargetId, this.SiloAddress);

    Task updateTask = scheduler.QueueTask(
        () => updateAgent.UpdateStreamProviders(streamProviderConfigurations),
        providerManagerSystemTarget.SchedulingContext);

    await updateTask.WithTimeout(initTimeout);
}
// Applies node-level .NET tuning: raises the ThreadPool minimum thread counts when
// the configured MinDotNetThreadPoolSize exceeds the current values, and configures
// ServicePointManager for better Azure storage throughput. No-op under NETSTANDARD_TODO.
private void ConfigureThreadPoolAndServicePointSettings()
{
#if !NETSTANDARD_TODO
    if (nodeConfig.MinDotNetThreadPoolSize > 0)
    {
        int currentWorkerThreads;
        int currentCompletionPortThreads;
        ThreadPool.GetMinThreads(out currentWorkerThreads, out currentCompletionPortThreads);

        bool configuredMinIsLarger = nodeConfig.MinDotNetThreadPoolSize > currentWorkerThreads ||
                                     nodeConfig.MinDotNetThreadPoolSize > currentCompletionPortThreads;
        if (configuredMinIsLarger)
        {
            // Never lower either minimum: take the max of the configured and current values.
            int targetWorkerThreads = Math.Max(nodeConfig.MinDotNetThreadPoolSize, currentWorkerThreads);
            int targetCompletionPortThreads = Math.Max(nodeConfig.MinDotNetThreadPoolSize, currentCompletionPortThreads);

            if (ThreadPool.SetMinThreads(targetWorkerThreads, targetCompletionPortThreads))
            {
                logger.Info(ErrorCode.SiloConfiguredThreadPool,
                    "Configured ThreadPool.SetMinThreads() to values: {0},{1}. Previous values are: {2},{3}.",
                    targetWorkerThreads, targetCompletionPortThreads, currentWorkerThreads, currentCompletionPortThreads);
            }
            else
            {
                logger.Warn(ErrorCode.SiloFailedToConfigureThreadPool,
                    "Failed to configure ThreadPool.SetMinThreads(). Tried to set values to: {0},{1}. Previous values are: {2},{3}.",
                    targetWorkerThreads, targetCompletionPortThreads, currentWorkerThreads, currentCompletionPortThreads);
            }
        }
    }

    // Tune ServicePointManager for small-request throughput against Azure storage; see
    // http://blogs.msdn.com/b/windowsazurestorage/archive/2010/06/25/nagle-s-algorithm-is-not-friendly-towards-small-requests.aspx
    logger.Info(ErrorCode.SiloConfiguredServicePointManager,
        "Configured .NET ServicePointManager to Expect100Continue={0}, DefaultConnectionLimit={1}, UseNagleAlgorithm={2} to improve Azure storage performance.",
        nodeConfig.Expect100Continue, nodeConfig.DefaultConnectionLimit, nodeConfig.UseNagleAlgorithm);
    ServicePointManager.Expect100Continue = nodeConfig.Expect100Continue;
    ServicePointManager.DefaultConnectionLimit = nodeConfig.DefaultConnectionLimit;
    ServicePointManager.UseNagleAlgorithm = nodeConfig.UseNagleAlgorithm;
#endif
}
/// <summary>
/// Gracefully stop the run time system only, but not the application.
/// Applications requests would be abruptly terminated, while the internal system state gracefully stopped and saved as much as possible.
/// Grains are not deactivated.
/// </summary>
public void Stop() => Terminate(false);
/// <summary>
/// Gracefully stop the run time system and the application.
/// All grains will be properly deactivated.
/// All in-flight applications requests would be awaited and finished gracefully.
/// </summary>
public void Shutdown() => Terminate(true);
/// <summary>
/// Stop the run time system. When <paramref name="gracefully"/> is true this is a full
/// Shutdown() (membership goes to ShutDown, all grains are deactivated); when false it is
/// a Stop() (membership goes to Stopping, application requests are abruptly terminated
/// while internal state is saved as much as possible). If a termination is already in
/// progress, this call blocks until it completes. FastKill() always runs in the finally.
/// </summary>
private void Terminate(bool gracefully)
{
string operation = gracefully ? "Shutdown()" : "Stop()";
bool stopAlreadyInProgress = false;
lock (lockable)
{
if (SystemStatus.Current.Equals(SystemStatus.Stopping) ||
SystemStatus.Current.Equals(SystemStatus.ShuttingDown) ||
SystemStatus.Current.Equals(SystemStatus.Terminated))
{
stopAlreadyInProgress = true;
// Drop through to wait below
}
else if (!SystemStatus.Current.Equals(SystemStatus.Running))
{
throw new InvalidOperationException(String.Format("Calling Silo.{0} on a silo which is not in the Running state. This silo is in the {1} state.", operation, SystemStatus.Current));
}
else
{
if (gracefully)
SystemStatus.Current = SystemStatus.ShuttingDown;
else
SystemStatus.Current = SystemStatus.Stopping;
}
}
if (stopAlreadyInProgress)
{
// Another thread is already terminating; poll until it reaches Terminated.
logger.Info(ErrorCode.SiloStopInProgress, "Silo termination is in progress - Will wait for it to finish");
var pause = TimeSpan.FromSeconds(1);
while (!SystemStatus.Current.Equals(SystemStatus.Terminated))
{
logger.Info(ErrorCode.WaitingForSiloStop, "Waiting {0} for termination to complete", pause);
Thread.Sleep(pause);
}
return;
}
try
{
try
{
if (gracefully)
{
logger.Info(ErrorCode.SiloShuttingDown, "Silo starting to Shutdown()");
// 1: Write "ShutDown" state in the table + broadcast gossip msgs to re-read the table to everyone
scheduler.QueueTask(LocalSiloStatusOracle.ShutDown, ((SystemTarget)LocalSiloStatusOracle).SchedulingContext)
.WaitWithThrow(stopTimeout);
}
else
{
logger.Info(ErrorCode.SiloStopping, "Silo starting to Stop()");
// 1: Write "Stopping" state in the table + broadcast gossip msgs to re-read the table to everyone
scheduler.QueueTask(LocalSiloStatusOracle.Stop, ((SystemTarget)LocalSiloStatusOracle).SchedulingContext)
.WaitWithThrow(stopTimeout);
}
}
catch (Exception exc)
{
logger.Error(ErrorCode.SiloFailedToStopMembership, String.Format("Failed to {0} LocalSiloStatusOracle. About to FastKill this silo.", operation), exc);
return; // will go to finally
}
if (reminderService != null)
{
// 2: Stop reminder service
scheduler.QueueTask(reminderService.Stop, ((SystemTarget) reminderService).SchedulingContext)
.WaitWithThrow(stopTimeout);
}
if (gracefully)
{
// 3: Deactivate all grains
SafeExecute(() => catalog.DeactivateAllActivations().WaitWithThrow(stopTimeout));
}
// 3: Stop the gateway
SafeExecute(messageCenter.StopAcceptingClientMessages);
// 4: Start rejecting all silo to silo application messages
SafeExecute(messageCenter.BlockApplicationMessages);
// 5: Stop scheduling/executing application turns
SafeExecute(scheduler.StopApplicationTurns);
// 6: Directory: Speed up directory handoff
// will be started automatically when directory receives SiloStatusChangeNotification(Stopping)
// 7:
SafeExecute(() => LocalGrainDirectory.StopPreparationCompletion.WaitWithThrow(stopTimeout));
// The order of closing providers might be important: Stats, streams, bootstrap, storage.
// Stats first since no one depends on it.
// Storage should definitely be last since other providers may be using it, potentially indirectly.
// Streams and Bootstrap - the order is less clear. Seems like Bootstrap may indirectly depend on Streams, but not the other way around.
// 8:
SafeExecute(() =>
{
scheduler.QueueTask(() => statisticsProviderManager.CloseProviders(), providerManagerSystemTarget.SchedulingContext)
.WaitWithThrow(initTimeout);
});
// 9:
SafeExecute(() =>
{
var siloStreamProviderManager = (StreamProviderManager)grainRuntime.StreamProviderManager;
scheduler.QueueTask(() => siloStreamProviderManager.CloseProviders(), providerManagerSystemTarget.SchedulingContext)
.WaitWithThrow(initTimeout);
});
// 10:
SafeExecute(() =>
{
scheduler.QueueTask(() => bootstrapProviderManager.CloseProviders(), providerManagerSystemTarget.SchedulingContext)
.WaitWithThrow(initTimeout);
});
// 11:
SafeExecute(() =>
{
scheduler.QueueTask(() => storageProviderManager.CloseProviders(), providerManagerSystemTarget.SchedulingContext)
.WaitWithThrow(initTimeout);
});
}
finally
{
// 12+: Write Dead in the table, drain scheduler, stop msg center, ... (see FastKill)
logger.Info(ErrorCode.SiloStopped, "Silo is Stopped()");
FastKill();
}
}
/// <summary>
/// Ungracefully stop the run time system and the application running on it.
/// Applications requests would be abruptly terminated, and the internal system state quickly stopped with minimal cleanup.
/// Marks the silo Dead in the membership table (unless liveness is MembershipTableGrain),
/// then tears components down in dependency order and finally signals siloTerminatedEvent.
/// Every step is wrapped in SafeExecute so one failure does not block the rest.
/// </summary>
private void FastKill()
{
if (!GlobalConfig.LivenessType.Equals(GlobalConfiguration.LivenessProviderType.MembershipTableGrain))
{
// do not execute KillMyself if using MembershipTableGrain, since it will fail, as we've already stopped app scheduler turns.
SafeExecute(() => scheduler.QueueTask( LocalSiloStatusOracle.KillMyself, ((SystemTarget)LocalSiloStatusOracle).SchedulingContext)
.WaitWithThrow(stopTimeout));
}
// incoming messages
SafeExecute(incomingSystemAgent.Stop);
SafeExecute(incomingPingAgent.Stop);
SafeExecute(incomingAgent.Stop);
// timers
SafeExecute(InsideRuntimeClient.Current.Stop);
if (platformWatchdog != null)
SafeExecute(platformWatchdog.Stop); // Silo may be dying before platformWatchdog was set up
SafeExecute(scheduler.Stop);
SafeExecute(scheduler.PrintStatistics);
SafeExecute(activationDirectory.PrintActivationDirectory);
SafeExecute(messageCenter.Stop);
SafeExecute(siloStatistics.Stop);
SafeExecute(GrainTypeManager.Stop);
UnobservedExceptionsHandlerClass.ResetUnobservedExceptionHandler();
SafeExecute(() => SystemStatus.Current = SystemStatus.Terminated);
SafeExecute(LogManager.Close);
// Setting the event should be the last thing we do.
// Do nothing after that!
siloTerminatedEvent.Set();
}
// Runs the given action, logging (rather than propagating) any exception,
// tagged with the "Silo.Stop" caller label.
private void SafeExecute(Action action) => Utils.SafeExecute(action, logger, "Silo.Stop");
// Handler wired to AppDomain.ProcessExit and Console.CancelKeyPress (see DoStart).
// If the silo is Running, transitions it to Stopping and FastKill()s it; otherwise
// returns immediately. TestHooks can suppress the FastKill via ExecuteFastKillInProcessExit.
private void HandleProcessExit(object sender, EventArgs e)
{
// NOTE: We need to minimize the amount of processing occurring on this code path -- we only have under approx 2-3 seconds before process exit will occur
logger.Warn(ErrorCode.Runtime_Error_100220, "Process is exiting");
LogManager.Flush();
try
{
lock (lockable)
{
// Only a Running silo reacts to process exit; any other state means a
// stop/start is already being handled elsewhere.
if (!SystemStatus.Current.Equals(SystemStatus.Running)) return;
SystemStatus.Current = SystemStatus.Stopping;
}
if (!TestHook.ExecuteFastKillInProcessExit) return;
logger.Info(ErrorCode.SiloStopping, "Silo.HandleProcessExit() - starting to FastKill()");
FastKill();
}
finally
{
LogManager.Close();
}
}
/// <summary>
/// Test hook functions for white box testing.
/// Marshalled by reference so test code in another app-domain can poke at a live silo.
/// </summary>
public class TestHooks
#if !NETSTANDARD_TODO
: MarshalByRefObject
#endif
{
// The silo instance this hook object exposes.
private readonly Silo silo;
// When false, HandleProcessExit skips FastKill (set via SuppressFastKillInHandleProcessExit).
internal bool ExecuteFastKillInProcessExit;
// The silo's consistent ring provider, boundary-checked for app-domain crossing.
internal IConsistentRingProvider ConsistentRingProvider
{
get { return CheckReturnBoundaryReference("ring provider", silo.RingProvider); }
}
internal bool HasStatisticsProvider { get { return silo.statisticsProviderManager != null; } }
// The configured statistics provider, or null if none has been loaded.
internal object StatisticsProvider
{
get
{
if (silo.statisticsProviderManager == null) return null;
var provider = silo.statisticsProviderManager.GetProvider(silo.LocalConfig.StatisticsProviderName);
return CheckReturnBoundaryReference("statistics provider", provider);
}
}
/// <summary>
/// Populates the provided <paramref name="collection"/> with the assemblies generated by this silo.
/// </summary>
/// <param name="collection">The collection to populate.</param>
public void UpdateGeneratedAssemblies(GeneratedAssemblies collection)
{
var generatedAssemblies = CodeGeneratorManager.GetGeneratedAssemblies();
foreach (var asm in generatedAssemblies)
{
collection.Add(asm.Key, asm.Value);
}
}
// Test callback invoked when the activation collector decides to collect an activation.
internal Action<GrainId> Debug_OnDecideToCollectActivation { get; set; }
internal TestHooks(Silo s)
{
silo = s;
ExecuteFastKillInProcessExit = true;
}
internal Guid ServiceId { get { return silo.GlobalConfig.ServiceId; } }
/// <summary>
/// Get list of providers loaded in this silo.
/// </summary>
/// <returns></returns>
internal IEnumerable<string> GetStorageProviderNames()
{
return silo.StorageProviderManager.GetProviderNames();
}
/// <summary>
/// Find the named storage provider loaded in this silo.
/// </summary>
/// <returns></returns>
internal IStorageProvider GetStorageProvider(string name)
{
IStorageProvider provider = silo.StorageProviderManager.GetProvider(name) as IStorageProvider;
return CheckReturnBoundaryReference("storage provider", provider);
}
internal string PrintSiloConfig()
{
return silo.OrleansConfig.ToString(silo.Name);
}
// Throws InvalidOperationException if no bootstrap provider has the given name (First semantics).
internal IBootstrapProvider GetBootstrapProvider(string name)
{
IBootstrapProvider provider = silo.BootstrapProviders.First(p => p.Name.Equals(name));
return CheckReturnBoundaryReference("bootstrap provider", provider);
}
internal IEnumerable<string> GetStreamProviderNames()
{
return silo.StreamProviderManager.GetStreamProviders().Select(p => ((IProvider)p).Name).ToList();
}
internal IEnumerable<string> GetAllSiloProviderNames()
{
var providers = silo.AllSiloProviders;
return providers.Select(p => ((IProvider)p).Name).ToList();
}
internal void SuppressFastKillInHandleProcessExit()
{
ExecuteFastKillInProcessExit = false;
}
// used for testing only: returns directory entries whose type name contains the given string
internal IDictionary<GrainId, IGrainInfo> GetDirectoryForTypeNamesContaining(string expr)
{
var x = new Dictionary<GrainId, IGrainInfo>();
foreach (var kvp in silo.localGrainDirectory.DirectoryPartition.GetItems())
{
if (kvp.Key.IsSystemTarget || kvp.Key.IsClient || !kvp.Key.IsGrain)
continue;// Skip system grains, system targets and clients
if (silo.catalog.GetGrainTypeName(kvp.Key).Contains(expr))
x.Add(kvp.Key, kvp.Value);
}
return x;
}
// store silos for which we simulate faulty communication
// number indicates how many percent of requests are lost
internal ConcurrentDictionary<IPEndPoint, double> SimulatedMessageLoss;
internal void BlockSiloCommunication(IPEndPoint destination, double lost_percentage)
{
if (SimulatedMessageLoss == null)
SimulatedMessageLoss = new ConcurrentDictionary<IPEndPoint, double>();
SimulatedMessageLoss[destination] = lost_percentage;
}
internal void UnblockSiloCommunication()
{
SimulatedMessageLoss = null;
}
private readonly SafeRandom random = new SafeRandom();
// Decides (probabilistically) whether to drop the given message, based on the
// simulated loss percentage configured for the message's target silo.
internal bool ShouldDrop(Message msg)
{
if (SimulatedMessageLoss != null)
{
double blockedpercentage;
// NOTE(review): checks this instance's SimulatedMessageLoss for null but then reads
// via the static CurrentSilo.TestHook - if they ever differ this could throw; confirm
// CurrentSilo.TestHook is always this instance, or read the instance field directly.
CurrentSilo.TestHook.SimulatedMessageLoss.TryGetValue(msg.TargetSilo.Endpoint, out blockedpercentage);
return (random.NextDouble() * 100 < blockedpercentage);
}
else
return false;
}
// this is only for white box testing - use RuntimeClient.Current.SendRequest instead
internal void SendMessageInternal(Message message)
{
silo.messageCenter.SendMessage(message);
}
// For white-box testing only
internal int UnregisterGrainForTesting(GrainId grain)
{
return silo.catalog.UnregisterGrainForTesting(grain);
}
// For white-box testing only
internal void SetDirectoryLazyDeregistrationDelay_ForTesting(TimeSpan timeSpan)
{
silo.OrleansConfig.Globals.DirectoryLazyDeregistrationDelay = timeSpan;
}
// For white-box testing only
internal void SetMaxForwardCount_ForTesting(int val)
{
silo.OrleansConfig.Globals.MaxForwardCount = val;
}
// Guards objects handed across the app-domain boundary: only MarshalByRefObject
// or ISerializable instances can safely cross; anything else throws.
private static T CheckReturnBoundaryReference<T>(string what, T obj) where T : class
{
if (obj == null) return null;
if (
#if !NETSTANDARD_TODO
obj is MarshalByRefObject ||
#endif
obj is ISerializable)
{
// Reference to the provider can safely be passed across app-domain boundary in unit test process
return obj;
}
throw new InvalidOperationException(string.Format("Cannot return reference to {0} {1} if it is not MarshalByRefObject or Serializable",
what, TypeUtils.GetFullName(obj.GetType())));
}
/// <summary>
/// Represents a collection of generated assemblies across an application domain.
/// </summary>
public class GeneratedAssemblies
#if !NETSTANDARD_TODO
: MarshalByRefObject
#endif
{
/// <summary>
/// Initializes a new instance of the <see cref="GeneratedAssemblies"/> class.
/// </summary>
public GeneratedAssemblies()
{
Assemblies = new Dictionary<string, GeneratedAssembly>();
}
/// <summary>
/// Gets the assemblies which were produced by code generation.
/// </summary>
public Dictionary<string, GeneratedAssembly> Assemblies { get; }
/// <summary>
/// Adds a new assembly to this collection.
/// </summary>
/// <param name="key">
/// The full name of the assembly which code was generated for.
/// </param>
/// <param name="value">
/// The raw generated assembly.
/// </param>
public void Add(string key, GeneratedAssembly value)
{
if (!string.IsNullOrWhiteSpace(key))
{
Assemblies[key] = value;
}
}
}
/// <summary>
/// Methods for optimizing the code generator.
/// </summary>
public class CodeGeneratorOptimizer
#if !NETSTANDARD_TODO
: MarshalByRefObject
#endif
{
/// <summary>
/// Adds a cached assembly to the code generator.
/// </summary>
/// <param name="targetAssemblyName">The assembly which the cached assembly was generated for.</param>
/// <param name="cachedAssembly">The generated assembly.</param>
public void AddCachedAssembly(string targetAssemblyName, GeneratedAssembly cachedAssembly)
{
CodeGeneratorManager.AddGeneratedAssembly(targetAssemblyName, cachedAssembly);
}
}
}
// Logs an unobserved exception, choosing the message by what kind of scheduling
// context (if any) it surfaced on. The activation-specific form is used when the
// context is a concrete Orleans SchedulingContext.
private void UnobservedExceptionHandler(ISchedulingContext context, Exception exception)
{
    var schedulingContext = context as SchedulingContext;
    if (schedulingContext != null)
    {
        logger.Error(ErrorCode.Runtime_Error_100104, String.Format("Silo caught an UnobservedException thrown by {0}.", schedulingContext.Activation), exception);
    }
    else if (context == null)
    {
        logger.Error(ErrorCode.Runtime_Error_100102, "Silo caught an UnobservedException with context==null.", exception);
    }
    else
    {
        logger.Error(ErrorCode.Runtime_Error_100103, String.Format("Silo caught an UnobservedException with context of type different than OrleansContext. The type of the context is {0}. The context is {1}",
            context.GetType(), context), exception);
    }
}
// App-domain-level unobserved exception handler. Routes scheduling-context
// exceptions to UnobservedExceptionHandler; anything else is logged directly.
private void DomainUnobservedExceptionHandler(object context, Exception exception)
{
    // Cast once instead of the original's redundant `is` check followed by `as` cast.
    var schedulingContext = context as ISchedulingContext;
    if (schedulingContext != null)
        UnobservedExceptionHandler(schedulingContext, exception);
    else
        logger.Error(ErrorCode.Runtime_Error_100324, String.Format("Called DomainUnobservedExceptionHandler with context {0}.", context), exception);
}
// Makes a system target live on this silo: registers its scheduling context with
// the scheduler, then records it in the activation directory so messages can reach it.
internal void RegisterSystemTarget(SystemTarget target)
{
scheduler.RegisterWorkContext(target.SchedulingContext);
activationDirectory.RecordNewSystemTarget(target);
}
// Reverses RegisterSystemTarget, in the opposite order: remove from the activation
// directory first, then unregister the scheduling context.
internal void UnregisterSystemTarget(SystemTarget target)
{
activationDirectory.RemoveSystemTarget(target);
scheduler.UnregisterWorkContext(target.SchedulingContext);
}
/// <summary> Return dump of diagnostic data from this silo. </summary>
/// <param name="all">When true, dump every activation; when false, only activations in the Valid state.</param>
/// <returns>Debug data for this silo.</returns>
public string GetDebugDump(bool all = true)
{
    var dump = new StringBuilder();

    foreach (var systemTarget in activationDirectory.AllSystemTargets())
    {
        dump.AppendFormat("System target {0}:", systemTarget.GrainId.ToString()).AppendLine();
    }

    foreach (var entry in activationDirectory)
    {
        // Each activation is dumped best-effort; a failure on one does not stop the dump.
        Utils.SafeExecute(() =>
        {
            var activation = entry.Value;
            var workItemGroup = scheduler.GetWorkItemGroup(new SchedulingContext(activation));
            if (workItemGroup == null)
            {
                dump.AppendFormat("Activation with no work item group!! Grain {0}, activation {1}.",
                    activation.Grain,
                    activation.ActivationId);
                dump.AppendLine();
                return;
            }
            if (all || activation.State.Equals(ActivationState.Valid))
            {
                dump.AppendLine(workItemGroup.DumpStatus());
                dump.AppendLine(activation.DumpStatus());
            }
        });
    }

    logger.Info(ErrorCode.SiloDebugDump, dump.ToString());
    return dump.ToString();
}
/// <summary> Object.ToString override -- summary info for this silo (delegates to the local grain directory). </summary>
public override string ToString() => localGrainDirectory.ToString();
}
/// <summary>
/// A dummy system target used as the scheduling context for provider Init calls,
/// so that providers can make grain calls during initialization. It has no behavior
/// of its own beyond the well-known ProviderManagerSystemTargetId identity.
/// </summary>
internal class ProviderManagerSystemTarget : SystemTarget
{
public ProviderManagerSystemTarget(Silo currentSilo)
: base(Constants.ProviderManagerSystemTargetId, currentSilo.SiloAddress)
{
}
}
}
| |
/********************************************************************
The Multiverse Platform is made available under the MIT License.
Copyright (c) 2012 The Multiverse Foundation
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.
*********************************************************************/
using System;
using System.Windows.Forms;
using System.Collections.Generic;
using System.Text;
using System.Xml;
using System.ComponentModel;
using Axiom.MathLib;
namespace Multiverse.Tools.WorldEditor
{
/// <summary>
/// World object representing the scene's skybox. Wraps a skybox asset name and
/// pushes changes into the scene via WorldEditor.SetSkybox.
/// </summary>
public class Skybox : IWorldObject
{
    protected IWorldContainer parent;     // owning container
    protected WorldEditor app;            // application services (assets, scene, tree)
    protected WorldTreeNode node;         // this object's tree node, null when not in tree
    protected WorldTreeNode parentNode;   // parent tree node, null when not in tree
    protected bool inScene = false;
    protected bool inTree = false;
    protected string skyBoxName = null;   // asset name; null/empty means "no skybox"
    protected List<ToolStripButton> buttonBar;

    public Skybox(IWorldContainer parentContainer, WorldEditor worldEditor)
    {
        this.parent = parentContainer;
        this.app = worldEditor;
    }

    /// <summary>Deserializing constructor: reads state from the given XML reader.</summary>
    public Skybox(IWorldContainer parentContainer, WorldEditor worldEditor, XmlReader r)
    {
        this.parent = parentContainer;
        this.app = worldEditor;
        FromXml(r);
    }

    /// <summary>Populates this object from the attributes of the current XML element.</summary>
    protected void FromXml(XmlReader r)
    {
        // first parse the attributes
        for (int i = 0; i < r.AttributeCount; i++)
        {
            r.MoveToAttribute(i);
            // set the field in this object based on the element we just read
            switch (r.Name)
            {
                case "Name":
                    skyBoxName = r.Value;
                    break;
            }
        }
        r.MoveToElement(); //Moves the reader back to the element node.
    }

    /// <summary>
    /// Display name of the skybox asset, or "None" when unset. Setting a value
    /// resolves the asset name and immediately applies it to the scene.
    /// </summary>
    [TypeConverter(typeof(SkyboxAssetListConverter)), DescriptionAttribute("Name of the skybox as defined in the asset repository."), CategoryAttribute("Miscellaneous")]
    public string SkyboxName
    {
        get
        {
            if (skyBoxName == null || skyBoxName == "")
            {
                return "None";
            }
            else
            {
                return app.Assets.assetFromAssetName(skyBoxName).Name;
            }
        }
        set
        {
            if (value == "None")
            {
                skyBoxName = null;
                app.SetSkybox(false, null);
            }
            else
            {
                skyBoxName = app.Assets.assetFromName(value).AssetName;
                app.SetSkybox(true, skyBoxName);
            }
        }
    }

    #region IWorldObject Members

    /// <summary>Adds this object's node (with context menu) under the given tree node.</summary>
    public void AddToTree(WorldTreeNode parentNode)
    {
        this.parentNode = parentNode;
        node = app.MakeTreeNode(this, "Skybox");
        parentNode.Nodes.Add(node);
        CommandMenuBuilder menuBuilder = new CommandMenuBuilder();
        menuBuilder.Add("Copy Description", "", app.copyToClipboardMenuButton_Click);
        menuBuilder.Add("Help", "Skybox", app.HelpClickHandler);
        node.ContextMenuStrip = menuBuilder.Menu;
        inTree = true;
        buttonBar = menuBuilder.ButtonBar;
    }

    [BrowsableAttribute(false)]
    public bool IsGlobal
    {
        get
        {
            return true;
        }
    }

    [BrowsableAttribute(false)]
    public bool IsTopLevel
    {
        get
        {
            return false;
        }
    }

    /// <summary>Cloning is intentionally a no-op: the skybox is a global singleton per world.</summary>
    public void Clone(IWorldContainer copyParent)
    {
    }

    [BrowsableAttribute(false)]
    public bool WorldViewSelectable
    {
        get
        {
            return false;
        }
        set
        {
            // This property is not relevent for this object.
        }
    }

    /// <summary>Human-readable dump of this object's properties.</summary>
    [BrowsableAttribute(false)]
    public string ObjectAsString
    {
        get
        {
            string objString = String.Format("Name:{0}\r\n", ObjectType);
            objString += String.Format("\tSkyboxName={0}\r\n", SkyboxName);
            objString += "\r\n";
            return objString;
        }
    }

    [BrowsableAttribute(false)]
    public List<ToolStripButton> ButtonBar
    {
        get
        {
            return buttonBar;
        }
    }

    /// <summary>Removes this object's node from the world tree and clears tree state.</summary>
    public void RemoveFromTree()
    {
        if (node.IsSelected)
        {
            node.UnSelect();
        }
        parentNode.Nodes.Remove(node);
        parentNode = null;
        node = null;
        inTree = false;
    }

    /// <summary>Applies the configured skybox to the scene (or clears it when unset).</summary>
    public void AddToScene()
    {
        if ((skyBoxName == null) || skyBoxName == "")
        {
            app.SetSkybox(false, null);
        }
        else
        {
            app.SetSkybox(true, skyBoxName);
        }
    }

    public void UpdateScene(UpdateTypes type, UpdateHint hint)
    {
    }

    /// <summary>Records a missing-asset entry if the skybox material cannot be found.</summary>
    public void CheckAssets()
    {
        // Guard: no skybox configured means nothing to check. Previously a
        // null/empty name was passed straight into CheckMaterialExists.
        if (skyBoxName == null || skyBoxName == "")
        {
            return;
        }
        if (!app.CheckMaterialExists(skyBoxName))
        {
            app.AddMissingAsset(string.Format("Material: {0}", skyBoxName));
        }
    }

    [BrowsableAttribute(false)]
    public WorldTreeNode Node
    {
        get
        {
            return node;
        }
    }

    public void RemoveFromScene()
    {
        app.SetSkybox(false, null);
    }

    /// <summary>Writes a &lt;Skybox Name="..."/&gt; element, omitted entirely when no skybox is set.</summary>
    public void ToXml(XmlWriter w)
    {
        if ((skyBoxName != null) && skyBoxName != "")
        {
            w.WriteStartElement("Skybox");
            w.WriteAttributeString("Name", skyBoxName);
            w.WriteEndElement();
        }
    }

    [BrowsableAttribute(false)]
    public Vector3 FocusLocation
    {
        get
        {
            return Vector3.Zero;
        }
    }

    [BrowsableAttribute(false)]
    public bool Highlight
    {
        get
        {
            return false;
        }
        set
        {
            // do nothing
        }
    }

    [DescriptionAttribute("The type of this object."), CategoryAttribute("Miscellaneous")]
    public string ObjectType
    {
        get
        {
            return "Skybox";
        }
    }

    /// <summary>Writes the skybox material reference into the world manifest.</summary>
    public void ToManifest(System.IO.StreamWriter w)
    {
        if ((skyBoxName != null) && (skyBoxName != ""))
        {
            w.WriteLine("Material:{0}", skyBoxName);
        }
    }

    [BrowsableAttribute(false)]
    public bool AcceptObjectPlacement
    {
        get
        {
            return false;
        }
        set
        {
            //not implemented for this type of object
        }
    }

    #endregion

    #region IDisposable Members

    public void Dispose()
    {
        // Intentionally a no-op: this object holds no unmanaged resources, and
        // Dispose must never throw (the previous implementation threw Exception,
        // which would crash any caller following the using/dispose pattern).
    }

    #endregion
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Collections.Generic;
using IEnumerable = System.Collections.IEnumerable;
using SuppressMessageAttribute = System.Diagnostics.CodeAnalysis.SuppressMessageAttribute;
namespace System.Xml.Linq
{
/// <summary>
/// Helper for inserting content into an XContainer after a given anchor node.
/// Text is accumulated in _text and only committed to the tree (as an XText
/// node or as the container's raw string content) when required.
/// </summary>
internal struct Inserter
{
private XContainer _parent;   // container receiving the new content
private XNode _previous;      // node after which new nodes go; null means prepend
private string _text;         // pending text not yet committed to the tree
public Inserter(XContainer parent, XNode anchor)
{
_parent = parent;
_previous = anchor;
_text = null;
}
/// <summary>
/// Adds the given content, then commits any accumulated text into the container.
/// </summary>
public void Add(object content)
{
AddContent(content);
if (_text != null)
{
if (_parent.content == null)
{
if (_parent.SkipNotify())
{
// No change listeners: store the string directly as content.
_parent.content = _text;
}
else
{
if (_text.Length > 0)
{
InsertNode(new XText(_text));
}
else
{
if (_parent is XElement)
{
// Change in the serialization of an empty element:
// from empty tag to start/end tag pair
_parent.NotifyChanging(_parent, XObjectChangeEventArgs.Value);
// A change handler may have mutated the tree; re-check before writing.
if (_parent.content != null) throw new InvalidOperationException(SR.InvalidOperation_ExternalCode);
_parent.content = _text;
_parent.NotifyChanged(_parent, XObjectChangeEventArgs.Value);
}
else
{
_parent.content = _text;
}
}
}
}
else if (_text.Length > 0)
{
XText prevXText = _previous as XText;
if (prevXText != null && !(_previous is XCData))
{
// Merge adjacent text into the preceding XText node (never into CDATA).
prevXText.Value += _text;
}
else
{
_parent.ConvertTextToNode();
InsertNode(new XText(_text));
}
}
}
}
/// <summary>
/// Dispatches content by runtime type: nodes, strings, streaming elements,
/// arrays/sequences (flattened recursively), or anything else converted to
/// its XML string value. Attributes are rejected here.
/// </summary>
private void AddContent(object content)
{
if (content == null) return;
XNode n = content as XNode;
if (n != null)
{
AddNode(n);
return;
}
string s = content as string;
if (s != null)
{
AddString(s);
return;
}
XStreamingElement x = content as XStreamingElement;
if (x != null)
{
AddNode(new XElement(x));
return;
}
object[] o = content as object[];
if (o != null)
{
foreach (object obj in o) AddContent(obj);
return;
}
IEnumerable e = content as IEnumerable;
if (e != null)
{
foreach (object obj in e) AddContent(obj);
return;
}
if (content is XAttribute) throw new ArgumentException(SR.Argument_AddAttribute);
AddString(XContainer.GetStringValue(content));
}
/// <summary>
/// Inserts node n, cloning it when it is already parented or is the root of
/// the tree containing the target container; flushes pending text first.
/// </summary>
private void AddNode(XNode n)
{
_parent.ValidateNode(n, _previous);
if (n.parent != null)
{
n = n.CloneNode();
}
else
{
// Clone if n is an ancestor root of _parent (inserting a tree into itself).
XNode p = _parent;
while (p.parent != null) p = p.parent;
if (n == p) n = n.CloneNode();
}
_parent.ConvertTextToNode();
if (_text != null)
{
if (_text.Length > 0)
{
XText prevXText = _previous as XText;
if (prevXText != null && !(_previous is XCData))
{
prevXText.Value += _text;
}
else
{
InsertNode(new XText(_text));
}
}
_text = null;
}
InsertNode(n);
}
// Accumulates validated text; committed later by Add/AddNode.
private void AddString(string s)
{
_parent.ValidateString(s);
_text += s;
}
// Prepends if previous == null, otherwise inserts after previous
private void InsertNode(XNode n)
{
bool notify = _parent.NotifyChanging(n, XObjectChangeEventArgs.Add);
// A change handler may have parented the node in the meantime.
if (n.parent != null) throw new InvalidOperationException(SR.InvalidOperation_ExternalCode);
n.parent = _parent;
if (_parent.content == null || _parent.content is string)
{
// First node: content becomes a one-element circular list.
n.next = n;
_parent.content = n;
}
else if (_previous == null)
{
// Prepend: content holds the last node, so link n after it.
XNode last = (XNode)_parent.content;
n.next = last.next;
last.next = n;
}
else
{
n.next = _previous.next;
_previous.next = n;
// Inserted after the last node: n becomes the new last node.
if (_parent.content == _previous) _parent.content = n;
}
_previous = n;
if (notify) _parent.NotifyChanged(n, XObjectChangeEventArgs.Add);
}
}
/// <summary>
/// Single-entry cache mapping the most recently seen namespace name string
/// (compared by reference) to its XNamespace instance, avoiding repeated
/// XNamespace.Get lookups.
/// </summary>
internal struct NamespaceCache
{
    private XNamespace _ns;          // cached namespace object
    private string _namespaceName;   // exact string instance last resolved

    /// <summary>
    /// Returns the XNamespace for <paramref name="namespaceName"/>, reusing
    /// the cached instance when the identical string reference is seen again.
    /// </summary>
    public XNamespace Get(string namespaceName)
    {
        // Reference (not value) comparison: callers typically supply the same
        // interned string repeatedly, making this a cheap O(1) hit.
        if (!ReferenceEquals(namespaceName, _namespaceName))
        {
            _namespaceName = namespaceName;
            _ns = XNamespace.Get(namespaceName);
        }
        return _ns;
    }
}
/// <summary>
/// Writes an XElement subtree to an XmlWriter iteratively (no recursion),
/// tracking in-scope namespace declarations with a NamespaceResolver so each
/// name is emitted with an appropriate prefix.
/// </summary>
internal struct ElementWriter
{
private XmlWriter _writer;
private NamespaceResolver _resolver;
public ElementWriter(XmlWriter writer)
{
_writer = writer;
_resolver = new NamespaceResolver();
}
/// <summary>
/// Writes element e and all of its descendants to the underlying writer.
/// </summary>
public void WriteElement(XElement e)
{
// Seed the resolver with namespace declarations from e's ancestors.
PushAncestors(e);
XElement root = e;
XNode n = e;
while (true)
{
e = n as XElement;
if (e != null)
{
WriteStartElement(e);
if (e.content == null)
{
WriteEndElement();
}
else
{
string s = e.content as string;
if (s != null)
{
// String content: emit it and close with an explicit end tag.
_writer.WriteString(s);
WriteFullEndElement();
}
else
{
// Node content: descend to the first child
// (content holds the last node; its .next is the first).
n = ((XNode)e.content).next;
continue;
}
}
}
else
{
n.WriteTo(_writer);
}
// Climb back up, closing each element whose last child was just written.
while (n != root && n == n.parent.content)
{
n = n.parent;
WriteFullEndElement();
}
if (n == root) break;
n = n.next;
}
}
/// <summary>
/// Resolves a prefix for ns: in-scope declaration first, then the reserved
/// xml/xmlns prefixes; null if no prefix is available.
/// </summary>
private string GetPrefixOfNamespace(XNamespace ns, bool allowDefaultNamespace)
{
string namespaceName = ns.NamespaceName;
if (namespaceName.Length == 0) return string.Empty;
string prefix = _resolver.GetPrefixOfNamespace(ns, allowDefaultNamespace);
if (prefix != null) return prefix;
if ((object)namespaceName == (object)XNamespace.xmlPrefixNamespace) return "xml";
if ((object)namespaceName == (object)XNamespace.xmlnsPrefixNamespace) return "xmlns";
return null;
}
/// <summary>
/// Walks e's ancestors outward, registering their namespace declarations
/// via AddFirst so outer (older) declarations sit behind inner ones.
/// </summary>
private void PushAncestors(XElement e)
{
while (true)
{
e = e.parent as XElement;
if (e == null) break;
XAttribute a = e.lastAttr;
if (a != null)
{
// lastAttr anchors a circular list; a.next is the first attribute.
do
{
a = a.next;
if (a.IsNamespaceDeclaration)
{
_resolver.AddFirst(a.Name.NamespaceName.Length == 0 ? string.Empty : a.Name.LocalName, XNamespace.Get(a.Value));
}
} while (a != e.lastAttr);
}
}
}
/// <summary>
/// Opens a resolver scope for e and registers its namespace declarations.
/// </summary>
private void PushElement(XElement e)
{
_resolver.PushScope();
XAttribute a = e.lastAttr;
if (a != null)
{
do
{
a = a.next;
if (a.IsNamespaceDeclaration)
{
_resolver.Add(a.Name.NamespaceName.Length == 0 ? string.Empty : a.Name.LocalName, XNamespace.Get(a.Value));
}
} while (a != e.lastAttr);
}
}
private void WriteEndElement()
{
_writer.WriteEndElement();
_resolver.PopScope();
}
private void WriteFullEndElement()
{
_writer.WriteFullEndElement();
_resolver.PopScope();
}
/// <summary>
/// Writes e's start tag and all of its attributes with resolved prefixes.
/// </summary>
private void WriteStartElement(XElement e)
{
PushElement(e);
XNamespace ns = e.Name.Namespace;
_writer.WriteStartElement(GetPrefixOfNamespace(ns, true), e.Name.LocalName, ns.NamespaceName);
XAttribute a = e.lastAttr;
if (a != null)
{
do
{
a = a.next;
ns = a.Name.Namespace;
string localName = a.Name.LocalName;
string namespaceName = ns.NamespaceName;
// Bare "xmlns" attributes must be reported under the xmlns namespace.
_writer.WriteAttributeString(GetPrefixOfNamespace(ns, false), localName, namespaceName.Length == 0 && localName == "xmlns" ? XNamespace.xmlnsPrefixNamespace : namespaceName, a.Value);
} while (a != e.lastAttr);
}
}
}
/// <summary>
/// Tracks in-scope namespace declarations in a circular linked list whose
/// head (_declaration) is the newest entry; prev links run back through
/// progressively older entries.
/// </summary>
internal struct NamespaceResolver
{
class NamespaceDeclaration
{
public string prefix;
public XNamespace ns;
public int scope;                 // scope depth at which this declaration was added
public NamespaceDeclaration prev; // next-older declaration (circular)
}
private int _scope;                        // current nesting depth
private NamespaceDeclaration _declaration; // newest declaration, or null when empty
private NamespaceDeclaration _rover;       // cache of the last declaration matched by GetPrefixOfNamespace
public void PushScope()
{
_scope++;
}
/// <summary>
/// Removes every declaration added at the current scope, then decrements the depth.
/// </summary>
public void PopScope()
{
NamespaceDeclaration d = _declaration;
if (d != null)
{
do
{
d = d.prev;
if (d.scope != _scope) break;
if (d == _declaration)
{
// Last remaining entry: the list becomes empty.
_declaration = null;
}
else
{
// Unlink d from the circular list.
_declaration.prev = d.prev;
}
_rover = null;
} while (d != _declaration && _declaration != null);
}
_scope--;
}
/// <summary>
/// Adds a declaration at the current scope, inserting it just behind the head.
/// </summary>
public void Add(string prefix, XNamespace ns)
{
NamespaceDeclaration d = new NamespaceDeclaration();
d.prefix = prefix;
d.ns = ns;
d.scope = _scope;
if (_declaration == null)
{
_declaration = d;
}
else
{
d.prev = _declaration.prev;
}
_declaration.prev = d;
_rover = null;
}
/// <summary>
/// Adds a declaration that becomes the new head; used when walking ancestors
/// outward (see ElementWriter.PushAncestors) so outer declarations are added
/// behind inner ones.
/// </summary>
public void AddFirst(string prefix, XNamespace ns)
{
NamespaceDeclaration d = new NamespaceDeclaration();
d.prefix = prefix;
d.ns = ns;
d.scope = _scope;
if (_declaration == null)
{
d.prev = d;
}
else
{
d.prev = _declaration.prev;
_declaration.prev = d;
}
_declaration = d;
_rover = null;
}
// Only elements allow default namespace declarations. The rover
// caches the last namespace declaration used by an element.
public string GetPrefixOfNamespace(XNamespace ns, bool allowDefaultNamespace)
{
if (_rover != null && _rover.ns == ns && (allowDefaultNamespace || _rover.prefix.Length > 0)) return _rover.prefix;
NamespaceDeclaration d = _declaration;
if (d != null)
{
do
{
d = d.prev;
if (d.ns == ns)
{
// Reject the match if a newer declaration shadows this prefix.
NamespaceDeclaration x = _declaration.prev;
while (x != d && x.prefix != d.prefix)
{
x = x.prev;
}
if (x == d)
{
if (allowDefaultNamespace)
{
_rover = d;
return d.prefix;
}
else if (d.prefix.Length > 0)
{
return d.prefix;
}
}
}
} while (d != _declaration);
}
return null;
}
}
/// <summary>
/// Writes XStreamingElement trees to an XmlWriter. The start tag of an element
/// is deferred (held in _element with its pending attributes in _attributes)
/// until the first non-attribute content forces it to be flushed.
/// </summary>
internal struct StreamingElementWriter
{
private XmlWriter _writer;
private XStreamingElement _element;      // element whose start tag has not been written yet
private List<XAttribute> _attributes;    // attributes pending for _element
private NamespaceResolver _resolver;
public StreamingElementWriter(XmlWriter w)
{
_writer = w;
_element = null;
_attributes = new List<XAttribute>();
_resolver = new NamespaceResolver();
}
/// <summary>
/// Emits the deferred start tag and pending attributes, if any.
/// </summary>
private void FlushElement()
{
if (_element != null)
{
PushElement();
XNamespace ns = _element.Name.Namespace;
_writer.WriteStartElement(GetPrefixOfNamespace(ns, true), _element.Name.LocalName, ns.NamespaceName);
foreach (XAttribute a in _attributes)
{
ns = a.Name.Namespace;
string localName = a.Name.LocalName;
string namespaceName = ns.NamespaceName;
// Bare "xmlns" attributes must be reported under the xmlns namespace.
_writer.WriteAttributeString(GetPrefixOfNamespace(ns, false), localName, namespaceName.Length == 0 && localName == "xmlns" ? XNamespace.xmlnsPrefixNamespace : namespaceName, a.Value);
}
_element = null;
_attributes.Clear();
}
}
/// <summary>
/// Resolves a prefix for ns: in-scope declaration first, then the reserved
/// xml/xmlns prefixes; null if no prefix is available.
/// </summary>
private string GetPrefixOfNamespace(XNamespace ns, bool allowDefaultNamespace)
{
string namespaceName = ns.NamespaceName;
if (namespaceName.Length == 0) return string.Empty;
string prefix = _resolver.GetPrefixOfNamespace(ns, allowDefaultNamespace);
if (prefix != null) return prefix;
if ((object)namespaceName == (object)XNamespace.xmlPrefixNamespace) return "xml";
if ((object)namespaceName == (object)XNamespace.xmlnsPrefixNamespace) return "xmlns";
return null;
}
/// <summary>
/// Opens a resolver scope and registers namespace declarations from the
/// pending attribute list.
/// </summary>
private void PushElement()
{
_resolver.PushScope();
foreach (XAttribute a in _attributes)
{
if (a.IsNamespaceDeclaration)
{
_resolver.Add(a.Name.NamespaceName.Length == 0 ? string.Empty : a.Name.LocalName, XNamespace.Get(a.Value));
}
}
}
/// <summary>
/// Dispatches content by runtime type: nodes, strings, attributes, streaming
/// elements, arrays/sequences (flattened recursively), or anything else
/// converted to its XML string value.
/// </summary>
private void Write(object content)
{
if (content == null) return;
XNode n = content as XNode;
if (n != null)
{
WriteNode(n);
return;
}
string s = content as string;
if (s != null)
{
WriteString(s);
return;
}
XAttribute a = content as XAttribute;
if (a != null)
{
WriteAttribute(a);
return;
}
XStreamingElement x = content as XStreamingElement;
if (x != null)
{
WriteStreamingElement(x);
return;
}
object[] o = content as object[];
if (o != null)
{
foreach (object obj in o) Write(obj);
return;
}
IEnumerable e = content as IEnumerable;
if (e != null)
{
foreach (object obj in e) Write(obj);
return;
}
WriteString(XContainer.GetStringValue(content));
}
// Attributes are only legal while a start tag is still pending.
private void WriteAttribute(XAttribute a)
{
if (_element == null) throw new InvalidOperationException(SR.InvalidOperation_WriteAttribute);
_attributes.Add(a);
}
private void WriteNode(XNode n)
{
FlushElement();
n.WriteTo(_writer);
}
/// <summary>
/// Writes a streaming element: defers the start tag, writes its content, then
/// closes with a full end tag when content was written and a short one otherwise.
/// </summary>
internal void WriteStreamingElement(XStreamingElement e)
{
FlushElement();
_element = e;
Write(e.content);
// If Write produced content, some call already flushed (and cleared) _element.
bool contentWritten = _element == null;
FlushElement();
if (contentWritten)
{
_writer.WriteFullEndElement();
}
else
{
_writer.WriteEndElement();
}
_resolver.PopScope();
}
private void WriteString(string s)
{
FlushElement();
_writer.WriteString(s);
}
}
/// <summary>
/// Specifies the event type when an event is raised for an <see cref="XObject"/>.
/// </summary>
// These values are carried by XObjectChangeEventArgs (see its Add/Value usage elsewhere in this file).
public enum XObjectChange
{
/// <summary>
/// An <see cref="XObject"/> has been or will be added to an <see cref="XContainer"/>.
/// </summary>
Add,
/// <summary>
/// An <see cref="XObject"/> has been or will be removed from an <see cref="XContainer"/>.
/// </summary>
Remove,
/// <summary>
/// An <see cref="XObject"/> has been or will be renamed.
/// </summary>
Name,
/// <summary>
/// The value of an <see cref="XObject"/> has been or will be changed.
/// There is a special case for elements. Change in the serialization
/// of an empty element (either from an empty tag to start/end tag
/// pair or vice versa) raises this event.
/// </summary>
Value,
}
/// <summary>
/// Specifies a set of options for Load().
/// </summary>
// Bit flags: values may be combined with bitwise OR.
[Flags()]
public enum LoadOptions
{
/// <summary>Default options.</summary>
None = 0x00000000,
/// <summary>Preserve whitespace.</summary>
[SuppressMessage("Microsoft.Naming", "CA1702:CompoundWordsShouldBeCasedCorrectly", Justification = "Back-compat with System.Xml.")]
PreserveWhitespace = 0x00000001,
/// <summary>Set the BaseUri property.</summary>
SetBaseUri = 0x00000002,
/// <summary>Set the IXmlLineInfo.</summary>
SetLineInfo = 0x00000004,
}
/// <summary>
/// Specifies a set of options for Save().
/// </summary>
// Bit flags: values may be combined with bitwise OR.
[Flags()]
public enum SaveOptions
{
/// <summary>Default options.</summary>
None = 0x00000000,
/// <summary>Disable formatting.</summary>
DisableFormatting = 0x00000001,
/// <summary>Remove duplicate namespace declarations.</summary>
OmitDuplicateNamespaces = 0x00000002,
}
/// <summary>
/// Specifies a set of options for CreateReader().
/// </summary>
// Bit flags: values may be combined with bitwise OR.
[Flags()]
public enum ReaderOptions
{
/// <summary>Default options.</summary>
None = 0x00000000,
/// <summary>Remove duplicate namespace declarations.</summary>
OmitDuplicateNamespaces = 0x00000001,
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text.RegularExpressions;
using System.Xml;
using Microsoft.Build.Evaluation;
using Microsoft.Build.Shared;
using Microsoft.Build.Framework;
using Microsoft.Build.Execution;
using Microsoft.Build.Construction;
using Microsoft.Build.Collections;
using LoggingService = Microsoft.Build.BackEnd.Logging.LoggingService;
using LoggerMode = Microsoft.Build.BackEnd.Logging.LoggerMode;
using InvalidProjectFileException = Microsoft.Build.Exceptions.InvalidProjectFileException;
using InternalUtilities = Microsoft.Build.Internal.Utilities;
using Xunit;
namespace Microsoft.Build.UnitTests.Definition
{
public class ToolsetState_Tests
{
/// <summary>
/// Verifies that tasks from the override directory land only in the override
/// registry and regular tasks only in the standard registry, with "a1"
/// deliberately present in both files.
/// </summary>
[Fact]
public void OverrideTasksAreFoundInOverridePath()
{
//Note Engine's BinPath is distinct from the ToolsVersion's ToolsPath
ProjectCollection e = new ProjectCollection();
string dir = NativeMethodsShared.IsWindows ? "c:\\directory1\\directory2" : "/directory1/directory2";
string overrideDir = NativeMethodsShared.IsWindows ? "c:\\msbuildoverridetasks" : "/msbuildoverridetasks";
Toolset t = new Toolset("toolsversionname", dir, new PropertyDictionary<ProjectPropertyInstance>(), new ProjectCollection(), new DirectoryGetFiles(this.getFiles), new LoadXmlFromPath(this.loadXmlFromPath), overrideDir, new DirectoryExists(this.directoryExists));
LoggingService service = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
TaskRegistry taskRegistry = (TaskRegistry)t.GetTaskRegistry(service, new BuildEventContext(1, 2, BuildEventContext.InvalidProjectContextId, 4), e.ProjectRootElementCache);
TaskRegistry taskoverrideRegistry = (TaskRegistry)t.GetOverrideTaskRegistry(service, new BuildEventContext(1, 2, BuildEventContext.InvalidProjectContextId, 4), e.ProjectRootElementCache);
string[] expectedRegisteredTasks = { "a1", "a2", "a3", "a4", "b1", "e1", "g1", "g2", "g3" };
string[] expectedOverrideTasks = { "a1" /* special because it is in the override tasks file as well as in the tasks file*/, "oa1", "oa2", "og1", "ooo" };
string[] unexpectedRegisteredTasks = { "c1", "d1", "f1", "11", "12", "13", "21", "oa1", "oa2", "og1", "ooo" };
string[] unexpectedOverrideRegisteredTasks = { "c1", "d1", "f1", "11", "12", "13", "21", "a2", "a3", "a4", "b1", "e1", "g1", "g2", "g3" };
foreach (string expectedRegisteredTask in expectedRegisteredTasks)
{
Assert.True(taskRegistry.TaskRegistrations.ContainsKey(new TaskRegistry.RegisteredTaskIdentity(expectedRegisteredTask, null)),
String.Format("Expected task '{0}' registered!", expectedRegisteredTask));
}
foreach (string expectedRegisteredTask in expectedOverrideTasks)
{
Assert.True(taskoverrideRegistry.TaskRegistrations.ContainsKey(new TaskRegistry.RegisteredTaskIdentity(expectedRegisteredTask, null)),
String.Format("Expected task '{0}' registered!", expectedRegisteredTask));
}
foreach (string unexpectedRegisteredTask in unexpectedRegisteredTasks)
{
Assert.False(taskRegistry.TaskRegistrations.ContainsKey(new TaskRegistry.RegisteredTaskIdentity(unexpectedRegisteredTask, null)),
String.Format("Unexpected task '{0}' registered!", unexpectedRegisteredTask));
}
foreach (string unexpectedRegisteredTask in unexpectedOverrideRegisteredTasks)
{
Assert.False(taskoverrideRegistry.TaskRegistrations.ContainsKey(new TaskRegistry.RegisteredTaskIdentity(unexpectedRegisteredTask, null)),
String.Format("Unexpected task '{0}' registered!", unexpectedRegisteredTask));
}
}
/// <summary>
/// A relative override-tasks path must be rejected: the override registry
/// comes back empty and a "not rooted path" failure is logged.
/// </summary>
[Fact]
public void OverrideTaskPathIsRelative()
{
//Note Engine's BinPath is distinct from the ToolsVersion's ToolsPath
ProjectCollection e = new ProjectCollection();
Toolset t = new Toolset("toolsversionname", "c:\\directory1\\directory2", new PropertyDictionary<ProjectPropertyInstance>(), new ProjectCollection(), new DirectoryGetFiles(this.getFiles), new LoadXmlFromPath(this.loadXmlFromPath), "msbuildoverridetasks", new DirectoryExists(this.directoryExists));
MockLogger mockLogger = new MockLogger();
LoggingService service = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
service.RegisterLogger(mockLogger);
TaskRegistry taskoverrideRegistry = (TaskRegistry)t.GetOverrideTaskRegistry(service, new BuildEventContext(1, 2, BuildEventContext.InvalidProjectContextId, 4), e.ProjectRootElementCache);
Assert.NotNull(taskoverrideRegistry);
Assert.Empty(taskoverrideRegistry.TaskRegistrations);
string rootedPathMessage = ResourceUtilities.FormatResourceStringStripCodeAndKeyword("OverrideTaskNotRootedPath", "msbuildoverridetasks");
mockLogger.AssertLogContains(ResourceUtilities.FormatResourceStringStripCodeAndKeyword("OverrideTasksFileFailure", rootedPathMessage));
}
/// <summary>
/// An override-tasks path containing invalid path characters must be rejected
/// with warning MSB4194 and an empty override registry. Windows-only path
/// semantics, hence the netcoreapp skip.
/// </summary>
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.Netcoreapp)]
public void OverrideTaskPathHasInvalidChars()
{
ProjectCollection e = new ProjectCollection();
Toolset t = new Toolset("toolsversionname", "c:\\directory1\\directory2", new PropertyDictionary<ProjectPropertyInstance>(), new ProjectCollection(), new DirectoryGetFiles(this.getFiles), new LoadXmlFromPath(this.loadXmlFromPath), "k:\\||^%$#*msbuildoverridetasks", new DirectoryExists(this.directoryExists));
MockLogger mockLogger = new MockLogger();
LoggingService service = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
service.RegisterLogger(mockLogger);
TaskRegistry taskoverrideRegistry = (TaskRegistry)t.GetOverrideTaskRegistry(service, new BuildEventContext(1, 2, BuildEventContext.InvalidProjectContextId, 4), e.ProjectRootElementCache);
Assert.NotNull(taskoverrideRegistry);
Assert.Empty(taskoverrideRegistry.TaskRegistrations);
mockLogger.AssertLogContains("MSB4194");
}
/// <summary>
/// An override-tasks path longer than the platform limit must be rejected:
/// the override registry is empty and a "not rooted path" failure is logged.
/// </summary>
[Fact]
public void OverrideTaskPathHasTooLongOfAPath()
{
// 6000+ characters comfortably exceeds any MAX_PATH-style limit.
string tooLong = "c:\\" + new string('C', 6000);
ProjectCollection e = new ProjectCollection();
Toolset t = new Toolset("toolsversionname", "c:\\directory1\\directory2", new PropertyDictionary<ProjectPropertyInstance>(), new ProjectCollection(), new DirectoryGetFiles(this.getFiles), new LoadXmlFromPath(this.loadXmlFromPath), tooLong, new DirectoryExists(this.directoryExists));
MockLogger mockLogger = new MockLogger();
LoggingService service = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
service.RegisterLogger(mockLogger);
TaskRegistry taskoverrideRegistry = (TaskRegistry)t.GetOverrideTaskRegistry(service, new BuildEventContext(1, 2, BuildEventContext.InvalidProjectContextId, 4), e.ProjectRootElementCache);
Assert.NotNull(taskoverrideRegistry);
Assert.Empty(taskoverrideRegistry.TaskRegistrations);
string rootedPathMessage = ResourceUtilities.FormatResourceStringStripCodeAndKeyword("OverrideTaskNotRootedPath", tooLong);
mockLogger.AssertLogContains(ResourceUtilities.FormatResourceStringStripCodeAndKeyword("OverrideTasksFileFailure", rootedPathMessage));
}
/// <summary>
/// An override-tasks path pointing at a nonexistent directory must produce an
/// empty override registry and a logged failure.
/// </summary>
[Fact]
public void OverrideTaskPathIsNotFound()
{
//Note Engine's BinPath is distinct from the ToolsVersion's ToolsPath
ProjectCollection e = new ProjectCollection();
Toolset t = new Toolset("toolsversionname", "c:\\directory1\\directory2", new PropertyDictionary<ProjectPropertyInstance>(), new ProjectCollection(), new DirectoryGetFiles(this.getFiles), new LoadXmlFromPath(this.loadXmlFromPath), "k:\\Thecatinthehat", new DirectoryExists(this.directoryExists));
MockLogger mockLogger = new MockLogger();
LoggingService service = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
service.RegisterLogger(mockLogger);
TaskRegistry taskoverrideRegistry = (TaskRegistry)t.GetOverrideTaskRegistry(service, new BuildEventContext(1, 2, BuildEventContext.InvalidProjectContextId, 4), e.ProjectRootElementCache);
Assert.NotNull(taskoverrideRegistry);
Assert.Empty(taskoverrideRegistry.TaskRegistrations);
string rootedPathMessage = ResourceUtilities.FormatResourceStringStripCodeAndKeyword("OverrideTaskNotRootedPath", "k:\\Thecatinthehat");
mockLogger.AssertLogContains(ResourceUtilities.FormatResourceStringStripCodeAndKeyword("OverrideTasksFileFailure", rootedPathMessage));
}
/// <summary>
/// Verifies that default tasks are loaded from the ToolsVersion's ToolsPath:
/// the expected task names appear in the registry and unrelated names do not.
/// </summary>
[Fact]
public void DefaultTasksAreFoundInToolsPath()
{
//Note Engine's BinPath is distinct from the ToolsVersion's ToolsPath
Toolset t = new Toolset(
"toolsversionname",
NativeMethodsShared.IsWindows ? "c:\\directory1\\directory2" : "/directory1/directory2",
new PropertyDictionary<ProjectPropertyInstance>(),
new ProjectCollection(),
new DirectoryGetFiles(this.getFiles),
new LoadXmlFromPath(this.loadXmlFromPath),
null,
new DirectoryExists(this.directoryExists));
TaskRegistry taskRegistry = (TaskRegistry)t.GetTaskRegistry(null, new BuildEventContext(1, 2, BuildEventContext.InvalidProjectContextId, 4), ProjectCollection.GlobalProjectCollection.ProjectRootElementCache);
string[] expectedRegisteredTasks = { "a1", "a2", "a3", "a4", "b1", "e1", "g1", "g2", "g3" };
string[] unexpectedRegisteredTasks = { "c1", "d1", "f1", "11", "12", "13", "21" };
foreach (string expectedRegisteredTask in expectedRegisteredTasks)
{
Assert.True(taskRegistry.TaskRegistrations.ContainsKey(new TaskRegistry.RegisteredTaskIdentity(expectedRegisteredTask, null)),
String.Format("Expected task '{0}' registered!", expectedRegisteredTask));
}
foreach (string unexpectedRegisteredTask in unexpectedRegisteredTasks)
{
Assert.False(taskRegistry.TaskRegistrations.ContainsKey(new TaskRegistry.RegisteredTaskIdentity(unexpectedRegisteredTask, null)),
String.Format("Unexpected task '{0}' registered!", unexpectedRegisteredTask));
}
}
/// <summary>
/// When the tools path contains no default tasks files, exactly one warning
/// must be logged and no tasks registered.
/// </summary>
[Fact]
public void WarningLoggedIfNoDefaultTasksFound()
{
//Note Engine's BinPath is distinct from the ToolsVersion's ToolsPath
// NOTE(review): local 'p' appears unused (the Toolset below constructs its
// own ProjectCollection) — confirm whether it can be removed.
ProjectCollection p = new ProjectCollection();
MockLogger mockLogger = new MockLogger();
LoggingService service = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
service.RegisterLogger(mockLogger);
Toolset t = new Toolset("toolsversionname", "c:\\directory1\\directory2\\doesntexist", new PropertyDictionary<ProjectPropertyInstance>(), new ProjectCollection(), new DirectoryGetFiles(this.getFiles), new LoadXmlFromPath(this.loadXmlFromPath), null, new DirectoryExists(this.directoryExists));
TaskRegistry taskRegistry = (TaskRegistry)t.GetTaskRegistry(service, BuildEventContext.Invalid, ProjectCollection.GlobalProjectCollection.ProjectRootElementCache);
string[] unexpectedRegisteredTasks = { "a1", "a2", "a3", "a4", "b1", "c1", "d1", "e1", "f1", "g1", "g2", "g3", "11", "12", "13", "21" };
Assert.Equal(1, mockLogger.WarningCount); // "Expected 1 warning logged!"
foreach (string unexpectedRegisteredTask in unexpectedRegisteredTasks)
{
Assert.False(taskRegistry.TaskRegistrations.ContainsKey(new TaskRegistry.RegisteredTaskIdentity(unexpectedRegisteredTask, null)),
String.Format("Unexpected task '{0}' registered!", unexpectedRegisteredTask));
}
}
/// <summary>
/// A tools path containing invalid characters must produce exactly one warning
/// when building the task registry.
/// </summary>
[Fact]
public void InvalidToolPath()
{
//Note Engine's BinPath is distinct from the ToolsVersion's ToolsPath
ProjectCollection p = new ProjectCollection();
MockLogger mockLogger = new MockLogger();
LoggingService service = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
service.RegisterLogger(mockLogger);
Toolset t = new Toolset("toolsversionname", "invalid||path", new PropertyDictionary<ProjectPropertyInstance>(), p, new DirectoryGetFiles(this.getFiles), new LoadXmlFromPath(this.loadXmlFromPath), null, new DirectoryExists(this.directoryExists));
TaskRegistry taskRegistry = (TaskRegistry)t.GetTaskRegistry(service, BuildEventContext.Invalid, ProjectCollection.GlobalProjectCollection.ProjectRootElementCache);
// Full log dumped to aid diagnosis when the assertion below fails.
Console.WriteLine(mockLogger.FullLog);
Assert.Equal(1, mockLogger.WarningCount); // "Expected a warning for invalid character in toolpath"
}
/// <summary>
/// Make sure when we read in the tasks files off disk that they come in a sorted order so that there is a deterministic way of
/// figuring out the order the files were read in.
/// </summary>
[Fact]
public void VerifyTasksFilesAreInSortedOrder()
{
//Note Engine's BinPath is distinct from the ToolsVersion's ToolsPath
ProjectCollection p = new ProjectCollection();
MockLogger mockLogger = new MockLogger();
LoggingService service = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
service.RegisterLogger(mockLogger);
string dir = NativeMethodsShared.IsWindows ? "c:\\directory1\\directory2" : "/directory1/directory2";
string overrideDir = NativeMethodsShared.IsWindows ? "c:\\msbuildoverridetasks" : "/msbuildoverridetasks";
string[] foundFiles = Toolset.GetTaskFiles(
new DirectoryGetFiles(this.getFiles),
service,
BuildEventContext.Invalid,
"*.tasks",
dir,
String.Empty);
string[] foundoverrideFiles = Toolset.GetTaskFiles(
new DirectoryGetFiles(this.getFiles),
service,
BuildEventContext.Invalid,
"*.overridetasks",
overrideDir,
String.Empty);
// Build the expected lists from the test's candidate files, filtered by
// directory and extension, then sort them case-insensitively.
List<string> sortedTasksExpectedPaths = new List<string>();
List<string> sortedOverrideExpectedPaths = new List<string>();
foreach (DefaultTasksFile file in _defaultTasksFileCandidates)
{
if (Path.GetDirectoryName(file.Path).Equals(dir, StringComparison.OrdinalIgnoreCase)
&& file.Path.EndsWith(".tasks", StringComparison.OrdinalIgnoreCase))
{
sortedTasksExpectedPaths.Add(file.Path);
}
if (Path.GetDirectoryName(file.Path).Equals(overrideDir, StringComparison.OrdinalIgnoreCase)
&& file.Path.EndsWith(".overridetasks", StringComparison.OrdinalIgnoreCase))
{
sortedOverrideExpectedPaths.Add(file.Path);
}
}
sortedTasksExpectedPaths.Sort(StringComparer.OrdinalIgnoreCase);
sortedOverrideExpectedPaths.Sort(StringComparer.OrdinalIgnoreCase);
Assert.Equal(sortedTasksExpectedPaths.Count, foundFiles.Length);
for (int i = 0; i < foundFiles.Length; i++)
{
Assert.Equal(foundFiles[i], sortedTasksExpectedPaths[i]);
}
Assert.Equal(sortedOverrideExpectedPaths.Count, foundoverrideFiles.Length);
for (int i = 0; i < foundoverrideFiles.Length; i++)
{
Assert.Equal(foundoverrideFiles[i], sortedOverrideExpectedPaths[i]);
}
}
/// <summary>
/// With MSBUILDTREATHIGHERTOOLSVERSIONASCURRENT set, a project requesting a
/// too-high ToolsVersion (98.6) must build successfully using the current
/// default tools version. Environment variables are restored in 'finally'.
/// </summary>
[Fact]
public void InvalidToolsVersionTooHighMappedToCurrent()
{
string oldLegacyToolsVersion = Environment.GetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION");
string oldTreatHigherToolsVersions = Environment.GetEnvironmentVariable("MSBUILDTREATHIGHERTOOLSVERSIONASCURRENT");
try
{
Environment.SetEnvironmentVariable("MSBUILDTREATHIGHERTOOLSVERSIONASCURRENT", "1");
Environment.SetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION", "1");
InternalUtilities.RefreshInternalEnvironmentValues();
ProjectCollection p = new ProjectCollection();
MockLogger mockLogger = new MockLogger();
LoggingService service = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
service.RegisterLogger(mockLogger);
bool success = false;
Project project = new Project(XmlReader.Create(new StringReader(@"<Project ToolsVersion='98.6' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")), null /* no global properties */, null /* don't explicitly set the toolsversion */, p);
success = project.Build(mockLogger);
Assert.True(success);
// The log should mention both the requested version and the mapped default.
mockLogger.AssertLogContains("ToolsVersion=\"98.6\"");
mockLogger.AssertLogContains(ObjectModelHelpers.CleanupFileContents("ToolsVersion=\"msbuilddefaulttoolsversion\""));
}
finally
{
Environment.SetEnvironmentVariable("MSBUILDTREATHIGHERTOOLSVERSIONASCURRENT", oldTreatHigherToolsVersions);
Environment.SetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION", oldLegacyToolsVersion);
InternalUtilities.RefreshInternalEnvironmentValues();
}
}
/// <summary>
/// A ToolsVersion lower than any known one is remapped to the current default
/// when MSBUILDLEGACYDEFAULTTOOLSVERSION is set.
/// </summary>
[Fact]
public void InvalidToolsVersionMissingLowMappedToCurrent()
{
    string savedLegacyDefault = Environment.GetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION");
    try
    {
        Environment.SetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION", "1");
        InternalUtilities.RefreshInternalEnvironmentValues();
        ProjectCollection collection = new ProjectCollection();
        MockLogger logger = new MockLogger();
        LoggingService loggingService = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
        loggingService.RegisterLogger(logger);
        Project project = new Project(
            XmlReader.Create(new StringReader(@"<Project ToolsVersion='0.1' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")),
            null /* no global properties */,
            null /* don't explicitly set the toolsversion */,
            collection);
        bool built = project.Build(logger);
        Assert.True(built);
        // The requested version is logged, but the current default was actually used.
        logger.AssertLogContains("ToolsVersion=\"0.1\"");
        logger.AssertLogContains(ObjectModelHelpers.CleanupFileContents("ToolsVersion=\"msbuilddefaulttoolsversion\""));
    }
    finally
    {
        Environment.SetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION", savedLegacyDefault);
        InternalUtilities.RefreshInternalEnvironmentValues();
    }
}
/// <summary>
/// A completely unrecognized ToolsVersion string is remapped to the current default
/// when MSBUILDLEGACYDEFAULTTOOLSVERSION is set.
/// </summary>
[Fact]
public void InvalidToolsVersionMissingMappedToCurrent()
{
    string savedLegacyDefault = Environment.GetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION");
    try
    {
        Environment.SetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION", "1");
        InternalUtilities.RefreshInternalEnvironmentValues();
        ProjectCollection collection = new ProjectCollection();
        MockLogger logger = new MockLogger();
        LoggingService loggingService = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
        loggingService.RegisterLogger(logger);
        Project project = new Project(
            XmlReader.Create(new StringReader(@"<Project ToolsVersion='invalidToolsVersion' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")),
            null /* no global properties */,
            null /* don't explicitly set the toolsversion */,
            collection);
        bool built = project.Build(logger);
        Assert.True(built);
        // The bogus version is logged, but the current default was actually used.
        logger.AssertLogContains("ToolsVersion=\"invalidToolsVersion\"");
        logger.AssertLogContains(ObjectModelHelpers.CleanupFileContents("ToolsVersion=\"msbuilddefaulttoolsversion\""));
    }
    finally
    {
        Environment.SetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION", savedLegacyDefault);
        InternalUtilities.RefreshInternalEnvironmentValues();
    }
}
/// <summary>
/// Explicitly requesting a ToolsVersion that is not defined anywhere must fail
/// project creation with InvalidProjectFileException.
/// </summary>
[Fact]
public void InvalidToolsVersion()
{
    Assert.Throws<InvalidProjectFileException>(() =>
    {
        ProjectCollection collection = new ProjectCollection();
        MockLogger logger = new MockLogger();
        LoggingService loggingService = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
        loggingService.RegisterLogger(logger);
        Project project = new Project(
            XmlReader.Create(new StringReader(@"<Project ToolsVersion='invalidToolsVersion' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")),
            null /* no global properties */,
            "goober", // explicit, nonexistent ToolsVersion
            collection);
        project.Build(logger);
        // BANG!
    });
}
/// <summary>
/// Even a valid toolsversion should be forced to the current ToolsVersion if
/// MSBUILDTREATALLTOOLSVERSIONSASCURRENT is set.
/// </summary>
[Fact]
public void ToolsVersionMappedToCurrent()
{
    string savedLegacyDefault = Environment.GetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION");
    string savedTreatAllAsCurrent = Environment.GetEnvironmentVariable("MSBUILDTREATALLTOOLSVERSIONSASCURRENT");
    try
    {
        Environment.SetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION", "1");
        Environment.SetEnvironmentVariable("MSBUILDTREATALLTOOLSVERSIONSASCURRENT", "1");
        InternalUtilities.RefreshInternalEnvironmentValues();
        ProjectCollection collection = new ProjectCollection();
        MockLogger logger = new MockLogger();
        LoggingService loggingService = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
        loggingService.RegisterLogger(logger);
        Project project = new Project(
            XmlReader.Create(new StringReader(@"<Project ToolsVersion='4.0' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")),
            null /* no global properties */,
            null /* don't explicitly set the toolsversion */,
            collection);
        bool built = project.Build(logger);
        Assert.True(built);
        // Valid requested version is logged, but the current default is what was used.
        logger.AssertLogContains("ToolsVersion=\"4.0\"");
        logger.AssertLogContains(ObjectModelHelpers.CleanupFileContents("ToolsVersion=\"msbuilddefaulttoolsversion\""));
    }
    finally
    {
        Environment.SetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION", savedLegacyDefault);
        Environment.SetEnvironmentVariable("MSBUILDTREATALLTOOLSVERSIONSASCURRENT", savedTreatAllAsCurrent);
        InternalUtilities.RefreshInternalEnvironmentValues();
    }
}
#if FEATURE_MULTIPLE_TOOLSETS
/// <summary>
/// Validate that a custom defined toolset is honored
/// </summary>
[Fact]
public void CustomToolsVersionIsHonored()
{
    // Save the override's previous value so it can be restored; the original code
    // clobbered it permanently and leaked the temp project file.
    string savedTreatAllAsCurrent = Environment.GetEnvironmentVariable("MSBUILDTREATALLTOOLSVERSIONSASCURRENT");
    Environment.SetEnvironmentVariable("MSBUILDTREATALLTOOLSVERSIONSASCURRENT", String.Empty);
    string projectPath = null;
    try
    {
        string content = @"<Project ToolsVersion=""14.0"" xmlns=""http://schemas.microsoft.com/developer/msbuild/2003"">
<Target Name=""a"">
<Message Text=""[$(MSBUILDTOOLSVERSION)]"" />
</Target>
</Project>
";
        projectPath = Path.GetTempFileName();
        File.WriteAllText(projectPath, content);
        ProjectCollection p = new ProjectCollection();
        MockLogger mockLogger = new MockLogger();
        LoggingService service = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
        service.RegisterLogger(mockLogger);
        // Clone the "Current" toolset under a custom name and register it.
        Toolset source = p.GetToolset("Current");
        Toolset potato = new Toolset("potato", source.ToolsPath, ProjectCollection.GlobalProjectCollection, source.ToolsPath);
        p.AddToolset(potato);
        Project project = p.LoadProject(projectPath, "potato");
        bool success = project.Build(mockLogger);
        Assert.True(success);
        // $(MSBUILDTOOLSVERSION) must reflect the custom toolset name.
        mockLogger.AssertLogContains("[potato]");
    }
    finally
    {
        Environment.SetEnvironmentVariable("MSBUILDTREATALLTOOLSVERSIONSASCURRENT", savedTreatAllAsCurrent);
        if (projectPath != null)
        {
            File.Delete(projectPath);
        }
    }
}
/// <summary>
/// If the current ToolsVersion doesn't exist, we should fall back to what's in the project file.
/// </summary>
[Fact]
public void ToolsVersionFallbackIfCurrentToolsVersionDoesNotExist()
{
    ProjectCollection collection = new ProjectCollection();
    // Remove the default toolset so fallback behavior kicks in.
    collection.RemoveToolset(ObjectModelHelpers.MSBuildDefaultToolsVersion);
    MockLogger logger = new MockLogger();
    LoggingService loggingService = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
    loggingService.RegisterLogger(logger);
    Project project = new Project(
        XmlReader.Create(new StringReader(@"<Project ToolsVersion='4.0' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")),
        null /* no global properties */,
        null /* don't explicitly set the toolsversion */,
        collection);
    // The project-file version must win since the current default is unavailable.
    Assert.Equal("4.0", project.ToolsVersion);
    bool built = project.Build(logger);
    Assert.True(built);
    logger.AssertLogContains("\"4.0\"");
    logger.AssertLogDoesntContain(ObjectModelHelpers.CleanupFileContents("\"msbuilddefaulttoolsversion\""));
}
#endif
/// <summary>
/// If MSBUILDTREATALLTOOLSVERSIONSASCURRENT is not set, and there is not an explicit ToolsVersion passed to the project,
/// then if MSBUILDDEFAULTTOOLSVERSION is set and exists, use that ToolsVersion.
/// </summary>
[Fact]
public void ToolsVersionFromEnvironmentVariable()
{
    string savedDefaultToolsVersion = Environment.GetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION");
    try
    {
        Environment.SetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION", "foo");
        InternalUtilities.RefreshInternalEnvironmentValues();
        ProjectCollection collection = new ProjectCollection();
        // Register a toolset named "foo" so the environment override resolves.
        collection.AddToolset(new Toolset("foo", @"c:\foo", collection, @"c:\foo\override"));
        MockLogger logger = new MockLogger();
        LoggingService loggingService = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
        loggingService.RegisterLogger(logger);
        Project project = new Project(
            XmlReader.Create(new StringReader(@"<Project ToolsVersion='4.0' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")),
            null /* no global properties */,
            null /* don't explicitly set the toolsversion */,
            collection);
        bool built = project.Build(logger);
        Assert.True(built);
        logger.AssertLogContains("ToolsVersion=\"4.0\"");
        logger.AssertLogContains("ToolsVersion=\"foo\"");
    }
    finally
    {
        Environment.SetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION", savedDefaultToolsVersion);
        InternalUtilities.RefreshInternalEnvironmentValues();
    }
}
/// <summary>
/// If MSBUILDTREATALLTOOLSVERSIONSASCURRENT is not set, and there is not an explicit ToolsVersion passed to the project,
/// and if MSBUILDDEFAULTTOOLSVERSION is set but to an invalid ToolsVersion, fall back to current.
/// </summary>
[Fact]
public void InvalidToolsVersionFromEnvironmentVariable()
{
    string savedDefaultToolsVersion = Environment.GetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION");
    try
    {
        // "foo" is deliberately NOT registered as a toolset here.
        Environment.SetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION", "foo");
        InternalUtilities.RefreshInternalEnvironmentValues();
        ProjectCollection collection = new ProjectCollection();
        MockLogger logger = new MockLogger();
        LoggingService loggingService = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
        loggingService.RegisterLogger(logger);
        Project project = new Project(
            XmlReader.Create(new StringReader(@"<Project ToolsVersion='4.0' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")),
            null /* no global properties */,
            null /* don't explicitly set the toolsversion */,
            collection);
        bool built = project.Build(logger);
        Assert.True(built);
        logger.AssertLogContains("ToolsVersion=\"4.0\"");
        // falls back to the current ToolsVersion
        logger.AssertLogContains(ObjectModelHelpers.CleanupFileContents("ToolsVersion=\"msbuilddefaulttoolsversion\""));
    }
    finally
    {
        Environment.SetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION", savedDefaultToolsVersion);
        InternalUtilities.RefreshInternalEnvironmentValues();
    }
}
/// <summary>
/// Even a valid toolsversion should be forced to the current ToolsVersion if
/// MSBUILDTREATALLTOOLSVERSIONSASCURRENT is set. (Builds via Project.CreateProjectInstance.)
/// </summary>
[Fact]
public void ToolsVersionMappedToCurrent_CreateProjectInstance()
{
    string savedLegacyDefault = Environment.GetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION");
    string savedTreatAllAsCurrent = Environment.GetEnvironmentVariable("MSBUILDTREATALLTOOLSVERSIONSASCURRENT");
    try
    {
        Environment.SetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION", "1");
        Environment.SetEnvironmentVariable("MSBUILDTREATALLTOOLSVERSIONSASCURRENT", "1");
        InternalUtilities.RefreshInternalEnvironmentValues();
        ProjectCollection collection = new ProjectCollection();
        MockLogger logger = new MockLogger();
        LoggingService loggingService = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
        loggingService.RegisterLogger(logger);
        Project project = new Project(
            XmlReader.Create(new StringReader(@"<Project ToolsVersion='4.0' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")),
            null /* no global properties */,
            null /* don't explicitly set the toolsversion */,
            collection);
        // Build through a ProjectInstance created from the Project.
        ProjectInstance instance = project.CreateProjectInstance();
        bool built = instance.Build(new ILogger[] { logger });
        Assert.True(built);
        logger.AssertLogContains("ToolsVersion=\"4.0\"");
        logger.AssertLogContains(ObjectModelHelpers.CleanupFileContents("ToolsVersion=\"msbuilddefaulttoolsversion\""));
    }
    finally
    {
        Environment.SetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION", savedLegacyDefault);
        Environment.SetEnvironmentVariable("MSBUILDTREATALLTOOLSVERSIONSASCURRENT", savedTreatAllAsCurrent);
        InternalUtilities.RefreshInternalEnvironmentValues();
    }
}
#if FEATURE_MULTIPLE_TOOLSETS
/// <summary>
/// If the current ToolsVersion doesn't exist, we should fall back to what's in the project file.
/// (Builds via Project.CreateProjectInstance.)
/// </summary>
[Fact]
public void ToolsVersionFallbackIfCurrentToolsVersionDoesNotExist_CreateProjectInstance()
{
    ProjectCollection collection = new ProjectCollection();
    // Remove the default toolset so fallback behavior kicks in.
    collection.RemoveToolset(ObjectModelHelpers.MSBuildDefaultToolsVersion);
    MockLogger logger = new MockLogger();
    LoggingService loggingService = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
    loggingService.RegisterLogger(logger);
    Project project = new Project(
        XmlReader.Create(new StringReader(@"<Project ToolsVersion='4.0' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")),
        null /* no global properties */,
        null /* don't explicitly set the toolsversion */,
        collection);
    ProjectInstance instance = project.CreateProjectInstance();
    Assert.Equal("4.0", instance.ToolsVersion);
    bool built = instance.Build(new ILogger[] { logger });
    Assert.True(built);
    logger.AssertLogContains("\"4.0\"");
    logger.AssertLogDoesntContain(ObjectModelHelpers.CleanupFileContents("\"msbuilddefaulttoolsversion\""));
}
#endif
/// <summary>
/// If MSBUILDTREATALLTOOLSVERSIONSASCURRENT is not set, and there is not an explicit ToolsVersion passed to the project,
/// then if MSBUILDDEFAULTTOOLSVERSION is set and exists, use that ToolsVersion.
/// (Builds via Project.CreateProjectInstance.)
/// </summary>
[Fact]
public void ToolsVersionFromEnvironmentVariable_CreateProjectInstance()
{
    string savedDefaultToolsVersion = Environment.GetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION");
    try
    {
        Environment.SetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION", "foo");
        InternalUtilities.RefreshInternalEnvironmentValues();
        ProjectCollection collection = new ProjectCollection();
        // Register a toolset named "foo" so the environment override resolves.
        collection.AddToolset(new Toolset("foo", @"c:\foo", collection, @"c:\foo\override"));
        MockLogger logger = new MockLogger();
        LoggingService loggingService = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
        loggingService.RegisterLogger(logger);
        Project project = new Project(
            XmlReader.Create(new StringReader(@"<Project ToolsVersion='4.0' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")),
            null /* no global properties */,
            null /* don't explicitly set the toolsversion */,
            collection);
        ProjectInstance instance = project.CreateProjectInstance();
        bool built = instance.Build(new ILogger[] { logger });
        Assert.True(built);
        logger.AssertLogContains("ToolsVersion=\"4.0\"");
        logger.AssertLogContains("ToolsVersion=\"foo\"");
    }
    finally
    {
        Environment.SetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION", savedDefaultToolsVersion);
        InternalUtilities.RefreshInternalEnvironmentValues();
    }
}
#if FEATURE_MULTIPLE_TOOLSETS
/// <summary>
/// If MSBUILDTREATALLTOOLSVERSIONSASCURRENT is not set, and there is not an explicit ToolsVersion passed to the project,
/// and if MSBUILDDEFAULTTOOLSVERSION is set but to an invalid ToolsVersion, fall back to current.
/// (Builds via Project.CreateProjectInstance.)
/// </summary>
[Fact]
public void InvalidToolsVersionFromEnvironmentVariable_CreateProjectInstance()
{
    string savedDefaultToolsVersion = Environment.GetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION");
    try
    {
        // "foo" is deliberately NOT registered as a toolset here.
        Environment.SetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION", "foo");
        InternalUtilities.RefreshInternalEnvironmentValues();
        ProjectCollection collection = new ProjectCollection();
        MockLogger logger = new MockLogger();
        LoggingService loggingService = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
        loggingService.RegisterLogger(logger);
        Project project = new Project(
            XmlReader.Create(new StringReader(@"<Project ToolsVersion='4.0' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")),
            null /* no global properties */,
            null /* don't explicitly set the toolsversion */,
            collection);
        ProjectInstance instance = project.CreateProjectInstance();
        bool built = instance.Build(new ILogger[] { logger });
        Assert.True(built);
        logger.AssertLogContains("ToolsVersion=\"4.0\"");
        // falls back to the current ToolsVersion
        logger.AssertLogContains(ObjectModelHelpers.CleanupFileContents("ToolsVersion=\"msbuilddefaulttoolsversion\""));
    }
    finally
    {
        Environment.SetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION", savedDefaultToolsVersion);
        InternalUtilities.RefreshInternalEnvironmentValues();
    }
}
#endif
/// <summary>
/// Even a valid toolsversion should be forced to the current ToolsVersion if
/// MSBUILDTREATALLTOOLSVERSIONSASCURRENT is set. (Builds via the ProjectInstance constructor.)
/// </summary>
[Fact]
public void ToolsVersionMappedToCurrent_ProjectInstance()
{
    string savedLegacyDefault = Environment.GetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION");
    string savedTreatAllAsCurrent = Environment.GetEnvironmentVariable("MSBUILDTREATALLTOOLSVERSIONSASCURRENT");
    try
    {
        Environment.SetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION", "1");
        Environment.SetEnvironmentVariable("MSBUILDTREATALLTOOLSVERSIONSASCURRENT", "1");
        InternalUtilities.RefreshInternalEnvironmentValues();
        ProjectCollection collection = new ProjectCollection();
        MockLogger logger = new MockLogger();
        LoggingService loggingService = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
        loggingService.RegisterLogger(logger);
        Project project = new Project(
            XmlReader.Create(new StringReader(@"<Project ToolsVersion='4.0' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")),
            null /* no global properties */,
            null /* don't explicitly set the toolsversion */,
            collection);
        // Build through a ProjectInstance constructed directly from the project XML.
        ProjectInstance instance = new ProjectInstance(project.Xml, null /* no global properties */, null /* don't explicitly set the toolsversion */, collection);
        bool built = instance.Build(new ILogger[] { logger });
        Assert.True(built);
        logger.AssertLogContains("ToolsVersion=\"4.0\"");
        logger.AssertLogContains(ObjectModelHelpers.CleanupFileContents("ToolsVersion=\"msbuilddefaulttoolsversion\""));
    }
    finally
    {
        Environment.SetEnvironmentVariable("MSBUILDLEGACYDEFAULTTOOLSVERSION", savedLegacyDefault);
        Environment.SetEnvironmentVariable("MSBUILDTREATALLTOOLSVERSIONSASCURRENT", savedTreatAllAsCurrent);
        InternalUtilities.RefreshInternalEnvironmentValues();
    }
}
#if FEATURE_MULTIPLE_TOOLSETS
/// <summary>
/// If the current ToolsVersion doesn't exist, we should fall back to what's in the project file.
/// (Builds via the ProjectInstance constructor.)
/// </summary>
[Fact]
public void ToolsVersionFallbackIfCurrentToolsVersionDoesNotExist_ProjectInstance()
{
    ProjectCollection collection = new ProjectCollection();
    // Remove the default toolset so fallback behavior kicks in.
    collection.RemoveToolset(ObjectModelHelpers.MSBuildDefaultToolsVersion);
    MockLogger logger = new MockLogger();
    LoggingService loggingService = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
    loggingService.RegisterLogger(logger);
    Project project = new Project(
        XmlReader.Create(new StringReader(@"<Project ToolsVersion='4.0' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")),
        null /* no global properties */,
        null /* don't explicitly set the toolsversion */,
        collection);
    ProjectInstance instance = new ProjectInstance(project.Xml, null /* no global properties */, null /* don't explicitly set the toolsversion */, collection);
    Assert.Equal("4.0", instance.ToolsVersion);
    bool built = instance.Build(new ILogger[] { logger });
    Assert.True(built);
    logger.AssertLogContains("\"4.0\"");
    logger.AssertLogDoesntContain(ObjectModelHelpers.CleanupFileContents("\"msbuilddefaulttoolsversion\""));
}
/// <summary>
/// If MSBUILDTREATALLTOOLSVERSIONSASCURRENT is not set, and there is not an explicit ToolsVersion passed to the project,
/// then if MSBUILDDEFAULTTOOLSVERSION is set and exists, use that ToolsVersion.
/// (Builds via the ProjectInstance constructor.)
/// </summary>
[Fact]
public void ToolsVersionFromEnvironmentVariable_ProjectInstance()
{
    string savedDefaultToolsVersion = Environment.GetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION");
    try
    {
        Environment.SetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION", "foo");
        InternalUtilities.RefreshInternalEnvironmentValues();
        ProjectCollection collection = new ProjectCollection();
        // Register a toolset named "foo" so the environment override resolves.
        collection.AddToolset(new Toolset("foo", @"c:\foo", collection, @"c:\foo\override"));
        MockLogger logger = new MockLogger();
        LoggingService loggingService = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
        loggingService.RegisterLogger(logger);
        Project project = new Project(
            XmlReader.Create(new StringReader(@"<Project ToolsVersion='4.0' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")),
            null /* no global properties */,
            null /* don't explicitly set the toolsversion */,
            collection);
        ProjectInstance instance = new ProjectInstance(project.Xml, null /* no global properties */, null /* don't explicitly set the toolsversion */, collection);
        bool built = instance.Build(new ILogger[] { logger });
        Assert.True(built);
        logger.AssertLogContains("ToolsVersion=\"4.0\"");
        logger.AssertLogContains("ToolsVersion=\"foo\"");
    }
    finally
    {
        Environment.SetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION", savedDefaultToolsVersion);
        InternalUtilities.RefreshInternalEnvironmentValues();
    }
}
#endif
/// <summary>
/// If MSBUILDTREATALLTOOLSVERSIONSASCURRENT is not set, and there is not an explicit ToolsVersion passed to the project,
/// and if MSBUILDDEFAULTTOOLSVERSION is set but to an invalid ToolsVersion, fall back to current.
/// (Builds via the ProjectInstance constructor.)
/// </summary>
[Fact]
public void InvalidToolsVersionFromEnvironmentVariable_ProjectInstance()
{
    string savedDefaultToolsVersion = Environment.GetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION");
    try
    {
        // "foo" is deliberately NOT registered as a toolset here.
        Environment.SetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION", "foo");
        InternalUtilities.RefreshInternalEnvironmentValues();
        ProjectCollection collection = new ProjectCollection();
        MockLogger logger = new MockLogger();
        LoggingService loggingService = (LoggingService)LoggingService.CreateLoggingService(LoggerMode.Synchronous, 1);
        loggingService.RegisterLogger(logger);
        Project project = new Project(
            XmlReader.Create(new StringReader(@"<Project ToolsVersion='4.0' xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<Target Name='Foo'>
</Target>
</Project>")),
            null /* no global properties */,
            null /* don't explicitly set the toolsversion */,
            collection);
        ProjectInstance instance = new ProjectInstance(project.Xml, null /* no global properties */, null /* don't explicitly set the toolsversion */, collection);
        bool built = instance.Build(new ILogger[] { logger });
        Assert.True(built);
        logger.AssertLogContains("ToolsVersion=\"4.0\"");
        // falls back to the current ToolsVersion
        logger.AssertLogContains(ObjectModelHelpers.CleanupFileContents("ToolsVersion=\"msbuilddefaulttoolsversion\""));
    }
    finally
    {
        Environment.SetEnvironmentVariable("MSBUILDDEFAULTTOOLSVERSION", savedDefaultToolsVersion);
        InternalUtilities.RefreshInternalEnvironmentValues();
    }
}
/// <summary>
/// Inline tasks found in a .tasks file only have properties expanded.
/// (When they are in a regular MSBuild file, items are also expanded.)
/// </summary>
[Fact]
public void InlineTasksInDotTasksFile()
{
    Toolset toolset = new Toolset(
        "t",
        NativeMethodsShared.IsWindows ? "c:\\inline" : "/inline",
        new PropertyDictionary<ProjectPropertyInstance>(),
        new ProjectCollection(),
        new DirectoryGetFiles(this.getFiles),
        new LoadXmlFromPath(this.loadXmlFromPath),
        null,
        new DirectoryExists(directoryExists));
    // Success criterion: this does not crash while loading the inline task
    // (i.e. no attempt to expand items when none are available).
    TaskRegistry registry = (TaskRegistry)toolset.GetTaskRegistry(
        null,
        new BuildEventContext(1, 2, BuildEventContext.InvalidProjectContextId, 4),
        ProjectCollection.GlobalProjectCollection.ProjectRootElementCache);
}
/// <summary>
/// Indexes the candidate tasks files by path so the mock file-system callbacks
/// can serve their contents.
/// </summary>
public ToolsetState_Tests()
{
    _defaultTasksFileMap = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
    for (int i = 0; i < _defaultTasksFileCandidates.Length; i++)
    {
        _defaultTasksFileMap.Add(_defaultTasksFileCandidates[i].Path, _defaultTasksFileCandidates[i].XmlContents);
    }
}
/// <summary>
/// Mock DirectoryExists callback: only override-tasks directories "exist".
/// </summary>
private bool directoryExists(string path)
{
    // Touch the real file system first so invalid paths throw the same exceptions
    // they would in production.
    Directory.Exists(path);
    bool isOverrideTasksDirectory = path.Contains("msbuildoverridetasks");
    return isOverrideTasksDirectory;
}
/// <summary>
/// Mock DirectoryGetFiles callback: returns the fixture paths located directly in
/// <paramref name="path"/> whose file names match the cmd.exe-style wildcard
/// <paramref name="pattern"/>.
/// </summary>
private string[] getFiles(string path, string pattern)
{
    // Cause an exception if the path is invalid
    Path.GetFileName(path);
    string normalizedPath = path.EndsWith(Path.DirectorySeparatorChar.ToString())
        ? path.Substring(0, path.Length - 1)
        : path;
    // NOTE: this is only a very minimal translation of a basic, cmd.exe-style
    // wildcard into something Regex.IsMatch understands.
    string regexPattern = "^" + pattern.Replace(".", "\\.").Replace("*", "[\\w\\W]*") + "$";
    List<string> matches = new List<string>();
    foreach (string candidate in _defaultTasksFileMap.Keys)
    {
        bool sameFolder = 0 == String.Compare(
            Path.GetDirectoryName(candidate),
            normalizedPath,
            StringComparison.OrdinalIgnoreCase);
        if (sameFolder && Regex.IsMatch(Path.GetFileName(candidate), regexPattern))
        {
            matches.Add(candidate);
        }
    }
    return matches.ToArray();
}
/// <summary>
/// Mock LoadXmlFromPath callback: serves the canned XML registered for
/// <paramref name="path"/> instead of reading from disk.
/// Throws KeyNotFoundException for unknown paths.
/// </summary>
private XmlDocumentWithLocation loadXmlFromPath(string path)
{
    XmlDocumentWithLocation document = new XmlDocumentWithLocation();
    document.LoadXml(_defaultTasksFileMap[path]);
    return document;
}
// Maps each candidate tasks-file path to its canned XML contents; populated in the constructor.
private readonly Dictionary<string, string> _defaultTasksFileMap;
// The fake file system served by getFiles/loadXmlFromPath. Entries deliberately span
// several directories, a second drive, and non-matching extensions so directory and
// wildcard filtering can be exercised.
private DefaultTasksFile[] _defaultTasksFileCandidates =
{
new DefaultTasksFile(NativeMethodsShared.IsWindows
? "c:\\directory1\\directory2\\a.tasks"
: "/directory1/directory2/a.tasks",
@"<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<UsingTask TaskName='a1' AssemblyName='a' />
<UsingTask TaskName='a2' AssemblyName='a' />
<UsingTask TaskName='a3' AssemblyName='a' />
<UsingTask TaskName='a4' AssemblyName='a' />
</Project>"),
new DefaultTasksFile(NativeMethodsShared.IsWindows
? "c:\\directory1\\directory2\\b.tasks"
: "/directory1/directory2/b.tasks",
@"<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<UsingTask TaskName='b1' AssemblyName='b' />
</Project>"),
// Extension is .tasksfile / .taskfile, so a "*.tasks" pattern must NOT match this one.
new DefaultTasksFile(NativeMethodsShared.IsWindows
? "c:\\directory1\\directory2\\c.tasksfile"
: "/directory1/directory2/c.taskfile",
@"<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<UsingTask TaskName='c1' AssemblyName='c' />
</Project>"),
// Lives in a subdirectory, so a non-recursive search of directory2 must skip it.
new DefaultTasksFile(NativeMethodsShared.IsWindows
? "c:\\directory1\\directory2\\directory3\\d.tasks"
: "/directory1/directory2/directory3/d.tasks",
@"<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<UsingTask TaskName='d1' AssemblyName='d' />
</Project>"),
new DefaultTasksFile(NativeMethodsShared.IsWindows
? "c:\\directory1\\directory2\\e.tasks"
: "/directory1/directory2/e.tasks",
@"<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<UsingTask TaskName='e1' AssemblyName='e' />
</Project>"),
// Same relative path on a different drive/root — must not match searches rooted at c:.
new DefaultTasksFile(NativeMethodsShared.IsWindows
? "d:\\directory1\\directory2\\f.tasks"
: "/d/directory1/directory2/f.tasks",
@"<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<UsingTask TaskName='f1' AssemblyName='f' />
</Project>"),
// Multi-dot name: still matches a "*.tasks" wildcard.
new DefaultTasksFile(NativeMethodsShared.IsWindows
? "c:\\directory1\\directory2\\g.custom.tasks"
: "/directory1/directory2/g.custom.tasks",
@"<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<UsingTask TaskName='g1' AssemblyName='g' />
<UsingTask TaskName='g2' AssemblyName='g' />
<UsingTask TaskName='g3' AssemblyName='g' />
</Project>"),
new DefaultTasksFile(NativeMethodsShared.IsWindows
? "c:\\somepath\\1.tasks"
: "/somepath/1.tasks",
@"<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<UsingTask TaskName='11' AssemblyName='1' />
<UsingTask TaskName='12' AssemblyName='1' />
<UsingTask TaskName='13' AssemblyName='1' />
</Project>"),
new DefaultTasksFile(NativeMethodsShared.IsWindows
? "c:\\somepath\\2.tasks"
: "/somepath/2.tasks",
@"<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<UsingTask TaskName='21' AssemblyName='2' />
</Project>"),
// Inline (TaskFactory) task definition used by InlineTasksInDotTasksFile.
new DefaultTasksFile(NativeMethodsShared.IsWindows
? "c:\\inline\\inlinetasks.tasks"
: "/inline/inlinetasks.tasks",
@"<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<UsingTask TaskName='t2' AssemblyName='an' Condition='true' TaskFactory='AssemblyFactory' Runtime='CLR2' Architecture='x86' RequiredRuntime='2.0' RequiredPlatform='x86'>
<ParameterGroup>
<MyParameter ParameterType='System.String' Output='true' Required='false'/>
</ParameterGroup>
<Task>
x
</Task>
</UsingTask>
</Project>"),
// Override-tasks files; note 'a1' also appears in a.tasks so override precedence can be tested.
new DefaultTasksFile(NativeMethodsShared.IsWindows
? "c:\\msbuildoverridetasks\\1.overridetasks"
: "/msbuildoverridetasks/1.overridetasks",
@"<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<UsingTask TaskName='a1' AssemblyName='o' />
<UsingTask TaskName='oa1' AssemblyName='o' />
<UsingTask TaskName='oa2' AssemblyName='o' />
<UsingTask TaskName='og1' AssemblyName='o' />
</Project>"),
new DefaultTasksFile(NativeMethodsShared.IsWindows
? "c:\\msbuildoverridetasks\\2.overridetasks"
: "/msbuildoverridetasks/2.overridetasks",
@"<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003'>
<UsingTask TaskName='ooo' AssemblyName='o' />
</Project>")
};
/// <summary>
/// A fake in-memory default-tasks file: a path paired with the XML it "contains".
/// Served to the engine through the mock getFiles/loadXmlFromPath callbacks.
/// </summary>
public struct DefaultTasksFile
{
// Full path the file pretends to live at.
public string Path;
// Raw XML contents returned when the file is "loaded".
public string XmlContents;
public DefaultTasksFile(string path, string xmlContents)
{
this.Path = path;
this.XmlContents = xmlContents;
}
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
namespace System.Threading.Tasks.Tests
{
public static class CancellationTokenTests
{
/// <summary>
/// Registering a null callback must throw ArgumentNullException from every overload.
/// </summary>
[Fact]
public static void CancellationTokenRegister_Exceptions()
{
    CancellationToken token = new CancellationToken();
    // Action overload, Action+useSynchronizationContext overload, Action<object>+state overload.
    Assert.Throws<ArgumentNullException>(() => token.Register(null));
    Assert.Throws<ArgumentNullException>(() => token.Register(null, false));
    Assert.Throws<ArgumentNullException>(() => token.Register(null, null));
}
/// <summary>
/// Default tokens compare equal to each other, pre-canceled tokens compare equal to
/// each other, and the two groups are never equal — whether or not the tokens have
/// been "inflated" by touching CanBeCanceled.
/// </summary>
[Fact]
public static void CancellationTokenEquality()
{
    // Simple empty-token comparison.
    Assert.Equal(new CancellationToken(), new CancellationToken());

    // Inflated empty tokens (reading CanBeCanceled forces any lazy initialization).
    CancellationToken inflatedEmptyA = new CancellationToken();
    bool ignoredA = inflatedEmptyA.CanBeCanceled; // inflate the CT
    CancellationToken inflatedEmptyB = new CancellationToken();
    bool ignoredB = inflatedEmptyB.CanBeCanceled; // inflate the CT
    Assert.Equal(inflatedEmptyA, new CancellationToken());
    Assert.Equal(new CancellationToken(), inflatedEmptyA);
    Assert.Equal(inflatedEmptyA, inflatedEmptyB);

    // Inflated pre-canceled tokens.
    CancellationToken inflatedCanceledA = new CancellationToken(true);
    bool ignoredC = inflatedCanceledA.CanBeCanceled; // inflate the CT
    CancellationToken inflatedCanceledB = new CancellationToken(true);
    bool ignoredD = inflatedCanceledB.CanBeCanceled; // inflate the CT
    Assert.Equal(inflatedCanceledA, new CancellationToken(true));
    Assert.Equal(inflatedCanceledA, inflatedCanceledB);

    // Empty and pre-canceled tokens are never equal.
    Assert.NotEqual(inflatedEmptyA, inflatedCanceledB);
    Assert.NotEqual(inflatedEmptyA, new CancellationToken(true));
    Assert.NotEqual(new CancellationToken(true), inflatedEmptyA);
}
        /// <summary>
        /// Verifies GetHashCode consistency: a CTS, its Token, and a copy of that token all
        /// hash identically; default (unset) tokens hash alike; pre-canceled default tokens
        /// hash alike; and the three groups produce distinct hash codes from each other.
        /// </summary>
        [Fact]
        public static void CancellationToken_GetHashCode()
        {
            CancellationTokenSource cts = new CancellationTokenSource();
            CancellationToken ct = cts.Token;
            int hash1 = cts.GetHashCode();
            int hash2 = cts.Token.GetHashCode();
            int hash3 = ct.GetHashCode();
            // Source and token share the same hash, however the token is obtained.
            Assert.Equal(hash1, hash2);
            Assert.Equal(hash2, hash3);
            CancellationToken defaultUnsetToken1 = new CancellationToken();
            CancellationToken defaultUnsetToken2 = new CancellationToken();
            int hashDefaultUnset1 = defaultUnsetToken1.GetHashCode();
            int hashDefaultUnset2 = defaultUnsetToken2.GetHashCode();
            Assert.Equal(hashDefaultUnset1, hashDefaultUnset2);
            CancellationToken defaultSetToken1 = new CancellationToken(true);
            CancellationToken defaultSetToken2 = new CancellationToken(true);
            int hashDefaultSet1 = defaultSetToken1.GetHashCode();
            int hashDefaultSet2 = defaultSetToken2.GetHashCode();
            Assert.Equal(hashDefaultSet1, hashDefaultSet2);
            // Each distinct backing state yields a distinct hash.
            Assert.NotEqual(hash1, hashDefaultUnset1);
            Assert.NotEqual(hash1, hashDefaultSet1);
            Assert.NotEqual(hashDefaultUnset1, hashDefaultSet1);
        }
        /// <summary>
        /// Equality members (GetHashCode, Equals, ==, !=) on a token whose backing source has
        /// been disposed throw ObjectDisposedException, in both operand positions.
        /// NOTE(review): this pins the historical throw-on-disposed contract exercised here;
        /// confirm it still matches the current product behavior.
        /// </summary>
        [Fact]
        public static void CancellationToken_EqualityAndDispose()
        {
            //hashcode.
            Assert.Throws<ObjectDisposedException>(
               () =>
               {
                   CancellationTokenSource cts = new CancellationTokenSource();
                   cts.Dispose();
                   cts.Token.GetHashCode();
               });
            //x.Equals(y)
            Assert.Throws<ObjectDisposedException>(
               () =>
               {
                   CancellationTokenSource cts = new CancellationTokenSource();
                   cts.Dispose();
                   cts.Token.Equals(new CancellationToken());
               });
            //x.Equals(y)
            Assert.Throws<ObjectDisposedException>(
               () =>
               {
                   CancellationTokenSource cts = new CancellationTokenSource();
                   cts.Dispose();
                   new CancellationToken().Equals(cts.Token);
               });
            //x==y
            Assert.Throws<ObjectDisposedException>(
               () =>
               {
                   CancellationTokenSource cts = new CancellationTokenSource();
                   cts.Dispose();
                   bool result = cts.Token == new CancellationToken();
               });
            //x==y
            Assert.Throws<ObjectDisposedException>(
               () =>
               {
                   CancellationTokenSource cts = new CancellationTokenSource();
                   cts.Dispose();
                   bool result = new CancellationToken() == cts.Token;
               });
            //x!=y
            Assert.Throws<ObjectDisposedException>(
               () =>
               {
                   CancellationTokenSource cts = new CancellationTokenSource();
                   cts.Dispose();
                   bool result = cts.Token != new CancellationToken();
               });
            //x!=y
            Assert.Throws<ObjectDisposedException>(
               () =>
               {
                   CancellationTokenSource cts = new CancellationTokenSource();
                   cts.Dispose();
                   bool result = new CancellationToken() != cts.Token;
               });
        }
[Fact]
public static void TokenSourceDispose()
{
CancellationTokenSource tokenSource = new CancellationTokenSource();
CancellationToken token = tokenSource.Token;
CancellationTokenRegistration preDisposeRegistration = token.Register(() => { });
//WaitHandle and Dispose
WaitHandle wh = token.WaitHandle; //ok
Assert.NotNull(wh);
tokenSource.Dispose();
// Regression test: allow ctr.Dispose() to succeed when the backing cts has already been disposed.
try
{
preDisposeRegistration.Dispose();
}
catch
{
Assert.True(false, string.Format("TokenSourceDispose: > ctr.Dispose() failed when referring to a disposed CTS"));
}
bool cr = tokenSource.IsCancellationRequested; //this is ok after dispose.
tokenSource.Dispose(); //Repeat calls to Dispose should be ok.
}
/// <summary>
/// Test passive signalling.
///
/// Gets a token, then polls on its ThrowIfCancellationRequested property.
/// </summary>
/// <returns></returns>
[Fact]
public static void CancellationTokenPassiveListening()
{
CancellationTokenSource tokenSource = new CancellationTokenSource();
CancellationToken token = tokenSource.Token;
Assert.False(token.IsCancellationRequested,
"CancellationTokenPassiveListening: Cancellation should not have occurred yet.");
tokenSource.Cancel();
Assert.True(token.IsCancellationRequested,
"CancellationTokenPassiveListening: Cancellation should now have occurred.");
}
/// <summary>
/// Test active signalling.
///
/// Gets a token, registers a notification callback and ensure it is called.
/// </summary>
/// <returns></returns>
[Fact]
public static void CancellationTokenActiveListening()
{
CancellationTokenSource tokenSource = new CancellationTokenSource();
CancellationToken token = tokenSource.Token;
bool signalReceived = false;
token.Register(() => signalReceived = true);
Assert.False(signalReceived,
"CancellationTokenActiveListening: Cancellation should not have occurred yet.");
tokenSource.Cancel();
Assert.True(signalReceived,
"CancellationTokenActiveListening: Cancellation should now have occurred and caused a signal.");
}
        // Backing event used only by AddAndRemoveDelegates to compare CancellationToken
        // registration semantics against ordinary multicast-delegate add/remove semantics.
        private static event EventHandler AddAndRemoveDelegates_TestEvent;
        [Fact]
        public static void AddAndRemoveDelegates()
        {
            //Test various properties of callbacks:
            // 1. the same handler can be added multiple times
            // 2. removing a handler only removes one instance of a repeat
            // 3. after some add and removes, everything appears to be correct
            // 4. The behaviour matches the behaviour of a regular Event(Multicast-delegate).
            CancellationTokenSource tokenSource = new CancellationTokenSource();
            CancellationToken token = tokenSource.Token;
            List<string> output = new List<string>();
            Action action1 = () => output.Add("action1");
            Action action2 = () => output.Add("action2");
            CancellationTokenRegistration reg1 = token.Register(action1);
            CancellationTokenRegistration reg2 = token.Register(action2);
            CancellationTokenRegistration reg3 = token.Register(action2);
            CancellationTokenRegistration reg4 = token.Register(action1);
            // Dispose three of the four registrations; only reg1 (action1) survives.
            reg2.Dispose();
            reg3.Dispose();
            reg4.Dispose();
            tokenSource.Cancel();
            Assert.Equal(1, output.Count);
            Assert.Equal("action1", output[0]);
            // and prove this is what normal events do...
            output.Clear();
            EventHandler handler1 = (sender, obj) => output.Add("handler1");
            EventHandler handler2 = (sender, obj) => output.Add("handler2");
            AddAndRemoveDelegates_TestEvent += handler1;
            AddAndRemoveDelegates_TestEvent += handler2;
            AddAndRemoveDelegates_TestEvent += handler2;
            AddAndRemoveDelegates_TestEvent += handler1;
            // Mirror the registration disposals above: remove handler2 twice and handler1 once,
            // leaving a single handler1 subscription.
            AddAndRemoveDelegates_TestEvent -= handler2;
            AddAndRemoveDelegates_TestEvent -= handler2;
            AddAndRemoveDelegates_TestEvent -= handler1;
            AddAndRemoveDelegates_TestEvent(null, EventArgs.Empty);
            Assert.Equal(1, output.Count);
            Assert.Equal("handler1", output[0]);
        }
/// <summary>
/// Test late enlistment.
///
/// If a handler is added to a 'canceled' cancellation token, the handler is called immediately.
/// </summary>
/// <returns></returns>
[Fact]
public static void CancellationTokenLateEnlistment()
{
CancellationTokenSource tokenSource = new CancellationTokenSource();
CancellationToken token = tokenSource.Token;
bool signalReceived = false;
tokenSource.Cancel(); //Signal
//Late enlist.. should fire the delegate synchronously
token.Register(() => signalReceived = true);
Assert.True(signalReceived,
"CancellationTokenLateEnlistment: The signal should have been received even after late enlistment.");
}
/// <summary>
/// Test the wait handle exposed by the cancellation token
///
/// The signal occurs on a separate thread, and should happen after the wait begins.
/// </summary>
/// <returns></returns>
[Fact]
public static void CancellationTokenWaitHandle_SignalAfterWait()
{
CancellationTokenSource tokenSource = new CancellationTokenSource();
CancellationToken token = tokenSource.Token;
Task.Run(
() =>
{
tokenSource.Cancel(); //Signal
});
token.WaitHandle.WaitOne();
Assert.True(token.IsCancellationRequested,
"CancellationTokenWaitHandle_SignalAfterWait: the token should have been canceled.");
}
/// <summary>
/// Test the wait handle exposed by the cancellation token
///
/// The signal occurs on a separate thread, and should happen after the wait begins.
/// </summary>
/// <returns></returns>
[Fact]
public static void CancellationTokenWaitHandle_SignalBeforeWait()
{
CancellationTokenSource tokenSource = new CancellationTokenSource();
CancellationToken token = tokenSource.Token;
tokenSource.Cancel();
token.WaitHandle.WaitOne(); // the wait handle should already be set.
Assert.True(token.IsCancellationRequested,
"CancellationTokenWaitHandle_SignalBeforeWait: the token should have been canceled.");
}
/// <summary>
/// Test that WaitAny can be used with a CancellationToken.WaitHandle
/// </summary>
/// <returns></returns>
[Fact]
public static void CancellationTokenWaitHandle_WaitAny()
{
CancellationTokenSource tokenSource = new CancellationTokenSource();
CancellationToken token = tokenSource.Token;
CancellationToken tokenNoSource = new CancellationToken();
tokenSource.Cancel();
WaitHandle.WaitAny(new[] { token.WaitHandle, tokenNoSource.WaitHandle }); //make sure the dummy tokens has a valid WaitHanle
Assert.True(token.IsCancellationRequested,
"CancellationTokenWaitHandle_WaitAny: The token should have been canceled.");
}
[Fact]
public static void CreateLinkedTokenSource_Simple_TwoToken()
{
CancellationTokenSource signal1 = new CancellationTokenSource();
CancellationTokenSource signal2 = new CancellationTokenSource();
//Neither token is signalled.
CancellationTokenSource combined = CancellationTokenSource.CreateLinkedTokenSource(signal1.Token, signal2.Token);
Assert.False(combined.IsCancellationRequested,
"CreateLinkedToken_Simple_TwoToken: The combined token should start unsignalled");
signal1.Cancel();
Assert.True(combined.IsCancellationRequested,
"CreateLinkedToken_Simple_TwoToken: The combined token should now be signalled");
}
[Fact]
public static void CreateLinkedTokenSource_Simple_MultiToken()
{
CancellationTokenSource signal1 = new CancellationTokenSource();
CancellationTokenSource signal2 = new CancellationTokenSource();
CancellationTokenSource signal3 = new CancellationTokenSource();
//Neither token is signalled.
CancellationTokenSource combined = CancellationTokenSource.CreateLinkedTokenSource(new[] { signal1.Token, signal2.Token, signal3.Token });
Assert.False(combined.IsCancellationRequested,
"CreateLinkedToken_Simple_MultiToken: The combined token should start unsignalled");
signal1.Cancel();
Assert.True(combined.IsCancellationRequested,
"CreateLinkedToken_Simple_MultiToken: The combined token should now be signalled");
}
[Fact]
public static void CreateLinkedToken_SourceTokenAlreadySignalled()
{
//creating a combined token, when a source token is already signalled.
CancellationTokenSource signal1 = new CancellationTokenSource();
CancellationTokenSource signal2 = new CancellationTokenSource();
signal1.Cancel(); //early signal.
CancellationTokenSource combined = CancellationTokenSource.CreateLinkedTokenSource(signal1.Token, signal2.Token);
Assert.True(combined.IsCancellationRequested,
"CreateLinkedToken_SourceTokenAlreadySignalled: The combined token should immediately be in the signalled state.");
}
[Fact]
public static void CreateLinkedToken_MultistepComposition_SourceTokenAlreadySignalled()
{
//two-step composition
CancellationTokenSource signal1 = new CancellationTokenSource();
signal1.Cancel(); //early signal.
CancellationTokenSource signal2 = new CancellationTokenSource();
CancellationTokenSource combined1 = CancellationTokenSource.CreateLinkedTokenSource(signal1.Token, signal2.Token);
CancellationTokenSource signal3 = new CancellationTokenSource();
CancellationTokenSource combined2 = CancellationTokenSource.CreateLinkedTokenSource(signal3.Token, combined1.Token);
Assert.True(combined2.IsCancellationRequested,
"CreateLinkedToken_MultistepComposition_SourceTokenAlreadySignalled: The 2-step combined token should immediately be in the signalled state.");
}
[Fact]
public static void CallbacksOrderIsLifo()
{
CancellationTokenSource tokenSource = new CancellationTokenSource();
CancellationToken token = tokenSource.Token;
List<string> callbackOutput = new List<string>();
token.Register(() => callbackOutput.Add("Callback1"));
token.Register(() => callbackOutput.Add("Callback2"));
tokenSource.Cancel();
Assert.Equal("Callback2", callbackOutput[0]);
Assert.Equal("Callback1", callbackOutput[1]);
}
[Fact]
public static void Enlist_EarlyAndLate()
{
CancellationTokenSource tokenSource = new CancellationTokenSource();
CancellationToken token = tokenSource.Token;
CancellationTokenSource earlyEnlistedTokenSource = new CancellationTokenSource();
token.Register(() => earlyEnlistedTokenSource.Cancel());
tokenSource.Cancel();
Assert.Equal(true, earlyEnlistedTokenSource.IsCancellationRequested);
CancellationTokenSource lateEnlistedTokenSource = new CancellationTokenSource();
token.Register(() => lateEnlistedTokenSource.Cancel());
Assert.Equal(true, lateEnlistedTokenSource.IsCancellationRequested);
}
        /// <summary>
        /// This test from donnya. Thanks Donny.
        /// A worker blocks in WaitHandle.WaitAll over two token handles plus an MRE; the test
        /// then signals all three and joins the worker. If the waits do not interoperate,
        /// this test deadlocks rather than failing an assert.
        /// </summary>
        [Fact]
        public static void WaitAll()
        {
            Debug.WriteLine("WaitAll:  Testing CancellationTokenTests.WaitAll, If Join does not work, then a deadlock will occur.");
            CancellationTokenSource tokenSource = new CancellationTokenSource();
            CancellationTokenSource signal2 = new CancellationTokenSource();
            ManualResetEvent mre = new ManualResetEvent(false);
            ManualResetEvent mre2 = new ManualResetEvent(false);
            // Worker blocks until all three handles are signalled, then flags completion.
            Task t = new Task(() =>
            {
                WaitHandle.WaitAll(new WaitHandle[] { tokenSource.Token.WaitHandle, signal2.Token.WaitHandle, mre });
                mre2.Set();
            });
            t.Start();
            // Release the worker by signalling every handle it waits on.
            tokenSource.Cancel();
            signal2.Cancel();
            mre.Set();
            mre2.WaitOne();
            t.Wait();
            //true if the Join succeeds.. otherwise a deadlock will occur.
        }
[Fact]
public static void BehaviourAfterCancelSignalled()
{
CancellationTokenSource tokenSource = new CancellationTokenSource();
CancellationToken token = tokenSource.Token;
token.Register(() => { });
tokenSource.Cancel();
}
[Fact]
public static void Cancel_ThrowOnFirstException()
{
ManualResetEvent mre_CancelHasBeenEnacted = new ManualResetEvent(false);
CancellationTokenSource tokenSource = new CancellationTokenSource();
CancellationToken token = tokenSource.Token;
// Main test body
ArgumentException caughtException = null;
token.Register(() =>
{
throw new InvalidOperationException();
});
token.Register(() =>
{
throw new ArgumentException();
}); // !!NOTE: Due to LIFO ordering, this delegate should be the only one to run.
Task.Run(() =>
{
try
{
tokenSource.Cancel(true);
}
catch (ArgumentException ex)
{
caughtException = ex;
}
catch (Exception ex)
{
Assert.True(false, string.Format("Cancel_ThrowOnFirstException: The wrong exception type was thrown. ex=" + ex));
}
mre_CancelHasBeenEnacted.Set();
});
mre_CancelHasBeenEnacted.WaitOne();
Assert.NotNull(caughtException);
}
        /// <summary>
        /// Cancel(false) runs every callback even when some throw, then surfaces all callback
        /// exceptions in a single AggregateException. LIFO execution means the
        /// InvalidOperationException (registered last) is the first inner exception.
        /// </summary>
        [Fact]
        public static void Cancel_DontThrowOnFirstException()
        {
            ManualResetEvent mre_CancelHasBeenEnacted = new ManualResetEvent(false);
            CancellationTokenSource tokenSource = new CancellationTokenSource();
            CancellationToken token = tokenSource.Token;
            // Main test body
            AggregateException caughtException = null;
            token.Register(() => { throw new ArgumentException(); });
            token.Register(() => { throw new InvalidOperationException(); });
            Task.Run(
                () =>
                {
                    try
                    {
                        tokenSource.Cancel(false);
                    }
                    catch (AggregateException ex)
                    {
                        caughtException = ex;
                    }
                    mre_CancelHasBeenEnacted.Set();
                }
                );
            mre_CancelHasBeenEnacted.WaitOne();
            Assert.NotNull(caughtException);
            Assert.Equal(2, caughtException.InnerExceptions.Count);
            Assert.True(caughtException.InnerExceptions[0] is InvalidOperationException,
               "Cancel_ThrowOnFirstException:  Due to LIFO call order, the first inner exception should be an InvalidOperationException.");
            Assert.True(caughtException.InnerExceptions[1] is ArgumentException,
               "Cancel_ThrowOnFirstException:  Due to LIFO call order, the second inner exception should be an ArgumentException.");
        }
[Fact]
public static void CancellationRegistration_RepeatDispose()
{
Exception caughtException = null;
CancellationTokenSource cts = new CancellationTokenSource();
CancellationToken ct = cts.Token;
CancellationTokenRegistration registration = ct.Register(() => { });
try
{
registration.Dispose();
registration.Dispose();
}
catch (Exception ex)
{
caughtException = ex;
}
Assert.Null(caughtException);
}
        /// <summary>
        /// Equality, operator, and hash-code semantics of CancellationTokenRegistration:
        /// "dummy" registrations (on default tokens, or on an already-canceled token) all
        /// compare equal; distinct registrations on live tokens compare unequal; a copied
        /// registration struct compares equal to its source.
        /// </summary>
        [Fact]
        public static void CancellationTokenRegistration_EqualityAndHashCode()
        {
            CancellationTokenSource outerCTS = new CancellationTokenSource();
            {
                // different registrations on 'different' default tokens
                CancellationToken ct1 = new CancellationToken();
                CancellationToken ct2 = new CancellationToken();
                CancellationTokenRegistration ctr1 = ct1.Register(() => outerCTS.Cancel());
                CancellationTokenRegistration ctr2 = ct2.Register(() => outerCTS.Cancel());
                Assert.True(ctr1.Equals(ctr2),
                   "CancellationTokenRegistration_EqualityAndHashCode:  [1]The two registrations should compare equal, as they are both dummies.");
                Assert.True(ctr1 == ctr2,
                   "CancellationTokenRegistration_EqualityAndHashCode:  [2]The two registrations should compare equal, as they are both dummies.");
                Assert.False(ctr1 != ctr2,
                   "CancellationTokenRegistration_EqualityAndHashCode:  [3]The two registrations should compare equal, as they are both dummies.");
                Assert.True(ctr1.GetHashCode() == ctr2.GetHashCode(),
                   "CancellationTokenRegistration_EqualityAndHashCode:  [4]The two registrations should have the same hashcode, as they are both dummies.");
            }
            {
                // different registrations on the same already cancelled token
                CancellationTokenSource cts = new CancellationTokenSource();
                cts.Cancel();
                CancellationToken ct = cts.Token;
                CancellationTokenRegistration ctr1 = ct.Register(() => outerCTS.Cancel());
                CancellationTokenRegistration ctr2 = ct.Register(() => outerCTS.Cancel());
                Assert.True(ctr1.Equals(ctr2),
                   "CancellationTokenRegistration_EqualityAndHashCode:  [1]The two registrations should compare equal, as they are both dummies due to CTS being already canceled.");
                Assert.True(ctr1 == ctr2,
                   "CancellationTokenRegistration_EqualityAndHashCode:  [2]The two registrations should compare equal, as they are both dummies due to CTS being already canceled.");
                Assert.False(ctr1 != ctr2,
                   "CancellationTokenRegistration_EqualityAndHashCode:  [3]The two registrations should compare equal, as they are both dummies due to CTS being already canceled.");
                Assert.True(ctr1.GetHashCode() == ctr2.GetHashCode(),
                   "CancellationTokenRegistration_EqualityAndHashCode:  [4]The two registrations should have the same hashcode, as they are both dummies due to CTS being already canceled.");
            }
            {
                // different registrations on one real token
                CancellationTokenSource cts1 = new CancellationTokenSource();
                CancellationTokenRegistration ctr1 = cts1.Token.Register(() => outerCTS.Cancel());
                CancellationTokenRegistration ctr2 = cts1.Token.Register(() => outerCTS.Cancel());
                Assert.False(ctr1.Equals(ctr2),
                   "CancellationTokenRegistration_EqualityAndHashCode:  The two registrations should not compare equal.");
                Assert.False(ctr1 == ctr2,
                   "CancellationTokenRegistration_EqualityAndHashCode:  The two registrations should not compare equal.");
                Assert.True(ctr1 != ctr2,
                   "CancellationTokenRegistration_EqualityAndHashCode:  The two registrations should not compare equal.");
                Assert.False(ctr1.GetHashCode() == ctr2.GetHashCode(),
                   "CancellationTokenRegistration_EqualityAndHashCode:  The two registrations should not have the same hashcode.");
                CancellationTokenRegistration ctr1copy = ctr1;
                Assert.True(ctr1 == ctr1copy, "The two registrations should be equal.");
            }
            {
                // registrations on different real tokens.
                // different registrations on one token
                CancellationTokenSource cts1 = new CancellationTokenSource();
                CancellationTokenSource cts2 = new CancellationTokenSource();
                CancellationTokenRegistration ctr1 = cts1.Token.Register(() => outerCTS.Cancel());
                CancellationTokenRegistration ctr2 = cts2.Token.Register(() => outerCTS.Cancel());
                Assert.False(ctr1.Equals(ctr2),
                   "CancellationTokenRegistration_EqualityAndHashCode:  The two registrations should not compare equal.");
                Assert.False(ctr1 == ctr2,
                   "CancellationTokenRegistration_EqualityAndHashCode:  The two registrations should not compare equal.");
                Assert.True(ctr1 != ctr2,
                   "CancellationTokenRegistration_EqualityAndHashCode:  The two registrations should not compare equal.");
                Assert.False(ctr1.GetHashCode() == ctr2.GetHashCode(),
                   "CancellationTokenRegistration_EqualityAndHashCode:  The two registrations should not have the same hashcode.");
                CancellationTokenRegistration ctr1copy = ctr1;
                Assert.True(ctr1.Equals(ctr1copy), "The two registrations should be equal.");
            }
        }
        /// <summary>
        /// A user callback on a linked token that throws ObjectDisposedException surfaces
        /// through Cancel(true) wrapped as an AggregateException whose inner exception is
        /// the user's ODE (identified here by its "myException" message).
        /// </summary>
        [Fact]
        public static void CancellationTokenLinking_ODEinTarget()
        {
            CancellationTokenSource cts1 = new CancellationTokenSource();
            CancellationTokenSource cts2 = CancellationTokenSource.CreateLinkedTokenSource(cts1.Token, new CancellationToken());
            Exception caughtException = null;
            cts2.Token.Register(() => { throw new ObjectDisposedException("myException"); });
            try
            {
                cts1.Cancel(true);
            }
            catch (Exception ex)
            {
                caughtException = ex;
            }
            Assert.True(
               caughtException is AggregateException
                  && caughtException.InnerException is ObjectDisposedException
                  && caughtException.InnerException.Message.Contains("myException"),
               "CancellationTokenLinking_ODEinTarget:  The users ODE should be caught. Actual:" + caughtException);
        }
[Fact]
public static void ThrowIfCancellationRequested()
{
OperationCanceledException caughtEx = null;
CancellationTokenSource cts = new CancellationTokenSource();
CancellationToken ct = cts.Token;
ct.ThrowIfCancellationRequested();
// no exception should occur
cts.Cancel();
try
{
ct.ThrowIfCancellationRequested();
}
catch (OperationCanceledException oce)
{
caughtEx = oce;
}
Assert.NotNull(caughtEx);
Assert.Equal(ct, caughtEx.CancellationToken);
}
/// <summary>
/// ensure that calling ctr.Dipose() from within a cancellation callback will not deadlock.
/// </summary>
/// <returns></returns>
[Fact]
public static void DeregisterFromWithinACallbackIsSafe_BasicTest()
{
Debug.WriteLine("CancellationTokenTests.Bug720327_DeregisterFromWithinACallbackIsSafe_BasicTest()");
Debug.WriteLine(" - this method should complete immediately. Delay to complete indicates a deadlock failure.");
CancellationTokenSource cts = new CancellationTokenSource();
CancellationToken ct = cts.Token;
CancellationTokenRegistration ctr1 = ct.Register(() => { });
ct.Register(() => { ctr1.Dispose(); });
cts.Cancel();
Debug.WriteLine(" - Completed OK.");
}
// regression test
// Disposing a linkedCTS would previously throw if a source CTS had been
// disposed already. (it is an error for a user to get in this situation, but we decided to allow it to work).
[Fact]
public static void ODEWhenDisposingLinkedCTS()
{
try
{
// User passes a cancellation token (CT) to component A.
CancellationTokenSource userTokenSource = new CancellationTokenSource();
CancellationToken userToken = userTokenSource.Token;
// Component A implements "timeout", by creating its own cancellation token source (CTS) and invoking cts.Cancel() when the timeout timer fires.
CancellationTokenSource cts2 = new CancellationTokenSource();
CancellationToken cts2Token = cts2.Token;
// Component A creates a linked token source representing the CT from the user and the "timeout" CT.
var linkedTokenSource = CancellationTokenSource.CreateLinkedTokenSource(cts2Token, userToken);
// User calls Cancel() on his CTS and then Dispose()
userTokenSource.Cancel();
userTokenSource.Dispose();
// Component B correctly cancels the operation, returns to component A.
// ...
// Component A now disposes the linked CTS => ObjectDisposedException is thrown by cts.Dispose() because the user CTS was already disposed.
linkedTokenSource.Dispose();
}
catch (Exception ex)
{
if (ex is ObjectDisposedException)
{
Assert.True(false, string.Format("Bug901737_ODEWhenDisposingLinkedCTS: - ODE Occurred!"));
}
else
{
Assert.True(false, string.Format("Bug901737_ODEWhenDisposingLinkedCTS: - Exception Occurred (not an ODE!!): " + ex));
}
}
}
        // Several tests for deriving custom user types from CancellationTokenSource.
        // DerivedCTS and DisposeTracker are test helpers declared elsewhere in this file;
        // DisposeTracker records whether Dispose(true)/Dispose(false) ran on the derived CTS.
        [Fact]
        public static void DerivedCancellationTokenSource()
        {
            // Verify that a derived CTS is functional
            {
                CancellationTokenSource c = new DerivedCTS(null);
                CancellationToken token = c.Token;
                var task = Task.Factory.StartNew(() => c.Cancel());
                task.Wait();
                Assert.True(token.IsCancellationRequested,
                   "DerivedCancellationTokenSource:  The token should have been cancelled.");
            }
            // Verify that callback list on a derived CTS is functional
            {
                CancellationTokenSource c = new DerivedCTS(null);
                CancellationToken token = c.Token;
                int callbackRan = 0;
                token.Register(() => Interlocked.Increment(ref callbackRan));
                var task = Task.Factory.StartNew(() => c.Cancel());
                task.Wait();
                // Bounded spin: give the callback up to 1s to run before asserting.
                SpinWait.SpinUntil(() => callbackRan > 0, 1000);
                Assert.True(callbackRan == 1,
                   "DerivedCancellationTokenSource:  Expected the callback to run once. Instead, it ran " + callbackRan + " times.");
            }
            // Test the Dispose path for a class derived from CancellationTokenSource
            {
                var disposeTracker = new DisposeTracker();
                CancellationTokenSource c = new DerivedCTS(disposeTracker);
                Assert.True(c.Token.CanBeCanceled,
                    "DerivedCancellationTokenSource:  The token should be cancellable.");
                c.Dispose();
                // Dispose() should have prevented the finalizer from running. Give the finalizer a chance to run. If this
                // results in Dispose(false) getting called, we'll catch the issue.
                GC.Collect();
                GC.WaitForPendingFinalizers();
                Assert.True(disposeTracker.DisposeTrueCalled,
                    "DerivedCancellationTokenSource:  Dispose(true) should have been called.");
                Assert.False(disposeTracker.DisposeFalseCalled,
                    "DerivedCancellationTokenSource:  Dispose(false) should not have been called.");
            }
            // Test the finalization code path for a class derived from CancellationTokenSource
            {
                var disposeTracker = new DisposeTracker();
                // Since the object is not assigned into a variable, it can be GC'd before the current method terminates.
                // (This is only an issue in the Debug build)
                new DerivedCTS(disposeTracker);
                // Wait until the DerivedCTS object is finalized
                SpinWait.SpinUntil(() =>
                {
                    GC.Collect();
                    GC.WaitForPendingFinalizers();
                    GC.Collect();
                    return disposeTracker.DisposeTrueCalled;
                }, 500);
                Assert.False(disposeTracker.DisposeTrueCalled,
                    "DerivedCancellationTokenSource:  Dispose(true) should not have been called.");
                Assert.True(disposeTracker.DisposeFalseCalled,
                    "DerivedCancellationTokenSource:  Dispose(false) should have been called.");
            }
            // Verify that Dispose(false) is a no-op on the CTS. Dispose(false) should only release any unmanaged resources, and
            // CTS does not currently hold any unmanaged resources.
            {
                var disposeTracker = new DisposeTracker();
                DerivedCTS c = new DerivedCTS(disposeTracker);
                c.DisposeUnmanaged();
                // No exception expected - the CancellationTokenSource should be valid
                Assert.True(c.Token.CanBeCanceled,
                   "DerivedCancellationTokenSource:  The token should still be cancellable.");
                Assert.False(disposeTracker.DisposeTrueCalled,
                   "DerivedCancellationTokenSource:  Dispose(true) should not have been called.");
                Assert.True(disposeTracker.DisposeFalseCalled,
                   "DerivedCancellationTokenSource:  Dispose(false) should have run.");
            }
        }
// Several tests for deriving custom user types from CancellationTokenSource
[Fact]
public static void DerivedCancellationTokenSource_Negative()
{
// Test the Dispose path for a class derived from CancellationTokenSource
{
var disposeTracker = new DisposeTracker();
CancellationTokenSource c = new DerivedCTS(disposeTracker);
c.Dispose();
// Dispose() should have prevented the finalizer from running. Give the finalizer a chance to run. If this
// results in Dispose(false) getting called, we'll catch the issue.
GC.Collect();
GC.WaitForPendingFinalizers();
Assert.Throws<ObjectDisposedException>(
() =>
{
// Accessing the Token property should throw an ObjectDisposedException
if (c.Token.CanBeCanceled)
Assert.True(false, string.Format("DerivedCancellationTokenSource: Accessing the Token property should throw an ObjectDisposedException, but it did not."));
else
Assert.True(false, string.Format("DerivedCancellationTokenSource: Accessing the Token property should throw an ObjectDisposedException, but it did not."));
});
}
}
        /// <summary>
        /// Timer-based cancellation: an infinite (-1) or very long timeout must not signal;
        /// CancelAfter with a short timeout must eventually fire the registered callback.
        /// Exercised for both the int and the TimeSpan overloads. Timing-sensitive: a hang
        /// at either mres.Wait() indicates the timer never fired.
        /// </summary>
        [Fact]
        public static void CancellationTokenSourceWithTimer()
        {
            TimeSpan bigTimeSpan = new TimeSpan(2000, 0, 0, 0, 0);
            TimeSpan reasonableTimeSpan = new TimeSpan(0, 0, 1);
            CancellationTokenSource cts = new CancellationTokenSource();
            cts.Dispose();
            //
            // Test out some int-based timeout logic
            //
            cts = new CancellationTokenSource(-1); // should be an infinite timeout
            CancellationToken token = cts.Token;
            ManualResetEventSlim mres = new ManualResetEventSlim(false);
            CancellationTokenRegistration ctr = token.Register(() => mres.Set());
            Assert.False(token.IsCancellationRequested,
               "CancellationTokenSourceWithTimer:  Cancellation signaled on infinite timeout (int)!");
            cts.CancelAfter(1000000);
            Assert.False(token.IsCancellationRequested,
               "CancellationTokenSourceWithTimer:  Cancellation signaled on super-long timeout (int) !");
            // Re-arm with a 1ms timeout; the registration sets mres when the timer fires.
            cts.CancelAfter(1);
            Debug.WriteLine("CancellationTokenSourceWithTimer: > About to wait on cancellation that should occur soon (int)... if we hang, something bad happened");
            mres.Wait();
            cts.Dispose();
            //
            // Test out some TimeSpan-based timeout logic
            //
            TimeSpan prettyLong = new TimeSpan(1, 0, 0);
            cts = new CancellationTokenSource(prettyLong);
            token = cts.Token;
            mres = new ManualResetEventSlim(false);
            ctr = token.Register(() => mres.Set());
            Assert.False(token.IsCancellationRequested,
               "CancellationTokenSourceWithTimer:  Cancellation signaled on super-long timeout (TimeSpan,1)!");
            cts.CancelAfter(prettyLong);
            Assert.False(token.IsCancellationRequested,
               "CancellationTokenSourceWithTimer:  Cancellation signaled on super-long timeout (TimeSpan,2) !");
            // 1000 ticks = 0.1ms -- effectively immediate.
            cts.CancelAfter(new TimeSpan(1000));
            Debug.WriteLine("CancellationTokenSourceWithTimer: > About to wait on cancellation that should occur soon (TimeSpan)... if we hang, something bad happened");
            mres.Wait();
            cts.Dispose();
        }
[Fact]
public static void CancellationTokenSourceWithTimer_Negative()
{
TimeSpan bigTimeSpan = new TimeSpan(2000, 0, 0, 0, 0);
TimeSpan reasonableTimeSpan = new TimeSpan(0, 0, 1);
//
// Test exception logic
//
Assert.Throws<ArgumentOutOfRangeException>(
() => { new CancellationTokenSource(-2); });
Assert.Throws<ArgumentOutOfRangeException>(
() => { new CancellationTokenSource(bigTimeSpan); });
CancellationTokenSource cts = new CancellationTokenSource();
Assert.Throws<ArgumentOutOfRangeException>(
() => { cts.CancelAfter(-2); });
Assert.Throws<ArgumentOutOfRangeException>(
() => { cts.CancelAfter(bigTimeSpan); });
cts.Dispose();
Assert.Throws<ObjectDisposedException>(
() => { cts.CancelAfter(1); });
Assert.Throws<ObjectDisposedException>(
() => { cts.CancelAfter(reasonableTimeSpan); });
}
        /// <summary>
        /// Register(..., useSynchronizationContext: true) before cancellation: the callback
        /// must be delivered via Send on the SynchronizationContext that was current at
        /// registration time. TestingSynchronizationContext and SetSynchronizationContext
        /// are helpers defined elsewhere in this file -- presumably the former records
        /// DidSendOccur when Send is invoked; confirm against their definitions.
        /// </summary>
        [Fact]
        public static void EnlistWithSyncContext_BeforeCancel()
        {
            ManualResetEvent mre_CancelHasBeenEnacted = new ManualResetEvent(false); //synchronization helper
            CancellationTokenSource tokenSource = new CancellationTokenSource();
            CancellationToken token = tokenSource.Token;
            // Install a SynchronizationContext...
            SynchronizationContext prevailingSyncCtx = SynchronizationContext.Current;
            TestingSynchronizationContext testContext = new TestingSynchronizationContext();
            SetSynchronizationContext(testContext);
            // Main test body
            // register a null delegate, but use the currently registered syncContext.
            // the testSyncContext will track that it was used when the delegate is invoked.
            token.Register(() => { }, true);
            Task.Run(
                () =>
                {
                    tokenSource.Cancel();
                    mre_CancelHasBeenEnacted.Set();
                }
                );
            mre_CancelHasBeenEnacted.WaitOne();
            Assert.True(testContext.DidSendOccur,
               "EnlistWithSyncContext_BeforeCancel:  the delegate should have been called via Send to SyncContext.");
            //Cleanup.
            SetSynchronizationContext(prevailingSyncCtx);
        }
        /// <summary>
        /// As EnlistWithSyncContext_BeforeCancel, but the callback throws: the exception must
        /// still travel through the SynchronizationContext Send path and surface from Cancel()
        /// as an AggregateException containing the original ArgumentException.
        /// </summary>
        [Fact]
        public static void EnlistWithSyncContext_BeforeCancel_ThrowingExceptionInSyncContextDelegate()
        {
            ManualResetEvent mre_CancelHasBeenEnacted = new ManualResetEvent(false); //synchronization helper
            CancellationTokenSource tokenSource = new CancellationTokenSource();
            CancellationToken token = tokenSource.Token;
            // Install a SynchronizationContext...
            SynchronizationContext prevailingSyncCtx = SynchronizationContext.Current;
            TestingSynchronizationContext testContext = new TestingSynchronizationContext();
            SetSynchronizationContext(testContext);
            // Main test body
            AggregateException caughtException = null;
            // register a null delegate, but use the currently registered syncContext.
            // the testSyncContext will track that it was used when the delegate is invoked.
            token.Register(() => { throw new ArgumentException(); }, true);
            Task.Run(
                () =>
                {
                    try
                    {
                        tokenSource.Cancel();
                    }
                    catch (AggregateException ex)
                    {
                        caughtException = ex;
                    }
                    mre_CancelHasBeenEnacted.Set();
                }
                );
            mre_CancelHasBeenEnacted.WaitOne();
            Assert.True(testContext.DidSendOccur,
               "EnlistWithSyncContext_BeforeCancel_ThrowingExceptionInSyncContextDelegate:  the delegate should have been called via Send to SyncContext.");
            Assert.NotNull(caughtException);
            Assert.Equal(1, caughtException.InnerExceptions.Count);
            Assert.True(caughtException.InnerExceptions[0] is ArgumentException,
               "EnlistWithSyncContext_BeforeCancel_ThrowingExceptionInSyncContextDelegate:  The inner exception should be an ArgumentException.");
            //Cleanup.
            SetSynchronizationContext(prevailingSyncCtx);
        }
[Fact]
public static void EnlistWithSyncContext_BeforeCancel_ThrowingExceptionInSyncContextDelegate_ThrowOnFirst()
{
    // Signals once the worker task has finished requesting cancellation.
    var cancelCompleted = new ManualResetEvent(false);

    var source = new CancellationTokenSource();
    CancellationToken token = source.Token;

    // Install a tracking SynchronizationContext so Send() usage is observable.
    SynchronizationContext previousContext = SynchronizationContext.Current;
    var trackingContext = new TestingSynchronizationContext();
    SetSynchronizationContext(trackingContext);

    ArgumentException observed = null;

    // With throwOnFirstException == true the callback's exception should
    // propagate from Cancel() unwrapped.
    token.Register(() => { throw new ArgumentException(); }, true);

    Task.Run(
        () =>
        {
            try
            {
                source.Cancel(true);
            }
            catch (ArgumentException ex)
            {
                observed = ex;
            }
            cancelCompleted.Set();
        }
    );
    cancelCompleted.WaitOne();

    Assert.True(trackingContext.DidSendOccur,
       "EnlistWithSyncContext_BeforeCancel_ThrowingExceptionInSyncContextDelegate_ThrowOnFirst: the delegate should have been called via Send to SyncContext.");
    Assert.NotNull(observed);

    // Restore the previously installed context.
    SetSynchronizationContext(previousContext);
}
// Test that we marshal exceptions back if we run callbacks on a sync context.
// (This assumes that a syncContext.Send() may not be doing the marshalling itself).
[Fact]
public static void SyncContextWithExceptionThrowingCallback()
{
    // ThreadCrossingSynchronizationContext.Send wraps any callback exception in
    // an AggregateException, so that is the type surfaced by Cancel() in both
    // tests below. Catch it by type rather than catching Exception and blindly
    // casting: the original `(AggregateException)ex` cast could itself throw
    // InvalidCastException inside the catch block and mask the real failure.
    AggregateException caughtEx1 = null;
    AggregateException caughtEx2 = null;

    SynchronizationContext prevailingSyncCtx = SynchronizationContext.Current;
    SetSynchronizationContext(new ThreadCrossingSynchronizationContext());

    // -- Test 1 -- //
    CancellationTokenSource cts = new CancellationTokenSource();
    cts.Token.Register(
       () => { throw new Exception("testEx1"); }, true);

    try
    {
        cts.Cancel(true); //throw on first exception
    }
    catch (AggregateException ex)
    {
        caughtEx1 = ex;
    }
    Assert.NotNull(caughtEx1);

    // -- Test 2 -- //
    cts = new CancellationTokenSource();
    cts.Token.Register(
       () => { throw new ArgumentException("testEx2"); }, true);

    try
    {
        cts.Cancel(false); //do not throw on first exception
    }
    catch (AggregateException ex)
    {
        caughtEx2 = ex; // redundant cast removed: ex is already an AggregateException
    }
    Assert.NotNull(caughtEx2);
    Assert.Equal(1, caughtEx2.InnerExceptions.Count);

    // clean up
    SetSynchronizationContext(prevailingSyncCtx);
}
[Fact]
public static void Bug720327_DeregisterFromWithinACallbackIsSafe_SyncContextTest()
{
    Debug.WriteLine("* CancellationTokenTests.Bug720327_DeregisterFromWithinACallbackIsSafe_SyncContextTest()");
    Debug.WriteLine(" - this method should complete immediately. Delay to complete indicates a deadlock failure.");

    // Install our syncContext so some callbacks run through Send().
    SynchronizationContext previousContext = SynchronizationContext.Current;
    var crossingContext = new ThreadCrossingSynchronizationContext();
    SetSynchronizationContext(crossingContext);

    var source = new CancellationTokenSource();
    CancellationToken token = source.Token;

    CancellationTokenRegistration reg1 = token.Register(() => { });
    CancellationTokenRegistration reg2 = token.Register(() => { });
    CancellationTokenRegistration reg3 = token.Register(() => { });
    CancellationTokenRegistration reg4 = token.Register(() => { });

    // Callbacks that dispose the earlier registrations while cancellation is in
    // progress, alternating between the custom sync context and none. The test
    // passes simply by completing without deadlocking.
    token.Register(() => { reg1.Dispose(); }, true);  // with a custom syncContext
    token.Register(() => { reg2.Dispose(); }, false); // without
    token.Register(() => { reg3.Dispose(); }, true);  // with a custom syncContext
    token.Register(() => { reg4.Dispose(); }, false); // without

    source.Cancel();
    Debug.WriteLine(" - Completed OK.");

    // Restore the previously installed context.
    SetSynchronizationContext(previousContext);
}
#region Helper Classes and Methods
/// <summary>
/// A SynchronizationContext that records whether Send() was ever invoked,
/// letting tests verify a callback was marshalled through the context.
/// </summary>
private class TestingSynchronizationContext : SynchronizationContext
{
    // Set to true the first time Send() runs.
    public bool DidSendOccur = false;

    // Fixed non-standard modifier order ("override public" -> "public override")
    // and replaced 'Object' with the C# keyword 'object'.
    public override void Send(SendOrPostCallback d, object state)
    {
        //Note: another idea was to install this syncContext on the executing thread.
        //unfortunately, the ExecutionContext business gets in the way and reestablishes a default SyncContext.
        DidSendOccur = true;
        base.Send(d, state); // call the delegate with our syncContext installed.
    }
}
/// <summary>
/// This syncContext uses a different thread to run the work
/// This is similar to how WindowsFormsSynchronizationContext works.
/// </summary>
private class ThreadCrossingSynchronizationContext : SynchronizationContext
{
    /// <summary>Set to true once Send() has been invoked. (Previously declared but never
    /// assigned, so it could never report true — fixed for consistency with
    /// TestingSynchronizationContext.)</summary>
    public bool DidSendOccur = false;

    public override void Send(SendOrPostCallback d, object state)
    {
        // Record the dispatch, mirroring TestingSynchronizationContext.
        DidSendOccur = true;

        Exception marshalledException = null;

        // Run the delegate on another thread via a Task, capturing any exception
        // so it can be rethrown on the calling thread below.
        Task t = new Task(
            (passedInState) =>
            {
                try
                {
                    d(passedInState);
                }
                catch (Exception e)
                {
                    marshalledException = e;
                }
            }, state);

        t.Start();
        t.Wait();

        // Marshal the captured exception back to the caller, preserved as an
        // inner exception. (Typo "propogated" fixed to "propagated".)
        if (marshalledException != null)
            throw new AggregateException("DUMMY: ThreadCrossingSynchronizationContext.Send captured and propagated an exception",
                marshalledException);
    }
}
/// <summary>
/// A test class derived from CancellationTokenSource
/// </summary>
internal class DerivedCTS : CancellationTokenSource
{
    // Records which Dispose(bool) paths were exercised; may be null.
    private DisposeTracker _disposeTracker;

    public DerivedCTS(DisposeTracker disposeTracker)
    {
        _disposeTracker = disposeTracker;
    }

    protected override void Dispose(bool disposing)
    {
        // Dispose any derived class state. DerivedCTS simply records that Dispose() has been called.
        DisposeTracker tracker = _disposeTracker;
        if (tracker != null)
        {
            if (disposing)
            {
                tracker.DisposeTrueCalled = true;
            }
            else
            {
                tracker.DisposeFalseCalled = true;
            }
        }

        // Let the CancellationTokenSource base class release its own state.
        base.Dispose(disposing);
    }

    /// <summary>
    /// A helper method to call Dispose(false). That allows us to easily simulate finalization of CTS, while still maintaining
    /// a reference to the CTS.
    /// </summary>
    public void DisposeUnmanaged()
    {
        Dispose(false);
    }

    ~DerivedCTS()
    {
        Dispose(false);
    }
}
/// <summary>
/// A simple class to track whether Dispose(bool) method has been called and if so, what was the bool flag.
/// </summary>
internal class DisposeTracker
{
    // True once Dispose(true) (explicit disposal) has been observed.
    public bool DisposeTrueCalled = false;
    // True once Dispose(false) (finalizer-style disposal) has been observed.
    public bool DisposeFalseCalled = false;
}
/// <summary>Installs <paramref name="sc"/> as the current SynchronizationContext.</summary>
public static void SetSynchronizationContext(SynchronizationContext sc)
    => SynchronizationContext.SetSynchronizationContext(sc);
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
    // Test driver for Sse2.Add over Vector128<double>: runs every scenario when
    // SSE2 is supported, otherwise verifies the intrinsic throws
    // PlatformNotSupportedException; throws if any scenario failed validation.
    // (File is auto-generated — see the header; changes belong in the template.)
    private static void AddDouble()
    {
        var test = new SimpleBinaryOpTest__AddDouble();

        if (test.IsSupported)
        {
            // Validates basic functionality works, using Unsafe.Read
            test.RunBasicScenario_UnsafeRead();

            // Validates basic functionality works, using Load
            test.RunBasicScenario_Load();

            // Validates basic functionality works, using LoadAligned
            test.RunBasicScenario_LoadAligned();

            // Validates calling via reflection works, using Unsafe.Read
            test.RunReflectionScenario_UnsafeRead();

            // Validates calling via reflection works, using Load
            test.RunReflectionScenario_Load();

            // Validates calling via reflection works, using LoadAligned
            test.RunReflectionScenario_LoadAligned();

            // Validates passing a static member works
            test.RunClsVarScenario();

            // Validates passing a local works, using Unsafe.Read
            test.RunLclVarScenario_UnsafeRead();

            // Validates passing a local works, using Load
            test.RunLclVarScenario_Load();

            // Validates passing a local works, using LoadAligned
            test.RunLclVarScenario_LoadAligned();

            // Validates passing the field of a local works
            test.RunLclFldScenario();

            // Validates passing an instance member works
            test.RunFldScenario();
        }
        else
        {
            // Validates we throw on unsupported hardware
            test.RunUnsupportedScenario();
        }

        if (!test.Succeeded)
        {
            throw new Exception("One or more scenarios did not complete as expected.");
        }
    }
}
// Auto-generated test harness (see file header) exercising Sse2.Add(Vector128<double>,
// Vector128<double>) through multiple operand-delivery paths, validating each result
// bit-for-bit against a scalar reference computation.
public sealed unsafe class SimpleBinaryOpTest__AddDouble
{
    // Vector128 is 16 bytes wide, so ElementCount == 2 doubles per vector.
    private const int VectorSize = 16;
    private const int ElementCount = VectorSize / sizeof(Double);

    // Backing arrays used to seed the vector operands below.
    private static Double[] _data1 = new Double[ElementCount];
    private static Double[] _data2 = new Double[ElementCount];

    // Static-field operands (RunClsVarScenario).
    private static Vector128<Double> _clsVar1;
    private static Vector128<Double> _clsVar2;

    // Instance-field operands (RunFldScenario / RunLclFldScenario).
    private Vector128<Double> _fld1;
    private Vector128<Double> _fld2;

    // Native-memory table supplying the (optionally aligned) in/out pointers
    // used by the pointer-based scenarios.
    private SimpleBinaryOpTest__DataTable<Double> _dataTable;

    // Seeds the class-level vector operands with random doubles.
    // NOTE(review): _data2 feeds _clsVar1 and _data1 feeds _clsVar2; the swap is
    // benign because RunClsVarScenario validates against the vectors themselves.
    static SimpleBinaryOpTest__AddDouble()
    {
        var random = new Random();

        for (var i = 0; i < ElementCount; i++) { _data1[i] = (double)(random.NextDouble()); _data2[i] = (double)(random.NextDouble()); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _clsVar1), ref Unsafe.As<Double, byte>(ref _data2[0]), VectorSize);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _clsVar2), ref Unsafe.As<Double, byte>(ref _data1[0]), VectorSize);
    }

    // Seeds the instance-field operands, then re-randomizes the arrays so the
    // pointer-based data table sees distinct inputs.
    public SimpleBinaryOpTest__AddDouble()
    {
        Succeeded = true;

        var random = new Random();

        for (var i = 0; i < ElementCount; i++) { _data1[i] = (double)(random.NextDouble()); _data2[i] = (double)(random.NextDouble()); }
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _fld1), ref Unsafe.As<Double, byte>(ref _data1[0]), VectorSize);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _fld2), ref Unsafe.As<Double, byte>(ref _data2[0]), VectorSize);

        for (var i = 0; i < ElementCount; i++) { _data1[i] = (double)(random.NextDouble()); _data2[i] = (double)(random.NextDouble()); }
        _dataTable = new SimpleBinaryOpTest__DataTable<Double>(_data1, _data2, new Double[ElementCount], VectorSize);
    }

    // True when the SSE2 intrinsics are usable on this hardware/JIT.
    public bool IsSupported => Sse2.IsSupported;

    // Cleared by ValidateResult on any mismatch.
    public bool Succeeded { get; set; }

    // Operands read from memory via Unsafe.Read.
    public void RunBasicScenario_UnsafeRead()
    {
        var result = Sse2.Add(
            Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr),
            Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr)
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Operands loaded with the unaligned LoadVector128 intrinsic.
    public void RunBasicScenario_Load()
    {
        var result = Sse2.Add(
            Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr)),
            Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr))
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Operands loaded with the aligned LoadAlignedVector128 intrinsic.
    public void RunBasicScenario_LoadAligned()
    {
        var result = Sse2.Add(
            Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr)),
            Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr))
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Invokes the intrinsic via reflection (exercises the non-intrinsic call path).
    public void RunReflectionScenario_UnsafeRead()
    {
        var result = typeof(Sse2).GetMethod(nameof(Sse2.Add), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) })
            .Invoke(null, new object[] {
                Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr),
                Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr)
            });

        Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Double>)(result));
        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Reflection invocation with unaligned-load operands.
    public void RunReflectionScenario_Load()
    {
        var result = typeof(Sse2).GetMethod(nameof(Sse2.Add), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) })
            .Invoke(null, new object[] {
                Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr)),
                Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr))
            });

        Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Double>)(result));
        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Reflection invocation with aligned-load operands.
    public void RunReflectionScenario_LoadAligned()
    {
        var result = typeof(Sse2).GetMethod(nameof(Sse2.Add), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) })
            .Invoke(null, new object[] {
                Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr)),
                Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr))
            });

        Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Double>)(result));
        ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
    }

    // Operands read from static fields.
    public void RunClsVarScenario()
    {
        var result = Sse2.Add(
            _clsVar1,
            _clsVar2
        );

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
    }

    // Operands held in locals populated via Unsafe.Read.
    public void RunLclVarScenario_UnsafeRead()
    {
        var left = Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr);
        var right = Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr);
        var result = Sse2.Add(left, right);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(left, right, _dataTable.outArrayPtr);
    }

    // Operands held in locals populated via unaligned loads.
    public void RunLclVarScenario_Load()
    {
        var left = Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr));
        var right = Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr));
        var result = Sse2.Add(left, right);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(left, right, _dataTable.outArrayPtr);
    }

    // Operands held in locals populated via aligned loads.
    public void RunLclVarScenario_LoadAligned()
    {
        var left = Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr));
        var right = Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr));
        var result = Sse2.Add(left, right);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(left, right, _dataTable.outArrayPtr);
    }

    // Operands read from the fields of a freshly constructed local instance.
    public void RunLclFldScenario()
    {
        var test = new SimpleBinaryOpTest__AddDouble();
        var result = Sse2.Add(test._fld1, test._fld2);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
    }

    // Operands read from this instance's fields.
    public void RunFldScenario()
    {
        var result = Sse2.Add(_fld1, _fld2);

        Unsafe.Write(_dataTable.outArrayPtr, result);
        ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
    }

    // On unsupported hardware the intrinsic must throw PlatformNotSupportedException.
    public void RunUnsupportedScenario()
    {
        Succeeded = false;

        try
        {
            RunBasicScenario_UnsafeRead();
        }
        catch (PlatformNotSupportedException)
        {
            Succeeded = true;
        }
    }

    // Copies the vector operands and the pointed-to result into managed arrays,
    // then defers to the array-based validator.
    private void ValidateResult(Vector128<Double> left, Vector128<Double> right, void* result, [CallerMemberName] string method = "")
    {
        Double[] inArray1 = new Double[ElementCount];
        Double[] inArray2 = new Double[ElementCount];
        Double[] outArray = new Double[ElementCount];

        Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left);
        Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);

        ValidateResult(inArray1, inArray2, outArray, method);
    }

    // Copies the raw operand and result buffers into managed arrays,
    // then defers to the array-based validator.
    private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "")
    {
        Double[] inArray1 = new Double[ElementCount];
        Double[] inArray2 = new Double[ElementCount];
        Double[] outArray = new Double[ElementCount];

        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize);
        Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);

        ValidateResult(inArray1, inArray2, outArray, method);
    }

    // Element-wise check: result[i] must equal left[i] + right[i] bit-for-bit
    // (DoubleToInt64Bits), so NaN payloads and signed zeros must match exactly.
    // Logs the operands and result on the first mismatch.
    private void ValidateResult(Double[] left, Double[] right, Double[] result, [CallerMemberName] string method = "")
    {
        if (BitConverter.DoubleToInt64Bits(left[0] + right[0]) != BitConverter.DoubleToInt64Bits(result[0]))
        {
            Succeeded = false;
        }
        else
        {
            for (var i = 1; i < left.Length; i++)
            {
                if (BitConverter.DoubleToInt64Bits(left[i] + right[i]) != BitConverter.DoubleToInt64Bits(result[i]))
                {
                    Succeeded = false;
                    break;
                }
            }
        }

        if (!Succeeded)
        {
            Console.WriteLine($"{nameof(Sse2)}.{nameof(Sse2.Add)}<Double>: {method} failed:");
            Console.WriteLine($" left: ({string.Join(", ", left)})");
            Console.WriteLine($" right: ({string.Join(", ", right)})");
            Console.WriteLine($" result: ({string.Join(", ", result)})");
            Console.WriteLine();
        }
    }
}
}
| |
#region License
// Copyright (c) K2 Workflow (SourceCode Technology Holdings Inc.). All rights reserved.
// Licensed under the MIT License. See LICENSE file in the project root for full license information.
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using Xunit;
namespace SourceCode.Chasm.Tests
{
public static class TreeNodeListTests
{
#region Constants

// Fixed nodes reused across the tests. Node0 and Node0Blob deliberately share
// the same name but differ in kind/sha, letting tests exercise name-collision
// handling; the other nodes are distinct.
private static readonly TreeNode Node0 = new TreeNode(nameof(Node0), NodeKind.Tree, Sha1.Hash(nameof(Node0)));
private static readonly TreeNode Node0Blob = new TreeNode(nameof(Node0), NodeKind.Blob, Sha1.Hash(nameof(Node0Blob)));
private static readonly TreeNode Node1 = new TreeNode(nameof(Node1), NodeKind.Blob, Sha1.Hash(nameof(Node1)));
private static readonly TreeNode Node2 = new TreeNode(nameof(Node2), NodeKind.Tree, Sha1.Hash(nameof(Node2)));
private static readonly TreeNode Node3 = new TreeNode(nameof(Node3), NodeKind.Blob, Sha1.Hash(nameof(Node3)));

#endregion
#region Methods

// Verifies that the given list exhibits every observable behavior of an empty
// TreeNodeList: equality with Empty, empty enumerations, and failing lookups.
private static void AssertEmpty(TreeNodeList list)
{
    Assert.Empty(list);
    Assert.Equal(TreeNodeList.Empty, list); // By design
    Assert.Equal(TreeNodeList.Empty.GetHashCode(), list.GetHashCode());
    Assert.Empty(list.Keys);

    // Lookups on an empty list must fail in every supported form.
    Assert.Throws<IndexOutOfRangeException>(() => list[0]);
    Assert.Throws<KeyNotFoundException>(() => list["x"]);
    Assert.False(list.TryGetValue("x", out _));
    Assert.False(list.TryGetValue("x", NodeKind.Blob, out _));

    Assert.False(list.Equals(new object()));
    Assert.Contains("Count: 0", list.ToString());
    Assert.Equal(-1, list.IndexOf(Guid.NewGuid().ToString()));
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Empty))]
public static void TreeNodeList_Empty()
{
    // Every construction path that supplies no nodes must behave identically.
    AssertEmpty(new TreeNodeList());
    AssertEmpty(new TreeNodeList(null));
    AssertEmpty(new TreeNodeList((IList<TreeNode>)null));
    AssertEmpty(new TreeNodeList(Array.Empty<TreeNode>()));

    // The canonical Empty instance is itself empty and equal to default.
    Assert.Empty(TreeNodeList.Empty);
    Assert.Equal(default, TreeNodeList.Empty);
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Sorting))]
public static void TreeNodeList_Sorting()
{
    // Builds lists from the same nodes supplied in ascending vs descending Sha1
    // order and verifies both normalize to the same sequence, with indexers,
    // ContainsKey and TryGetValue all agreeing. Repeated for 2, 3 and 4 nodes.

    // 2 nodes
    var nodes = new[] { Node0, Node1 };
    var tree0 = new TreeNodeList(nodes.OrderBy(n => n.Sha1).ToArray());
    var tree1 = new TreeNodeList(nodes.OrderByDescending(n => n.Sha1).ToList()); // ICollection<T>

    Assert.Equal(tree0[0], tree1[0]);
    Assert.Equal(tree0[1], tree1[1]);

    Assert.True(tree1[Node0.Name] == Node0);
    Assert.True(tree1[Node1.Name] == Node1);

    Assert.False(tree1.ContainsKey("x"));
    Assert.True(tree1.ContainsKey(Node0.Name));
    Assert.True(tree1.ContainsKey(Node1.Name));

    Assert.False(tree1.TryGetValue("x", out _));
    Assert.True(tree1.TryGetValue(Node0.Name, out var v20) && v20 == Node0);
    Assert.True(tree1.TryGetValue(Node1.Name, out var v21) && v21 == Node1);
    // Kind-filtered lookup: only matches when the kind agrees.
    Assert.False(tree1.TryGetValue(Node0.Name, NodeKind.Blob, out _));
    Assert.True(tree1.TryGetValue(Node0.Name, Node0.Kind, out _));

    // 3 nodes
    nodes = new[] { Node0, Node1, Node2 };
    tree0 = new TreeNodeList(nodes.OrderBy(n => n.Sha1).ToArray());
    tree1 = new TreeNodeList(nodes.OrderByDescending(n => n.Sha1).ToList()); // ICollection<T>

    Assert.True(tree1[Node0.Name] == Node0);
    Assert.True(tree1[Node1.Name] == Node1);
    Assert.True(tree1[Node2.Name] == Node2);

    Assert.False(tree1.ContainsKey("x"));
    Assert.True(tree1.ContainsKey(Node0.Name));
    Assert.True(tree1.ContainsKey(Node1.Name));
    Assert.True(tree1.ContainsKey(Node2.Name));

    Assert.False(tree1.TryGetValue("x", out _));
    Assert.True(tree1.TryGetValue(Node0.Name, out var v30) && v30 == Node0);
    Assert.True(tree1.TryGetValue(Node1.Name, out var v31) && v31 == Node1);
    Assert.True(tree1.TryGetValue(Node2.Name, out var v32) && v32 == Node2);

    Assert.Equal(tree0[0], tree1[0]);
    Assert.Equal(tree0[1], tree1[1]);
    Assert.Equal(tree0[2], tree1[2]);

    // 4 nodes
    nodes = new[] { Node0, Node1, Node2, Node3 };
    tree0 = new TreeNodeList(nodes.OrderBy(n => n.Sha1).ToArray());
    tree1 = new TreeNodeList(nodes.OrderByDescending(n => n.Sha1).ToList()); // ICollection<T>

    Assert.True(tree1[Node0.Name] == Node0);
    Assert.True(tree1[Node1.Name] == Node1);
    Assert.True(tree1[Node2.Name] == Node2);
    Assert.True(tree1[Node3.Name] == Node3);

    Assert.False(tree1.ContainsKey("x"));
    Assert.True(tree1.ContainsKey(Node0.Name));
    Assert.True(tree1.ContainsKey(Node1.Name));
    Assert.True(tree1.ContainsKey(Node2.Name));
    Assert.True(tree1.ContainsKey(Node3.Name));

    Assert.False(tree1.TryGetValue("x", out _));
    Assert.True(tree1.TryGetValue(Node0.Name, out var v40) && v40 == Node0);
    Assert.True(tree1.TryGetValue(Node1.Name, out var v41) && v41 == Node1);
    Assert.True(tree1.TryGetValue(Node2.Name, out var v42) && v42 == Node2);
    Assert.True(tree1.TryGetValue(Node3.Name, out var v43) && v43 == Node3);

    Assert.Equal(tree0[0], tree1[0]);
    Assert.Equal(tree0[1], tree1[1]);
    Assert.Equal(tree0[2], tree1[2]);
    Assert.Equal(tree0[3], tree1[3]);
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Duplicate_Full_2))]
public static void TreeNodeList_Duplicate_Full_2()
{
    // Two fully identical nodes should collapse to a single entry, via both the
    // array constructor and the ICollection<T> constructor.
    var nodes = new[] { Node0, Node0 };

    var fromArray = new TreeNodeList(nodes);
    Assert.Collection<TreeNode>(fromArray, n => Assert.Equal(n, Node0));

    var fromCollection = new TreeNodeList(nodes.ToList()); // ICollection<T>
    Assert.Collection<TreeNode>(fromCollection, n => Assert.Equal(n, Node0));
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Duplicate_Full_3))]
public static void TreeNodeList_Duplicate_Full_3()
{
    // A duplicated node among three should be de-duplicated, via both the
    // array constructor and the ICollection<T> constructor.
    var nodes = new[] { Node0, Node1, Node0 }; // Shuffled

    var fromArray = new TreeNodeList(nodes);
    Assert.Collection<TreeNode>(fromArray, n => Assert.Equal(n, Node0), n => Assert.Equal(n, Node1));

    var fromCollection = new TreeNodeList(nodes.ToList()); // ICollection<T>
    Assert.Collection<TreeNode>(fromCollection, n => Assert.Equal(n, Node0), n => Assert.Equal(n, Node1));
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Duplicate_Full_2_Exception))]
public static void TreeNodeList_Duplicate_Full_2_Exception()
{
    // Arrange: two nodes with the same name but different kind/sha — an
    // illegal duplicate.
    var nodes = new[] { Node0, Node0Blob }; // Shuffled

    // Act
    var ex = Assert.Throws<ArgumentException>(() => new TreeNodeList(nodes));

    // Assert: the exception message identifies the offending node.
    Assert.Contains(Node0.Name, ex.Message);
    Assert.Contains(Node0.Sha1.ToString(), ex.Message);
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Duplicate_Full_3_Exception))]
public static void TreeNodeList_Duplicate_Full_3_Exception()
{
    // Arrange: a same-name/different-content pair hidden among other nodes.
    var nodes = new[] { Node0, Node0Blob, Node1 }; // Shuffled

    // Act
    var ex = Assert.Throws<ArgumentException>(() => new TreeNodeList(nodes));

    // Assert: the exception message identifies the offending node.
    Assert.Contains(Node0.Name, ex.Message);
    Assert.Contains(Node0.Sha1.ToString(), ex.Message);
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Duplicate_Full_4))]
public static void TreeNodeList_Duplicate_Full_4()
{
    // A duplicate in a shuffled set of four should be dropped and the rest
    // sorted, via both constructor paths.
    var nodes = new[] { Node0, Node2, Node1, Node0 }; // Shuffled

    var fromArray = new TreeNodeList(nodes);
    Assert.Collection<TreeNode>(fromArray, n => Assert.Equal(n, Node0), n => Assert.Equal(n, Node1), n => Assert.Equal(n, Node2));

    var fromCollection = new TreeNodeList(nodes.ToList()); // ICollection<T>
    Assert.Collection<TreeNode>(fromCollection, n => Assert.Equal(n, Node0), n => Assert.Equal(n, Node1), n => Assert.Equal(n, Node2));
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Duplicate_Full_N))]
public static void TreeNodeList_Duplicate_Full_N()
{
    // Many duplicates of four distinct nodes should collapse to exactly those
    // four, sorted, via both constructor paths.
    var nodes = new[] { Node3, Node1, Node2, Node0, Node3, Node0, Node1, Node0, Node1, Node2, Node0, Node3 }; // Shuffled

    var fromArray = new TreeNodeList(nodes);
    Assert.Collection<TreeNode>(fromArray, n => Assert.Equal(n, Node0), n => Assert.Equal(n, Node1), n => Assert.Equal(n, Node2), n => Assert.Equal(n, Node3));

    var fromCollection = new TreeNodeList(nodes.ToList()); // ICollection<T>
    Assert.Collection<TreeNode>(fromCollection, n => Assert.Equal(n, Node0), n => Assert.Equal(n, Node1), n => Assert.Equal(n, Node2), n => Assert.Equal(n, Node3));
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Duplicate_Name))]
public static void TreeNodeList_Duplicate_Name()
{
    // A repeated name backed by different content must be rejected by both the
    // array constructor and the ICollection<T> constructor.
    var nodes = new[] { new TreeNode(Node0.Name, NodeKind.Tree, Node1.Sha1), Node0 }; // Reversed

    Assert.Throws<ArgumentException>(() => new TreeNodeList(nodes));
    Assert.Throws<ArgumentException>(() => new TreeNodeList(nodes.ToList())); // ICollection<T>
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Duplicate_Sha1))]
public static void TreeNodeList_Duplicate_Sha1()
{
    // A repeated Sha1 under a different name is legal: both entries survive,
    // sorted by name, via both constructor paths.
    var nodes = new[] { new TreeNode(Node1.Name, NodeKind.Tree, Node0.Sha1), Node0 }; // Reversed

    var fromArray = new TreeNodeList(nodes);
    Assert.Collection<TreeNode>(fromArray, n => Assert.Equal(n, Node0), n => Assert.Equal(n, nodes[0]));

    var fromCollection = new TreeNodeList(nodes.ToList()); // ICollection<T>
    Assert.Collection<TreeNode>(fromCollection, n => Assert.Equal(n, Node0), n => Assert.Equal(n, nodes[0]));
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Equality))]
public static void TreeNodeList_Equality()
{
    // Exercises Equals/GetHashCode and the ==/!= operators against equal lists,
    // a subset, a superset, and a same-count-but-different list.
    var expected = new TreeNodeList(new[] { new TreeNode("c1", NodeKind.Blob, Sha1.Hash("c1")), new TreeNode("c2", NodeKind.Tree, Sha1.Hash("c2")) });
    var node3 = new TreeNode("c3", NodeKind.Tree, Sha1.Hash("c3"));

    // Equal
    var actual = new TreeNodeList().Merge(expected);
    Assert.Equal(expected, actual);
    Assert.Equal(expected.GetHashCode(), actual.GetHashCode());
    Assert.True(actual.Equals((object)expected));
    Assert.True(expected == actual);
    Assert.False(expected != actual);

    // Less Nodes
    actual = new TreeNodeList().Merge(expected[0]);
    Assert.NotEqual(expected, actual);
    Assert.NotEqual(expected.GetHashCode(), actual.GetHashCode());
    Assert.False(actual.Equals((object)expected));
    Assert.False(expected == actual);
    Assert.True(expected != actual);

    // More Nodes
    actual = new TreeNodeList().Merge(expected).Merge(node3);
    Assert.NotEqual(expected, actual);
    Assert.NotEqual(expected.GetHashCode(), actual.GetHashCode());
    Assert.False(actual.Equals((object)expected));
    Assert.False(expected == actual);
    Assert.True(expected != actual);

    // Different Nodes
    actual = new TreeNodeList().Merge(expected[0]).Merge(node3);
    Assert.NotEqual(expected, actual); // hashcode is the same (node count)
    Assert.False(actual.Equals((object)expected));
    Assert.False(expected == actual);
    Assert.True(expected != actual);
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_IndexOf))]
public static void TreeNodeList_IndexOf()
{
    // Arrange
    var list = new TreeNodeList(new[] { Node0, Node1 });

    // Act / Assert
    Assert.Equal(-1, list.IndexOf(null));
    Assert.True(list.IndexOf(Guid.NewGuid().ToString()) < 0); // unknown key
    Assert.Equal(0, list.IndexOf(Node0.Name));
    Assert.Equal(1, list.IndexOf(Node1.Name));
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Merge_Empty))]
public static void TreeNodeList_Merge_Empty()
{
    var empty = new TreeNodeList();
    var node = new TreeNode("b", NodeKind.Blob, Sha1.Hash("Test1"));
    var single = new TreeNodeList(node);

    // Merging with an empty TreeNodeList is a no-op in either direction.
    Assert.Equal(single, single.Merge(empty));
    Assert.Equal(single, empty.Merge(single));

    // Same for the ICollection overload.
    Assert.Equal(single, single.Merge(Array.Empty<TreeNode>()));
    Assert.Equal(single, empty.Merge(single.Values.ToArray()));
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Merge_Null))]
public static void TreeNodeList_Merge_Null()
{
    // Arrange
    var original = new TreeNodeList(Node0);

    // Act: merging a null collection must be a no-op.
    var result = original.Merge(null);

    // Assert
    Assert.Equal(original, result);
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Merge_Single))]
public static void TreeNodeList_Merge_Single()
{
    // Merge seven nodes one at a time, deliberately out of name order.
    var list = new TreeNodeList()
        .Merge(new TreeNode("b", NodeKind.Blob, Sha1.Hash("Test1")))
        .Merge(new TreeNode("a", NodeKind.Tree, Sha1.Hash("Test2")))
        .Merge(new TreeNode("c", NodeKind.Blob, Sha1.Hash("Test3")))
        .Merge(new TreeNode("d", NodeKind.Tree, Sha1.Hash("Test4")))
        .Merge(new TreeNode("g", NodeKind.Blob, Sha1.Hash("Test5")))
        .Merge(new TreeNode("e", NodeKind.Tree, Sha1.Hash("Test6")))
        .Merge(new TreeNode("f", NodeKind.Blob, Sha1.Hash("Test7")));

    // The resulting keys must come back in strictly ascending ordinal order.
    var keys = list.Keys.ToArray();
    for (var i = 1; i < keys.Length; i++)
    {
        Assert.True(string.CompareOrdinal(keys[i], keys[i - 1]) > 0);
    }
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Merge_Single_Exist))]
public static void TreeNodeList_Merge_Single_Exist()
{
    // Arrange: a list that already contains an entry under the target name.
    var name = Guid.NewGuid().ToString();
    var kind = NodeKind.Tree;
    var sha1 = Sha1.Hash(Guid.NewGuid().ToString());

    var list = new TreeNodeList().Merge(new TreeNode(name, NodeKind.Blob, Sha1.Hash("Test1")));

    // Act: merging a node with the same name must replace the existing entry.
    var merged = list.Merge(new TreeNode(name, kind, sha1));
    var mergedNode = merged[name];

    // Assert: the replacement's kind and sha won.
    Assert.Equal(name, mergedNode.Name);
    Assert.Equal(kind, mergedNode.Kind);
    Assert.Equal(sha1, mergedNode.Sha1);
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Merge_TreeNodeList))]
public static void TreeNodeList_Merge_TreeNodeList()
{
    // Merges two TreeNodeLists with overlapping names ("d" and "g") and checks
    // the union is name-sorted, with list2's entries winning on collision.
    var list1 = new TreeNodeList();
    list1 = list1.Merge(new TreeNode("d", NodeKind.Tree, Sha1.Hash("Test4")));
    list1 = list1.Merge(new TreeNode("e", NodeKind.Tree, Sha1.Hash("Test5")));
    list1 = list1.Merge(new TreeNode("f", NodeKind.Blob, Sha1.Hash("Test6")));
    list1 = list1.Merge(new TreeNode("g", NodeKind.Blob, Sha1.Hash("Test7")));

    var list2 = new TreeNodeList();
    list2 = list2.Merge(new TreeNode("a", NodeKind.Tree, Sha1.Hash("Test1")));
    list2 = list2.Merge(new TreeNode("b", NodeKind.Blob, Sha1.Hash("Test2")));
    list2 = list2.Merge(new TreeNode("c", NodeKind.Blob, Sha1.Hash("Test3")));
    list2 = list2.Merge(new TreeNode("d", NodeKind.Tree, Sha1.Hash("Test4 Replace")));
    list2 = list2.Merge(new TreeNode("g", NodeKind.Blob, Sha1.Hash("Test5 Replace")));
    list2 = list2.Merge(new TreeNode("q", NodeKind.Tree, Sha1.Hash("Test8")));
    list2 = list2.Merge(new TreeNode("r", NodeKind.Blob, Sha1.Hash("Test9")));

    var list3 = list1.Merge(list2);

    // 4 + 7 inputs with 2 name collisions => 9 entries, sorted a..r.
    Assert.Equal(9, list3.Count);

    Assert.Equal("a", list3[0].Name);
    Assert.Equal("b", list3[1].Name);
    Assert.Equal("c", list3[2].Name);
    Assert.Equal("d", list3[3].Name);
    Assert.Equal("e", list3[4].Name);
    Assert.Equal("f", list3[5].Name);
    Assert.Equal("g", list3[6].Name);
    Assert.Equal("q", list3[7].Name);
    Assert.Equal("r", list3[8].Name);

    // Collided names ("d" at [3], "g" at [6]) took list2's sha; "e"/"f" kept list1's.
    Assert.Equal(list2[0].Sha1, list3[0].Sha1);
    Assert.Equal(list2[1].Sha1, list3[1].Sha1);
    Assert.Equal(list2[2].Sha1, list3[2].Sha1);
    Assert.Equal(list2[3].Sha1, list3[3].Sha1);
    Assert.Equal(list1[1].Sha1, list3[4].Sha1);
    Assert.Equal(list1[2].Sha1, list3[5].Sha1);
    Assert.Equal(list2[4].Sha1, list3[6].Sha1);
    Assert.Equal(list2[5].Sha1, list3[7].Sha1);
    Assert.Equal(list2[6].Sha1, list3[8].Sha1);
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_Merge_Collection))]
public static void TreeNodeList_Merge_Collection()
{
    // Same union scenario as TreeNodeList_Merge_TreeNodeList, but merging a raw
    // (unsorted) TreeNode[] via the ICollection overload.
    var list1 = new TreeNodeList();
    list1 = list1.Merge(new TreeNode("d", NodeKind.Tree, Sha1.Hash("Test4")));
    list1 = list1.Merge(new TreeNode("e", NodeKind.Tree, Sha1.Hash("Test5")));
    list1 = list1.Merge(new TreeNode("f", NodeKind.Blob, Sha1.Hash("Test6")));
    list1 = list1.Merge(new TreeNode("g", NodeKind.Blob, Sha1.Hash("Test7")));

    // Note: deliberately not name-sorted ("c" before "a").
    var list2 = new[]
    {
        new TreeNode("c", NodeKind.Blob, Sha1.Hash("Test3")),
        new TreeNode("a", NodeKind.Tree, Sha1.Hash("Test1")),
        new TreeNode("b", NodeKind.Blob, Sha1.Hash("Test2")),
        new TreeNode("d", NodeKind.Tree, Sha1.Hash("Test4 Replace")),
        new TreeNode("g", NodeKind.Blob, Sha1.Hash("Test5 Replace")),
        new TreeNode("q", NodeKind.Tree, Sha1.Hash("Test8")),
        new TreeNode("r", NodeKind.Blob, Sha1.Hash("Test9")),
    };

    var list3 = list1.Merge(list2);

    // 4 + 7 inputs with 2 name collisions => 9 entries, sorted a..r.
    Assert.Equal(9, list3.Count);

    Assert.Equal("a", list3[0].Name);
    Assert.Equal("b", list3[1].Name);
    Assert.Equal("c", list3[2].Name);
    Assert.Equal("d", list3[3].Name);
    Assert.Equal("e", list3[4].Name);
    Assert.Equal("f", list3[5].Name);
    Assert.Equal("g", list3[6].Name);
    Assert.Equal("q", list3[7].Name);
    Assert.Equal("r", list3[8].Name);

    // Sha checks index into the unsorted list2 array, hence the shuffled order.
    Assert.Equal(list2[1].Sha1, list3[0].Sha1);
    Assert.Equal(list2[2].Sha1, list3[1].Sha1);
    Assert.Equal(list2[0].Sha1, list3[2].Sha1);
    Assert.Equal(list2[3].Sha1, list3[3].Sha1);
    Assert.Equal(list1[1].Sha1, list3[4].Sha1);
    Assert.Equal(list1[2].Sha1, list3[5].Sha1);
    Assert.Equal(list2[4].Sha1, list3[6].Sha1);
    Assert.Equal(list2[5].Sha1, list3[7].Sha1);
    Assert.Equal(list2[6].Sha1, list3[8].Sha1);

    // Re-merge the same names with swapped shas (valid same-name replacements).
    var dupes = new[]
    {
        new TreeNode(list2[0].Name, list2[0].Kind, list2[1].Sha1),
        new TreeNode(list2[1].Name, list2[1].Kind, list2[2].Sha1),
        new TreeNode(list2[2].Name, list2[2].Kind, list2[3].Sha1),
        new TreeNode(list2[3].Name, list2[3].Kind, list2[0].Sha1)
    };

    // NOTE(review): the result of this merge is never asserted — presumably the
    // intent is just to verify it does not throw; consider adding assertions.
    list3 = list3.Merge(dupes);
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_IReadOnlyDictionary_Empty_GetEnumerator))]
public static void TreeNodeList_IReadOnlyDictionary_Empty_GetEnumerator()
{
// Arrange
var treeNodeList = new TreeNodeList();
var readOnlyDictionary = treeNodeList as IReadOnlyDictionary<string, TreeNode>;
// Action
var enumerator = readOnlyDictionary.GetEnumerator();
// Assert
Assert.False(enumerator.MoveNext());
var current = enumerator.Current;
Assert.Null(current.Key);
Assert.Equal(TreeNode.Empty, current.Value);
}
[Trait("Type", "Unit")]
[Fact(DisplayName = nameof(TreeNodeList_IReadOnlyDictionary_GetEnumerator))]
public static void TreeNodeList_IReadOnlyDictionary_GetEnumerator()
{
// Arrange
var nodes = new[] { Node0, Node1 };
var treeNodeList = new TreeNodeList(nodes);
var readOnlyDictionary = treeNodeList as IReadOnlyDictionary<string, TreeNode>;
// Action
var enumerator = readOnlyDictionary.GetEnumerator();
// Assert
Assert.True(enumerator.MoveNext());
Assert.Equal(Node0, enumerator.Current.Value);
Assert.True(enumerator.MoveNext());
Assert.Equal(Node1, enumerator.Current.Value);
Assert.False(enumerator.MoveNext());
Assert.Equal(Node1, enumerator.Current.Value);
}
#endregion
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Hyak.Common;
using Hyak.Common.Internals;
using Microsoft.Azure.Management.Authorization;
using Microsoft.Azure.Management.Authorization.Models;
using Newtonsoft.Json.Linq;
namespace Microsoft.Azure.Management.Authorization
{
/// <summary>
/// TBD (see http://TBD for more information)
/// </summary>
internal partial class RoleDefinitionOperations : IServiceOperations<AuthorizationManagementClient>, IRoleDefinitionOperations
{
/// <summary>
/// Initializes a new instance of the RoleDefinitionOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
internal RoleDefinitionOperations(AuthorizationManagementClient client)
{
this._client = client;
}
private AuthorizationManagementClient _client;
/// <summary>
/// Gets a reference to the
/// Microsoft.Azure.Management.Authorization.AuthorizationManagementClient.
/// </summary>
public AuthorizationManagementClient Client
{
get { return this._client; }
}
/// <summary>
/// Creates or updates a role definition.
/// </summary>
/// <param name='roleDefinitionId'>
/// Required. Role definition id.
/// </param>
/// <param name='parameters'>
/// Required. Role definition.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Role definition create or update operation result.
/// </returns>
        public async Task<RoleDefinitionCreateOrUpdateResult> CreateOrUpdateAsync(Guid roleDefinitionId, RoleDefinitionCreateOrUpdateParameters parameters, CancellationToken cancellationToken)
        {
            // Validate
            if (parameters == null)
            {
                throw new ArgumentNullException("parameters");
            }
            // Tracing
            bool shouldTrace = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("roleDefinitionId", roleDefinitionId);
                tracingParameters.Add("parameters", parameters);
                TracingAdapter.Enter(invocationId, this, "CreateOrUpdateAsync", tracingParameters);
            }
            // Construct URL:
            // PUT {baseUrl}/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/roleDefinitions/{roleDefinitionId}?api-version=2015-07-01
            string url = "";
            url = url + "/subscriptions/";
            if (this.Client.Credentials.SubscriptionId != null)
            {
                url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
            }
            url = url + "/providers/Microsoft.Authorization/roleDefinitions/";
            url = url + Uri.EscapeDataString(roleDefinitionId.ToString());
            List<string> queryParameters = new List<string>();
            queryParameters.Add("api-version=2015-07-01");
            if (queryParameters.Count > 0)
            {
                url = url + "?" + string.Join("&", queryParameters);
            }
            string baseUrl = this.Client.BaseUri.AbsoluteUri;
            // Trim '/' character from the end of baseUrl and beginning of url.
            if (baseUrl[baseUrl.Length - 1] == '/')
            {
                baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
            }
            if (url[0] == '/')
            {
                url = url.Substring(1);
            }
            url = baseUrl + "/" + url;
            url = url.Replace(" ", "%20");
            // Create HTTP transport objects
            HttpRequestMessage httpRequest = null;
            try
            {
                httpRequest = new HttpRequestMessage();
                httpRequest.Method = HttpMethod.Put;
                httpRequest.RequestUri = new Uri(url);
                // Set Headers
                httpRequest.Headers.Add("x-ms-version", "2015-07-01");
                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                // Serialize Request.
                // Null model members are omitted from the payload; lazy collections
                // (ILazyCollection) are serialized only if already initialized, so
                // serialization never forces them to load.
                string requestContent = null;
                JToken requestDoc = null;
                JObject propertiesValue = new JObject();
                requestDoc = propertiesValue;
                if (parameters.RoleDefinition != null)
                {
                    if (parameters.RoleDefinition.Id != null)
                    {
                        propertiesValue["id"] = parameters.RoleDefinition.Id;
                    }
                    // Name is a Guid (value type), so it is always written — no null check.
                    propertiesValue["name"] = parameters.RoleDefinition.Name.ToString();
                    if (parameters.RoleDefinition.Type != null)
                    {
                        propertiesValue["type"] = parameters.RoleDefinition.Type;
                    }
                    if (parameters.RoleDefinition.Properties != null)
                    {
                        JObject propertiesValue2 = new JObject();
                        propertiesValue["properties"] = propertiesValue2;
                        if (parameters.RoleDefinition.Properties.RoleName != null)
                        {
                            propertiesValue2["roleName"] = parameters.RoleDefinition.Properties.RoleName;
                        }
                        if (parameters.RoleDefinition.Properties.Description != null)
                        {
                            propertiesValue2["description"] = parameters.RoleDefinition.Properties.Description;
                        }
                        if (parameters.RoleDefinition.Properties.Type != null)
                        {
                            propertiesValue2["type"] = parameters.RoleDefinition.Properties.Type;
                        }
                        if (parameters.RoleDefinition.Properties.Permissions != null)
                        {
                            if (parameters.RoleDefinition.Properties.Permissions is ILazyCollection == false || ((ILazyCollection)parameters.RoleDefinition.Properties.Permissions).IsInitialized)
                            {
                                JArray permissionsArray = new JArray();
                                foreach (Permission permissionsItem in parameters.RoleDefinition.Properties.Permissions)
                                {
                                    JObject permissionValue = new JObject();
                                    permissionsArray.Add(permissionValue);
                                    if (permissionsItem.Actions != null)
                                    {
                                        if (permissionsItem.Actions is ILazyCollection == false || ((ILazyCollection)permissionsItem.Actions).IsInitialized)
                                        {
                                            JArray actionsArray = new JArray();
                                            foreach (string actionsItem in permissionsItem.Actions)
                                            {
                                                actionsArray.Add(actionsItem);
                                            }
                                            permissionValue["actions"] = actionsArray;
                                        }
                                    }
                                    if (permissionsItem.NotActions != null)
                                    {
                                        if (permissionsItem.NotActions is ILazyCollection == false || ((ILazyCollection)permissionsItem.NotActions).IsInitialized)
                                        {
                                            JArray notActionsArray = new JArray();
                                            foreach (string notActionsItem in permissionsItem.NotActions)
                                            {
                                                notActionsArray.Add(notActionsItem);
                                            }
                                            permissionValue["notActions"] = notActionsArray;
                                        }
                                    }
                                }
                                propertiesValue2["permissions"] = permissionsArray;
                            }
                        }
                        if (parameters.RoleDefinition.Properties.AssignableScopes != null)
                        {
                            if (parameters.RoleDefinition.Properties.AssignableScopes is ILazyCollection == false || ((ILazyCollection)parameters.RoleDefinition.Properties.AssignableScopes).IsInitialized)
                            {
                                JArray assignableScopesArray = new JArray();
                                foreach (string assignableScopesItem in parameters.RoleDefinition.Properties.AssignableScopes)
                                {
                                    assignableScopesArray.Add(assignableScopesItem);
                                }
                                propertiesValue2["assignableScopes"] = assignableScopesArray;
                            }
                        }
                    }
                }
                requestContent = requestDoc.ToString(Newtonsoft.Json.Formatting.Indented);
                httpRequest.Content = new StringContent(requestContent, Encoding.UTF8);
                httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
                // Send Request
                HttpResponseMessage httpResponse = null;
                try
                {
                    if (shouldTrace)
                    {
                        TracingAdapter.SendRequest(invocationId, httpRequest);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                    if (shouldTrace)
                    {
                        TracingAdapter.ReceiveResponse(invocationId, httpResponse);
                    }
                    HttpStatusCode statusCode = httpResponse.StatusCode;
                    // Only 201 (Created) is treated as success for this PUT; any
                    // other status is surfaced as a CloudException.
                    if (statusCode != HttpStatusCode.Created)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(httpRequest, requestContent, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (shouldTrace)
                        {
                            TracingAdapter.Error(invocationId, ex);
                        }
                        throw ex;
                    }
                    // Create Result
                    RoleDefinitionCreateOrUpdateResult result = null;
                    // Deserialize Response
                    // (defensive copy: missing or JSON-null tokens leave the
                    // corresponding model members at their defaults)
                    if (statusCode == HttpStatusCode.Created)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                        result = new RoleDefinitionCreateOrUpdateResult();
                        JToken responseDoc = null;
                        if (string.IsNullOrEmpty(responseContent) == false)
                        {
                            responseDoc = JToken.Parse(responseContent);
                        }
                        if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                        {
                            RoleDefinition roleDefinitionInstance = new RoleDefinition();
                            result.RoleDefinition = roleDefinitionInstance;
                            JToken idValue = responseDoc["id"];
                            if (idValue != null && idValue.Type != JTokenType.Null)
                            {
                                string idInstance = ((string)idValue);
                                roleDefinitionInstance.Id = idInstance;
                            }
                            JToken nameValue = responseDoc["name"];
                            if (nameValue != null && nameValue.Type != JTokenType.Null)
                            {
                                Guid nameInstance = Guid.Parse(((string)nameValue));
                                roleDefinitionInstance.Name = nameInstance;
                            }
                            JToken typeValue = responseDoc["type"];
                            if (typeValue != null && typeValue.Type != JTokenType.Null)
                            {
                                string typeInstance = ((string)typeValue);
                                roleDefinitionInstance.Type = typeInstance;
                            }
                            JToken propertiesValue3 = responseDoc["properties"];
                            if (propertiesValue3 != null && propertiesValue3.Type != JTokenType.Null)
                            {
                                RoleDefinitionProperties propertiesInstance = new RoleDefinitionProperties();
                                roleDefinitionInstance.Properties = propertiesInstance;
                                JToken roleNameValue = propertiesValue3["roleName"];
                                if (roleNameValue != null && roleNameValue.Type != JTokenType.Null)
                                {
                                    string roleNameInstance = ((string)roleNameValue);
                                    propertiesInstance.RoleName = roleNameInstance;
                                }
                                JToken descriptionValue = propertiesValue3["description"];
                                if (descriptionValue != null && descriptionValue.Type != JTokenType.Null)
                                {
                                    string descriptionInstance = ((string)descriptionValue);
                                    propertiesInstance.Description = descriptionInstance;
                                }
                                JToken typeValue2 = propertiesValue3["type"];
                                if (typeValue2 != null && typeValue2.Type != JTokenType.Null)
                                {
                                    string typeInstance2 = ((string)typeValue2);
                                    propertiesInstance.Type = typeInstance2;
                                }
                                JToken permissionsArray2 = propertiesValue3["permissions"];
                                if (permissionsArray2 != null && permissionsArray2.Type != JTokenType.Null)
                                {
                                    foreach (JToken permissionsValue in ((JArray)permissionsArray2))
                                    {
                                        Permission permissionInstance = new Permission();
                                        propertiesInstance.Permissions.Add(permissionInstance);
                                        JToken actionsArray2 = permissionsValue["actions"];
                                        if (actionsArray2 != null && actionsArray2.Type != JTokenType.Null)
                                        {
                                            foreach (JToken actionsValue in ((JArray)actionsArray2))
                                            {
                                                permissionInstance.Actions.Add(((string)actionsValue));
                                            }
                                        }
                                        JToken notActionsArray2 = permissionsValue["notActions"];
                                        if (notActionsArray2 != null && notActionsArray2.Type != JTokenType.Null)
                                        {
                                            foreach (JToken notActionsValue in ((JArray)notActionsArray2))
                                            {
                                                permissionInstance.NotActions.Add(((string)notActionsValue));
                                            }
                                        }
                                    }
                                }
                                JToken assignableScopesArray2 = propertiesValue3["assignableScopes"];
                                if (assignableScopesArray2 != null && assignableScopesArray2.Type != JTokenType.Null)
                                {
                                    foreach (JToken assignableScopesValue in ((JArray)assignableScopesArray2))
                                    {
                                        propertiesInstance.AssignableScopes.Add(((string)assignableScopesValue));
                                    }
                                }
                            }
                        }
                    }
                    result.StatusCode = statusCode;
                    if (httpResponse.Headers.Contains("x-ms-request-id"))
                    {
                        result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                    }
                    if (shouldTrace)
                    {
                        TracingAdapter.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    // Dispose the response even when deserialization throws.
                    if (httpResponse != null)
                    {
                        httpResponse.Dispose();
                    }
                }
            }
            finally
            {
                // Dispose the request even when URL/credential/serialization steps throw.
                if (httpRequest != null)
                {
                    httpRequest.Dispose();
                }
            }
        }
/// <summary>
/// Deletes the role definition.
/// </summary>
/// <param name='roleDefinitionId'>
/// Required. Role definition id.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Role definition delete operation result.
/// </returns>
        public async Task<RoleDefinitionDeleteResult> DeleteAsync(string roleDefinitionId, CancellationToken cancellationToken)
        {
            // Validate
            if (roleDefinitionId == null)
            {
                throw new ArgumentNullException("roleDefinitionId");
            }
            // Tracing
            bool shouldTrace = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("roleDefinitionId", roleDefinitionId);
                TracingAdapter.Enter(invocationId, this, "DeleteAsync", tracingParameters);
            }
            // Construct URL: DELETE {baseUrl}/{roleDefinitionId}?api-version=2015-07-01
            // NOTE(review): unlike CreateOrUpdateAsync, the id is appended verbatim
            // (no Uri.EscapeDataString) — presumably a fully-qualified resource id
            // path; confirm callers pass it pre-encoded.
            string url = "";
            url = url + "/";
            url = url + roleDefinitionId;
            List<string> queryParameters = new List<string>();
            queryParameters.Add("api-version=2015-07-01");
            if (queryParameters.Count > 0)
            {
                url = url + "?" + string.Join("&", queryParameters);
            }
            string baseUrl = this.Client.BaseUri.AbsoluteUri;
            // Trim '/' character from the end of baseUrl and beginning of url.
            if (baseUrl[baseUrl.Length - 1] == '/')
            {
                baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
            }
            if (url[0] == '/')
            {
                url = url.Substring(1);
            }
            url = baseUrl + "/" + url;
            url = url.Replace(" ", "%20");
            // Create HTTP transport objects
            HttpRequestMessage httpRequest = null;
            try
            {
                httpRequest = new HttpRequestMessage();
                httpRequest.Method = HttpMethod.Delete;
                httpRequest.RequestUri = new Uri(url);
                // Set Headers
                httpRequest.Headers.Add("x-ms-version", "2015-07-01");
                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                // Send Request
                HttpResponseMessage httpResponse = null;
                try
                {
                    if (shouldTrace)
                    {
                        TracingAdapter.SendRequest(invocationId, httpRequest);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                    if (shouldTrace)
                    {
                        TracingAdapter.ReceiveResponse(invocationId, httpResponse);
                    }
                    HttpStatusCode statusCode = httpResponse.StatusCode;
                    // Only 200 (OK) is treated as success; anything else becomes a CloudException.
                    if (statusCode != HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (shouldTrace)
                        {
                            TracingAdapter.Error(invocationId, ex);
                        }
                        throw ex;
                    }
                    // Create Result
                    RoleDefinitionDeleteResult result = null;
                    // Deserialize Response
                    // (the service echoes the deleted role definition back in the body;
                    // missing or JSON-null tokens leave model defaults untouched)
                    if (statusCode == HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                        result = new RoleDefinitionDeleteResult();
                        JToken responseDoc = null;
                        if (string.IsNullOrEmpty(responseContent) == false)
                        {
                            responseDoc = JToken.Parse(responseContent);
                        }
                        if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                        {
                            RoleDefinition roleDefinitionInstance = new RoleDefinition();
                            result.RoleDefinition = roleDefinitionInstance;
                            JToken idValue = responseDoc["id"];
                            if (idValue != null && idValue.Type != JTokenType.Null)
                            {
                                string idInstance = ((string)idValue);
                                roleDefinitionInstance.Id = idInstance;
                            }
                            JToken nameValue = responseDoc["name"];
                            if (nameValue != null && nameValue.Type != JTokenType.Null)
                            {
                                Guid nameInstance = Guid.Parse(((string)nameValue));
                                roleDefinitionInstance.Name = nameInstance;
                            }
                            JToken typeValue = responseDoc["type"];
                            if (typeValue != null && typeValue.Type != JTokenType.Null)
                            {
                                string typeInstance = ((string)typeValue);
                                roleDefinitionInstance.Type = typeInstance;
                            }
                            JToken propertiesValue = responseDoc["properties"];
                            if (propertiesValue != null && propertiesValue.Type != JTokenType.Null)
                            {
                                RoleDefinitionProperties propertiesInstance = new RoleDefinitionProperties();
                                roleDefinitionInstance.Properties = propertiesInstance;
                                JToken roleNameValue = propertiesValue["roleName"];
                                if (roleNameValue != null && roleNameValue.Type != JTokenType.Null)
                                {
                                    string roleNameInstance = ((string)roleNameValue);
                                    propertiesInstance.RoleName = roleNameInstance;
                                }
                                JToken descriptionValue = propertiesValue["description"];
                                if (descriptionValue != null && descriptionValue.Type != JTokenType.Null)
                                {
                                    string descriptionInstance = ((string)descriptionValue);
                                    propertiesInstance.Description = descriptionInstance;
                                }
                                JToken typeValue2 = propertiesValue["type"];
                                if (typeValue2 != null && typeValue2.Type != JTokenType.Null)
                                {
                                    string typeInstance2 = ((string)typeValue2);
                                    propertiesInstance.Type = typeInstance2;
                                }
                                JToken permissionsArray = propertiesValue["permissions"];
                                if (permissionsArray != null && permissionsArray.Type != JTokenType.Null)
                                {
                                    foreach (JToken permissionsValue in ((JArray)permissionsArray))
                                    {
                                        Permission permissionInstance = new Permission();
                                        propertiesInstance.Permissions.Add(permissionInstance);
                                        JToken actionsArray = permissionsValue["actions"];
                                        if (actionsArray != null && actionsArray.Type != JTokenType.Null)
                                        {
                                            foreach (JToken actionsValue in ((JArray)actionsArray))
                                            {
                                                permissionInstance.Actions.Add(((string)actionsValue));
                                            }
                                        }
                                        JToken notActionsArray = permissionsValue["notActions"];
                                        if (notActionsArray != null && notActionsArray.Type != JTokenType.Null)
                                        {
                                            foreach (JToken notActionsValue in ((JArray)notActionsArray))
                                            {
                                                permissionInstance.NotActions.Add(((string)notActionsValue));
                                            }
                                        }
                                    }
                                }
                                JToken assignableScopesArray = propertiesValue["assignableScopes"];
                                if (assignableScopesArray != null && assignableScopesArray.Type != JTokenType.Null)
                                {
                                    foreach (JToken assignableScopesValue in ((JArray)assignableScopesArray))
                                    {
                                        propertiesInstance.AssignableScopes.Add(((string)assignableScopesValue));
                                    }
                                }
                            }
                        }
                    }
                    result.StatusCode = statusCode;
                    if (httpResponse.Headers.Contains("x-ms-request-id"))
                    {
                        result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                    }
                    if (shouldTrace)
                    {
                        TracingAdapter.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    // Dispose the response even when deserialization throws.
                    if (httpResponse != null)
                    {
                        httpResponse.Dispose();
                    }
                }
            }
            finally
            {
                // Dispose the request even when URL/credential steps throw.
                if (httpRequest != null)
                {
                    httpRequest.Dispose();
                }
            }
        }
/// <summary>
/// Get role definition by name (GUID).
/// </summary>
/// <param name='roleDefinitionName'>
/// Required. Role definition name (GUID).
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Role definition get operation result.
/// </returns>
        public async Task<RoleDefinitionGetResult> GetAsync(Guid roleDefinitionName, CancellationToken cancellationToken)
        {
            // Validate
            // (nothing to validate: roleDefinitionName is a non-nullable Guid)
            // Tracing
            bool shouldTrace = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (shouldTrace)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("roleDefinitionName", roleDefinitionName);
                TracingAdapter.Enter(invocationId, this, "GetAsync", tracingParameters);
            }
            // Construct URL:
            // GET {baseUrl}/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/roleDefinitions/{roleDefinitionName}?api-version=2015-07-01
            string url = "";
            url = url + "/subscriptions/";
            if (this.Client.Credentials.SubscriptionId != null)
            {
                url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
            }
            url = url + "/providers/Microsoft.Authorization/roleDefinitions/";
            url = url + Uri.EscapeDataString(roleDefinitionName.ToString());
            List<string> queryParameters = new List<string>();
            queryParameters.Add("api-version=2015-07-01");
            if (queryParameters.Count > 0)
            {
                url = url + "?" + string.Join("&", queryParameters);
            }
            string baseUrl = this.Client.BaseUri.AbsoluteUri;
            // Trim '/' character from the end of baseUrl and beginning of url.
            if (baseUrl[baseUrl.Length - 1] == '/')
            {
                baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
            }
            if (url[0] == '/')
            {
                url = url.Substring(1);
            }
            url = baseUrl + "/" + url;
            url = url.Replace(" ", "%20");
            // Create HTTP transport objects
            HttpRequestMessage httpRequest = null;
            try
            {
                httpRequest = new HttpRequestMessage();
                httpRequest.Method = HttpMethod.Get;
                httpRequest.RequestUri = new Uri(url);
                // Set Headers
                httpRequest.Headers.Add("x-ms-version", "2015-07-01");
                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                // Send Request
                HttpResponseMessage httpResponse = null;
                try
                {
                    if (shouldTrace)
                    {
                        TracingAdapter.SendRequest(invocationId, httpRequest);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
                    if (shouldTrace)
                    {
                        TracingAdapter.ReceiveResponse(invocationId, httpResponse);
                    }
                    HttpStatusCode statusCode = httpResponse.StatusCode;
                    // Only 200 (OK) is treated as success; anything else becomes a CloudException.
                    if (statusCode != HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (shouldTrace)
                        {
                            TracingAdapter.Error(invocationId, ex);
                        }
                        throw ex;
                    }
                    // Create Result
                    RoleDefinitionGetResult result = null;
                    // Deserialize Response
                    // (defensive copy: missing or JSON-null tokens leave model defaults untouched)
                    if (statusCode == HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                        result = new RoleDefinitionGetResult();
                        JToken responseDoc = null;
                        if (string.IsNullOrEmpty(responseContent) == false)
                        {
                            responseDoc = JToken.Parse(responseContent);
                        }
                        if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                        {
                            RoleDefinition roleDefinitionInstance = new RoleDefinition();
                            result.RoleDefinition = roleDefinitionInstance;
                            JToken idValue = responseDoc["id"];
                            if (idValue != null && idValue.Type != JTokenType.Null)
                            {
                                string idInstance = ((string)idValue);
                                roleDefinitionInstance.Id = idInstance;
                            }
                            JToken nameValue = responseDoc["name"];
                            if (nameValue != null && nameValue.Type != JTokenType.Null)
                            {
                                Guid nameInstance = Guid.Parse(((string)nameValue));
                                roleDefinitionInstance.Name = nameInstance;
                            }
                            JToken typeValue = responseDoc["type"];
                            if (typeValue != null && typeValue.Type != JTokenType.Null)
                            {
                                string typeInstance = ((string)typeValue);
                                roleDefinitionInstance.Type = typeInstance;
                            }
                            JToken propertiesValue = responseDoc["properties"];
                            if (propertiesValue != null && propertiesValue.Type != JTokenType.Null)
                            {
                                RoleDefinitionProperties propertiesInstance = new RoleDefinitionProperties();
                                roleDefinitionInstance.Properties = propertiesInstance;
                                JToken roleNameValue = propertiesValue["roleName"];
                                if (roleNameValue != null && roleNameValue.Type != JTokenType.Null)
                                {
                                    string roleNameInstance = ((string)roleNameValue);
                                    propertiesInstance.RoleName = roleNameInstance;
                                }
                                JToken descriptionValue = propertiesValue["description"];
                                if (descriptionValue != null && descriptionValue.Type != JTokenType.Null)
                                {
                                    string descriptionInstance = ((string)descriptionValue);
                                    propertiesInstance.Description = descriptionInstance;
                                }
                                JToken typeValue2 = propertiesValue["type"];
                                if (typeValue2 != null && typeValue2.Type != JTokenType.Null)
                                {
                                    string typeInstance2 = ((string)typeValue2);
                                    propertiesInstance.Type = typeInstance2;
                                }
                                JToken permissionsArray = propertiesValue["permissions"];
                                if (permissionsArray != null && permissionsArray.Type != JTokenType.Null)
                                {
                                    foreach (JToken permissionsValue in ((JArray)permissionsArray))
                                    {
                                        Permission permissionInstance = new Permission();
                                        propertiesInstance.Permissions.Add(permissionInstance);
                                        JToken actionsArray = permissionsValue["actions"];
                                        if (actionsArray != null && actionsArray.Type != JTokenType.Null)
                                        {
                                            foreach (JToken actionsValue in ((JArray)actionsArray))
                                            {
                                                permissionInstance.Actions.Add(((string)actionsValue));
                                            }
                                        }
                                        JToken notActionsArray = permissionsValue["notActions"];
                                        if (notActionsArray != null && notActionsArray.Type != JTokenType.Null)
                                        {
                                            foreach (JToken notActionsValue in ((JArray)notActionsArray))
                                            {
                                                permissionInstance.NotActions.Add(((string)notActionsValue));
                                            }
                                        }
                                    }
                                }
                                JToken assignableScopesArray = propertiesValue["assignableScopes"];
                                if (assignableScopesArray != null && assignableScopesArray.Type != JTokenType.Null)
                                {
                                    foreach (JToken assignableScopesValue in ((JArray)assignableScopesArray))
                                    {
                                        propertiesInstance.AssignableScopes.Add(((string)assignableScopesValue));
                                    }
                                }
                            }
                        }
                    }
                    result.StatusCode = statusCode;
                    if (httpResponse.Headers.Contains("x-ms-request-id"))
                    {
                        result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                    }
                    if (shouldTrace)
                    {
                        TracingAdapter.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    // Dispose the response even when deserialization throws.
                    if (httpResponse != null)
                    {
                        httpResponse.Dispose();
                    }
                }
            }
            finally
            {
                // Dispose the request even when URL/credential steps throw.
                if (httpRequest != null)
                {
                    httpRequest.Dispose();
                }
            }
        }
/// <summary>
/// Get role definition by name (GUID).
/// </summary>
/// <param name='roleDefinitionId'>
/// Required. Role definition Id
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Role definition get operation result.
/// </returns>
public async Task<RoleDefinitionGetResult> GetByIdAsync(string roleDefinitionId, CancellationToken cancellationToken)
{
// Validate
if (roleDefinitionId == null)
{
throw new ArgumentNullException("roleDefinitionId");
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("roleDefinitionId", roleDefinitionId);
TracingAdapter.Enter(invocationId, this, "GetByIdAsync", tracingParameters);
}
// Construct URL
string url = "";
url = url + "/";
url = url + roleDefinitionId;
List<string> queryParameters = new List<string>();
queryParameters.Add("api-version=2015-07-01");
if (queryParameters.Count > 0)
{
url = url + "?" + string.Join("&", queryParameters);
}
string baseUrl = this.Client.BaseUri.AbsoluteUri;
// Trim '/' character from the end of baseUrl and beginning of url.
if (baseUrl[baseUrl.Length - 1] == '/')
{
baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
}
if (url[0] == '/')
{
url = url.Substring(1);
}
url = baseUrl + "/" + url;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Get;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("x-ms-version", "2015-07-01");
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
RoleDefinitionGetResult result = null;
// Deserialize Response
if (statusCode == HttpStatusCode.OK)
{
cancellationToken.ThrowIfCancellationRequested();
string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
result = new RoleDefinitionGetResult();
JToken responseDoc = null;
if (string.IsNullOrEmpty(responseContent) == false)
{
responseDoc = JToken.Parse(responseContent);
}
if (responseDoc != null && responseDoc.Type != JTokenType.Null)
{
RoleDefinition roleDefinitionInstance = new RoleDefinition();
result.RoleDefinition = roleDefinitionInstance;
JToken idValue = responseDoc["id"];
if (idValue != null && idValue.Type != JTokenType.Null)
{
string idInstance = ((string)idValue);
roleDefinitionInstance.Id = idInstance;
}
JToken nameValue = responseDoc["name"];
if (nameValue != null && nameValue.Type != JTokenType.Null)
{
Guid nameInstance = Guid.Parse(((string)nameValue));
roleDefinitionInstance.Name = nameInstance;
}
JToken typeValue = responseDoc["type"];
if (typeValue != null && typeValue.Type != JTokenType.Null)
{
string typeInstance = ((string)typeValue);
roleDefinitionInstance.Type = typeInstance;
}
JToken propertiesValue = responseDoc["properties"];
if (propertiesValue != null && propertiesValue.Type != JTokenType.Null)
{
RoleDefinitionProperties propertiesInstance = new RoleDefinitionProperties();
roleDefinitionInstance.Properties = propertiesInstance;
JToken roleNameValue = propertiesValue["roleName"];
if (roleNameValue != null && roleNameValue.Type != JTokenType.Null)
{
string roleNameInstance = ((string)roleNameValue);
propertiesInstance.RoleName = roleNameInstance;
}
JToken descriptionValue = propertiesValue["description"];
if (descriptionValue != null && descriptionValue.Type != JTokenType.Null)
{
string descriptionInstance = ((string)descriptionValue);
propertiesInstance.Description = descriptionInstance;
}
JToken typeValue2 = propertiesValue["type"];
if (typeValue2 != null && typeValue2.Type != JTokenType.Null)
{
string typeInstance2 = ((string)typeValue2);
propertiesInstance.Type = typeInstance2;
}
JToken permissionsArray = propertiesValue["permissions"];
if (permissionsArray != null && permissionsArray.Type != JTokenType.Null)
{
foreach (JToken permissionsValue in ((JArray)permissionsArray))
{
Permission permissionInstance = new Permission();
propertiesInstance.Permissions.Add(permissionInstance);
JToken actionsArray = permissionsValue["actions"];
if (actionsArray != null && actionsArray.Type != JTokenType.Null)
{
foreach (JToken actionsValue in ((JArray)actionsArray))
{
permissionInstance.Actions.Add(((string)actionsValue));
}
}
JToken notActionsArray = permissionsValue["notActions"];
if (notActionsArray != null && notActionsArray.Type != JTokenType.Null)
{
foreach (JToken notActionsValue in ((JArray)notActionsArray))
{
permissionInstance.NotActions.Add(((string)notActionsValue));
}
}
}
}
JToken assignableScopesArray = propertiesValue["assignableScopes"];
if (assignableScopesArray != null && assignableScopesArray.Type != JTokenType.Null)
{
foreach (JToken assignableScopesValue in ((JArray)assignableScopesArray))
{
propertiesInstance.AssignableScopes.Add(((string)assignableScopesValue));
}
}
}
}
}
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
/// <summary>
/// Get all role definitions.
/// </summary>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// Role definition list operation result.
/// </returns>
public async Task<RoleDefinitionListResult> ListAsync(CancellationToken cancellationToken)
{
    // NOTE(review): this appears to be generated client code (Hyak/AutoRest style) —
    // the statement order (trace -> build URL -> send -> parse -> dispose) is part of
    // the generator's contract; preserve it rather than refactoring by hand.
    // Validate
    // (this operation takes no parameters other than the cancellation token,
    // so there is nothing to validate)
    // Tracing
    bool shouldTrace = TracingAdapter.IsEnabled;
    string invocationId = null;
    if (shouldTrace)
    {
        invocationId = TracingAdapter.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        TracingAdapter.Enter(invocationId, this, "ListAsync", tracingParameters);
    }
    // Construct URL:
    //   {baseUrl}/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/roleDefinitions?api-version=2015-07-01
    string url = "";
    url = url + "/subscriptions/";
    if (this.Client.Credentials.SubscriptionId != null)
    {
        // Escape the subscription id so it is safe to embed in the URL path.
        url = url + Uri.EscapeDataString(this.Client.Credentials.SubscriptionId);
    }
    url = url + "/providers/Microsoft.Authorization/roleDefinitions";
    List<string> queryParameters = new List<string>();
    queryParameters.Add("api-version=2015-07-01");
    if (queryParameters.Count > 0)
    {
        url = url + "?" + string.Join("&", queryParameters);
    }
    string baseUrl = this.Client.BaseUri.AbsoluteUri;
    // Trim '/' character from the end of baseUrl and beginning of url.
    if (baseUrl[baseUrl.Length - 1] == '/')
    {
        baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
    }
    if (url[0] == '/')
    {
        url = url.Substring(1);
    }
    url = baseUrl + "/" + url;
    // Minimal escaping of spaces only; the path segments were escaped above.
    url = url.Replace(" ", "%20");
    // Create HTTP transport objects
    HttpRequestMessage httpRequest = null;
    try
    {
        httpRequest = new HttpRequestMessage();
        httpRequest.Method = HttpMethod.Get;
        httpRequest.RequestUri = new Uri(url);
        // Set Headers
        httpRequest.Headers.Add("x-ms-version", "2015-07-01");
        // Set Credentials (may add an Authorization header; honors cancellation first)
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        // Send Request
        HttpResponseMessage httpResponse = null;
        try
        {
            if (shouldTrace)
            {
                TracingAdapter.SendRequest(invocationId, httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            httpResponse = await this.Client.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
            if (shouldTrace)
            {
                TracingAdapter.ReceiveResponse(invocationId, httpResponse);
            }
            HttpStatusCode statusCode = httpResponse.StatusCode;
            // Any status other than 200 OK is surfaced as a CloudException carrying the body.
            if (statusCode != HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
                if (shouldTrace)
                {
                    TracingAdapter.Error(invocationId, ex);
                }
                throw ex;
            }
            // Create Result
            RoleDefinitionListResult result = null;
            // Deserialize Response (generated boilerplate re-checks the status code)
            if (statusCode == HttpStatusCode.OK)
            {
                cancellationToken.ThrowIfCancellationRequested();
                string responseContent = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                result = new RoleDefinitionListResult();
                JToken responseDoc = null;
                if (string.IsNullOrEmpty(responseContent) == false)
                {
                    responseDoc = JToken.Parse(responseContent);
                }
                if (responseDoc != null && responseDoc.Type != JTokenType.Null)
                {
                    // Top-level payload shape: { "value": [ <RoleDefinition>, ... ] }
                    JToken valueArray = responseDoc["value"];
                    if (valueArray != null && valueArray.Type != JTokenType.Null)
                    {
                        foreach (JToken valueValue in ((JArray)valueArray))
                        {
                            RoleDefinition roleDefinitionInstance = new RoleDefinition();
                            result.RoleDefinitions.Add(roleDefinitionInstance);
                            JToken idValue = valueValue["id"];
                            if (idValue != null && idValue.Type != JTokenType.Null)
                            {
                                string idInstance = ((string)idValue);
                                roleDefinitionInstance.Id = idInstance;
                            }
                            JToken nameValue = valueValue["name"];
                            if (nameValue != null && nameValue.Type != JTokenType.Null)
                            {
                                // NOTE(review): Guid.Parse throws FormatException on a
                                // malformed "name"; generated code does not guard this.
                                Guid nameInstance = Guid.Parse(((string)nameValue));
                                roleDefinitionInstance.Name = nameInstance;
                            }
                            JToken typeValue = valueValue["type"];
                            if (typeValue != null && typeValue.Type != JTokenType.Null)
                            {
                                string typeInstance = ((string)typeValue);
                                roleDefinitionInstance.Type = typeInstance;
                            }
                            JToken propertiesValue = valueValue["properties"];
                            if (propertiesValue != null && propertiesValue.Type != JTokenType.Null)
                            {
                                RoleDefinitionProperties propertiesInstance = new RoleDefinitionProperties();
                                roleDefinitionInstance.Properties = propertiesInstance;
                                JToken roleNameValue = propertiesValue["roleName"];
                                if (roleNameValue != null && roleNameValue.Type != JTokenType.Null)
                                {
                                    string roleNameInstance = ((string)roleNameValue);
                                    propertiesInstance.RoleName = roleNameInstance;
                                }
                                JToken descriptionValue = propertiesValue["description"];
                                if (descriptionValue != null && descriptionValue.Type != JTokenType.Null)
                                {
                                    string descriptionInstance = ((string)descriptionValue);
                                    propertiesInstance.Description = descriptionInstance;
                                }
                                JToken typeValue2 = propertiesValue["type"];
                                if (typeValue2 != null && typeValue2.Type != JTokenType.Null)
                                {
                                    string typeInstance2 = ((string)typeValue2);
                                    propertiesInstance.Type = typeInstance2;
                                }
                                // permissions: array of { actions: [...], notActions: [...] }
                                JToken permissionsArray = propertiesValue["permissions"];
                                if (permissionsArray != null && permissionsArray.Type != JTokenType.Null)
                                {
                                    foreach (JToken permissionsValue in ((JArray)permissionsArray))
                                    {
                                        Permission permissionInstance = new Permission();
                                        propertiesInstance.Permissions.Add(permissionInstance);
                                        JToken actionsArray = permissionsValue["actions"];
                                        if (actionsArray != null && actionsArray.Type != JTokenType.Null)
                                        {
                                            foreach (JToken actionsValue in ((JArray)actionsArray))
                                            {
                                                permissionInstance.Actions.Add(((string)actionsValue));
                                            }
                                        }
                                        JToken notActionsArray = permissionsValue["notActions"];
                                        if (notActionsArray != null && notActionsArray.Type != JTokenType.Null)
                                        {
                                            foreach (JToken notActionsValue in ((JArray)notActionsArray))
                                            {
                                                permissionInstance.NotActions.Add(((string)notActionsValue));
                                            }
                                        }
                                    }
                                }
                                JToken assignableScopesArray = propertiesValue["assignableScopes"];
                                if (assignableScopesArray != null && assignableScopesArray.Type != JTokenType.Null)
                                {
                                    foreach (JToken assignableScopesValue in ((JArray)assignableScopesArray))
                                    {
                                        propertiesInstance.AssignableScopes.Add(((string)assignableScopesValue));
                                    }
                                }
                            }
                        }
                    }
                }
            }
            result.StatusCode = statusCode;
            // Propagate the service-side correlation id, if present.
            if (httpResponse.Headers.Contains("x-ms-request-id"))
            {
                result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            if (shouldTrace)
            {
                TracingAdapter.Exit(invocationId, result);
            }
            return result;
        }
        finally
        {
            // Response disposed even when CloudException is thrown above.
            if (httpResponse != null)
            {
                httpResponse.Dispose();
            }
        }
    }
    finally
    {
        if (httpRequest != null)
        {
            httpRequest.Dispose();
        }
    }
}
}
}
| |
// Code generated by Microsoft (R) AutoRest Code Generator 1.2.1.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Searchservice
{
using Microsoft.Rest;
using Models;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// StorageAccounts operations.
/// </summary>
public partial interface IStorageAccounts
{
    /// <summary>
    /// Checks that account name is valid and is not in use.
    /// </summary>
    /// <param name='accountName'>
    /// The name of the storage account within the specified resource
    /// group. Storage account names must be between 3 and 24 characters in
    /// length and use numbers and lower-case letters only.
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="Microsoft.Rest.HttpOperationException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<HttpOperationResponse<CheckNameAvailabilityResult>> CheckNameAvailabilityWithHttpMessagesAsync(StorageAccountCheckNameAvailabilityParameters accountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Asynchronously creates a new storage account with the specified
    /// parameters. Existing accounts cannot be updated with this API and
    /// should instead use the Update Storage Account API. If an account is
    /// already created and subsequent PUT request is issued with exact
    /// same set of properties, then HTTP 200 would be returned.
    /// </summary>
    /// <param name='resourceGroupName'>
    /// The name of the resource group within the user's subscription.
    /// </param>
    /// <param name='accountName'>
    /// The name of the storage account within the specified resource
    /// group. Storage account names must be between 3 and 24 characters in
    /// length and use numbers and lower-case letters only.
    /// </param>
    /// <param name='parameters'>
    /// The parameters to provide for the created account.
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="Microsoft.Rest.HttpOperationException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<HttpOperationResponse<StorageAccount>> CreateWithHttpMessagesAsync(string resourceGroupName, string accountName, StorageAccountCreateParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Deletes a storage account in Microsoft Azure.
    /// </summary>
    /// <param name='resourceGroupName'>
    /// The name of the resource group within the user's subscription.
    /// </param>
    /// <param name='accountName'>
    /// The name of the storage account within the specified resource
    /// group. Storage account names must be between 3 and 24 characters in
    /// length and use numbers and lower-case letters only.
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="Microsoft.Rest.HttpOperationException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<HttpOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string accountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Returns the properties for the specified storage account including
    /// but not limited to name, account type, location, and account
    /// status. The ListKeys operation should be used to retrieve storage
    /// keys.
    /// </summary>
    /// <param name='resourceGroupName'>
    /// The name of the resource group within the user's subscription.
    /// </param>
    /// <param name='accountName'>
    /// The name of the storage account within the specified resource
    /// group. Storage account names must be between 3 and 24 characters in
    /// length and use numbers and lower-case letters only.
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="Microsoft.Rest.HttpOperationException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<HttpOperationResponse<StorageAccount>> GetPropertiesWithHttpMessagesAsync(string resourceGroupName, string accountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Updates the account type or tags for a storage account. It can also
    /// be used to add a custom domain (note that custom domains cannot be
    /// added via the Create operation). Only one custom domain is
    /// supported per storage account. In order to replace a custom domain,
    /// the old value must be cleared before a new value may be set. To
    /// clear a custom domain, simply update the custom domain with empty
    /// string. Then call update again with the new custom domain name. The
    /// update API can only be used to update one of tags, accountType, or
    /// customDomain per call. To update multiple of these properties, call
    /// the API multiple times with one change per call. This call does not
    /// change the storage keys for the account. If you want to change
    /// storage account keys, use the RegenerateKey operation. The location
    /// and name of the storage account cannot be changed after creation.
    /// </summary>
    /// <param name='resourceGroupName'>
    /// The name of the resource group within the user's subscription.
    /// </param>
    /// <param name='accountName'>
    /// The name of the storage account within the specified resource
    /// group. Storage account names must be between 3 and 24 characters in
    /// length and use numbers and lower-case letters only.
    /// </param>
    /// <param name='parameters'>
    /// The parameters to update on the account. Note that only one
    /// property can be changed at a time using this API.
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="Microsoft.Rest.HttpOperationException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<HttpOperationResponse<StorageAccount>> UpdateWithHttpMessagesAsync(string resourceGroupName, string accountName, StorageAccountUpdateParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Lists the access keys for the specified storage account.
    /// </summary>
    /// <param name='resourceGroupName'>
    /// The name of the resource group.
    /// </param>
    /// <param name='accountName'>
    /// The name of the storage account.
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="Microsoft.Rest.HttpOperationException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<HttpOperationResponse<StorageAccountKeys>> ListKeysWithHttpMessagesAsync(string resourceGroupName, string accountName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Lists all the storage accounts available under the subscription.
    /// Note that storage keys are not returned; use the ListKeys operation
    /// for this.
    /// </summary>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="Microsoft.Rest.HttpOperationException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<HttpOperationResponse<StorageAccountListResult>> ListWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Lists all the storage accounts available under the given resource
    /// group. Note that storage keys are not returned; use the ListKeys
    /// operation for this.
    /// </summary>
    /// <param name='resourceGroupName'>
    /// The name of the resource group within the user's subscription.
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="Microsoft.Rest.HttpOperationException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<HttpOperationResponse<StorageAccountListResult>> ListByResourceGroupWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Regenerates the access keys for the specified storage account.
    /// </summary>
    /// <param name='resourceGroupName'>
    /// The name of the resource group within the user's subscription.
    /// </param>
    /// <param name='accountName'>
    /// The name of the storage account within the specified resource
    /// group. Storage account names must be between 3 and 24 characters in
    /// length and use numbers and lower-case letters only.
    /// </param>
    /// <param name='regenerateKey'>
    /// Specifies name of the key which should be regenerated. key1 or key2
    /// for the default keys
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <exception cref="Microsoft.Rest.HttpOperationException">
    /// Thrown when the operation returned an invalid status code
    /// </exception>
    /// <exception cref="Microsoft.Rest.SerializationException">
    /// Thrown when unable to deserialize the response
    /// </exception>
    /// <exception cref="Microsoft.Rest.ValidationException">
    /// Thrown when a required parameter is null
    /// </exception>
    Task<HttpOperationResponse<StorageAccountKeys>> RegenerateKeyWithHttpMessagesAsync(string resourceGroupName, string accountName, StorageAccountRegenerateKeyParameters regenerateKey, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
}
}
| |
using System;
using System.Collections;
using System.IO;
using System.Xml;
using UnityEngine;
using UnityEditor;
namespace MATSDK {
[CustomEditor(typeof(MATSettings))]
public class MATSettingsEditor : Editor
{
    /// <summary>
    /// Minimum version of Google Play Services required for Google Advertising Id collection.
    /// These values never change at runtime, so they are compile-time constants
    /// rather than mutable instance fields.
    /// </summary>
    private const long MinGPSVersion = 4030530;
    // Common dialog captions.
    private const string sOk = "OK";
    private const string sCancel = "Cancel";
    private const string sSuccess = "Success";
    private const string sWarning = "Warning";

    public override void OnInspectorGUI()
    {
        SetupUI();
    }

    /// <summary>
    /// Draws the custom inspector: an Android section with a Google Play Services
    /// import button, and a delegate-callbacks section with a helper button that
    /// creates the "MobileAppTracker" GameObject.
    /// </summary>
    private void SetupUI()
    {
        GUI.skin.label.wordWrap = true;
        GUI.skin.button.wordWrap = true;
        EditorGUILayout.BeginHorizontal();
        EditorGUILayout.LabelField("Android", EditorStyles.boldLabel);
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.HelpBox("For Android builds, the MAT plugin requires a copy of the Google Play Services 4.0+ library.\n",
                                MessageType.Info);
        EditorGUILayout.BeginHorizontal();
        if (GUILayout.Button("Import Google Play Services"))
        {
            ImportGooglePlayServices();
        }
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.Space();
        EditorGUILayout.Space();
        EditorGUILayout.Space();
        EditorGUILayout.Space();
        EditorGUILayout.BeginHorizontal();
        EditorGUILayout.LabelField("Delegate Callbacks", EditorStyles.boldLabel);
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.BeginHorizontal();
        GUILayout.Label("You may debug server requests with the MATDelegate.cs functions by enabling delegate callbacks: \n\n" +
                        "MATBinding.SetDelegate(true); \n\n" +
                        "The MATDelegate.cs script must be attached to a GameObject named \"MobileAppTracker\" in order to receive the callbacks.\n");
        EditorGUILayout.EndHorizontal();
        EditorGUILayout.BeginHorizontal();
        if (GUILayout.Button("Create MobileAppTracker GameObject"))
        {
            // Only create the GameObject if one does not already exist in the scene.
            if (GameObject.Find("MobileAppTracker") == null)
            {
                var obj = new GameObject("MobileAppTracker");
                obj.AddComponent<MATDelegate>();
            }
            else
            {
                EditorUtility.DisplayDialog("MobileAppTracker exists", "A MobileAppTracker GameObject already exists", sOk);
            }
        }
        EditorGUILayout.EndHorizontal();
    }

    /// <summary>
    /// Copies the Google Play Services library project from the Android SDK into
    /// Assets/Plugins/Android, after validating the SDK path and the library version.
    /// Shows a dialog describing the failure (or success) at each step.
    /// </summary>
    private void ImportGooglePlayServices()
    {
        string sdkPath = GetAndroidSdkPath();
        string gpsLibPath = FixSlashes(sdkPath) + FixSlashes("/extras/google/google_play_services/libproject/google-play-services_lib");
        string gpsLibVersion = gpsLibPath + FixSlashes("/res/values/version.xml");
        string gpsLibDestDir = FixSlashes("Assets/Plugins/Android/google-play-services_lib");
        // Check that Android SDK is there
        if (!HasAndroidSdk())
        {
            Debug.LogError("Android SDK not found.");
            EditorUtility.DisplayDialog("Android SDK not found",
                                        "The Android SDK path was not found. Please configure it in Unity > Edit > Preferences > External Tools.",
                                        sOk);
            return;
        }
        // Check that the Google Play Services lib project is there
        if (!System.IO.Directory.Exists(gpsLibPath) || !System.IO.File.Exists(gpsLibVersion))
        {
            Debug.LogError("Google Play Services lib project not found at: " + gpsLibPath);
            EditorUtility.DisplayDialog("Google Play Services library not found",
                                        "Google Play Services could not be found in your Android SDK installation.\n" +
                                        "Install from the SDK Manager under Extras > Google Play Services.", sOk);
            return;
        }
        // Check GPS lib version for 4.0+ to support Advertising Id; the user may
        // choose to continue anyway from the dialog inside CheckForLibVersion.
        if (!CheckForLibVersion(gpsLibVersion))
        {
            return;
        }
        // Create Assets/Plugins and Assets/Plugins/Android if not existing
        CheckDirExists("Assets/Plugins");
        CheckDirExists("Assets/Plugins/Android");
        // Delete any existing google_play_services_lib destination directory so the
        // copy below starts from a clean slate.
        DeleteDirIfExists(gpsLibDestDir);
        // Copy Google Play Services library
        FileUtil.CopyFileOrDirectory(gpsLibPath, gpsLibDestDir);
        // Refresh assets, and we're done
        AssetDatabase.Refresh();
        EditorUtility.DisplayDialog(sSuccess,
                                    "Google Play Services imported successfully to Assets/Plugins/Android.", sOk);
    }

    /// <summary>
    /// Reads the Google Play Services version from res/values/version.xml and
    /// compares it against <see cref="MinGPSVersion"/>.
    /// Returns true to proceed with the import (version OK, or the user accepted
    /// the warning dialog); false to abort.
    /// </summary>
    private bool CheckForLibVersion(string gpsLibVersionFile)
    {
        var root = new XmlDocument();
        root.Load(gpsLibVersionFile);
        // Read the version number from the res/values/version.xml
        var versionNode = root.SelectSingleNode("resources/integer[@name='google_play_services_version']");
        if (versionNode != null)
        {
            var version = versionNode.InnerText;
            if (version == null || version == "")
            {
                Debug.LogError("Google Play Services lib version could not be read from: " + gpsLibVersionFile);
                return EditorUtility.DisplayDialog(sWarning,
                                                   string.Format(
                                                       "The version of your Google Play Services could not be determined. Please make sure it is " +
                                                       "at least version {0}. Continue?",
                                                       MinGPSVersion),
                                                   sOk, sCancel);
            }
            else
            {
                // Convert version to long and compare to min version
                long versionNum = System.Convert.ToInt64(version);
                if (versionNum < MinGPSVersion)
                {
                    // Fixed message text: the original concatenation was missing
                    // inter-sentence spaces and contained an unmatched ')'.
                    return EditorUtility.DisplayDialog(sWarning,
                                                       string.Format(
                                                           "Your version of Google Play Services does not support Google Advertising Id. " +
                                                           "Please update your Google Play Services to 4.0+. " +
                                                           "Your version: {0}; required version: {1}. Proceed anyway?",
                                                           versionNum,
                                                           MinGPSVersion),
                                                       sOk, sCancel);
                }
            }
        }
        return true;
    }

    /// <summary>
    /// Returns the Android SDK root configured in the Unity editor preferences,
    /// with any trailing path separators removed. May return null or empty when
    /// the preference is unset.
    /// </summary>
    private string GetAndroidSdkPath()
    {
        string sdkPath = EditorPrefs.GetString("AndroidSdkRoot");
        // TrimEnd handles one *or more* trailing separators (the original code
        // only stripped a single one).
        if (sdkPath != null)
        {
            sdkPath = sdkPath.TrimEnd('/', '\\');
        }
        return sdkPath;
    }

    /// <summary>True when a non-empty Android SDK path is configured and exists on disk.</summary>
    private bool HasAndroidSdk()
    {
        string sdkPath = GetAndroidSdkPath();
        return sdkPath != null && sdkPath.Trim() != "" && System.IO.Directory.Exists(sdkPath);
    }

    /// <summary>Creates the directory (with platform-native separators) if it does not exist.</summary>
    private void CheckDirExists(string dir)
    {
        // Reuse FixSlashes instead of duplicating the separator replacement inline.
        dir = FixSlashes(dir);
        if (!System.IO.Directory.Exists(dir))
        {
            System.IO.Directory.CreateDirectory(dir);
        }
    }

    /// <summary>Recursively deletes the directory if it exists.</summary>
    private void DeleteDirIfExists(string dir)
    {
        if (System.IO.Directory.Exists(dir))
        {
            System.IO.Directory.Delete(dir, true);
        }
    }

    /// <summary>Converts forward slashes to the platform's directory separator.</summary>
    private string FixSlashes(string path)
    {
        return path.Replace("/", System.IO.Path.DirectorySeparatorChar.ToString());
    }
}
}
| |
// *****************************************************************************
//
// (c) Crownwood Consulting Limited 2002
// All rights reserved. The software and associated documentation
// supplied hereunder are the proprietary information of Crownwood Consulting
// Limited, Haxey, North Lincolnshire, England and are supplied subject to
// licence terms.
//
// IDE Version 1.7 www.dotnetmagic.com
// *****************************************************************************
using System;
using System.IO;
using System.Xml;
using System.Text;
using System.Data;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
using IDE.Win32;
using IDE.Common;
using IDE.Controls;
namespace IDE.Controls
{
[ToolboxBitmap(typeof(TabbedGroups))]
internal class TabbedGroups : UserControl, ISupportInitialize, IMessageFilter
{
internal class DragProvider
{
    // Arbitrary payload carried through a drag-and-drop operation.
    // Protected so derived providers can access it directly.
    protected object _tag;

    public DragProvider()
        : this(null)
    {
        // Default provider carries no payload.
    }

    public DragProvider(object tag)
    {
        _tag = tag;
    }

    // Gets or sets the payload object associated with this provider.
    public object Tag
    {
        get { return _tag; }
        set { _tag = value; }
    }
}
// Controls when tab headers are shown on the hosted tab controls.
// Member names are self-describing; exact rendering behavior is implemented
// elsewhere in TabbedGroups (not visible here).
internal enum DisplayTabModes
{
    HideAll,                 // never show tabs
    ShowAll,                 // always show tabs
    ShowActiveLeaf,          // tabs only on the active leaf
    ShowMouseOver,           // tabs only on the leaf under the mouse
    ShowActiveAndMouseOver   // union of the previous two modes
}
// Bit flags selecting which simplifications the layout-compacting pass applies.
// Values are powers of two so they can be OR-combined; All is the union (15).
internal enum CompactFlags
{
    RemoveEmptyTabLeaf = 1,
    RemoveEmptyTabSequence = 2,
    ReduceSingleEntries = 4,
    ReduceSameDirection = 8,
    All = 15
}
// Instance fields
protected int _numLeafs;                      // count of leaf groups currently hosted
protected int _defMinWidth;                   // default minimum width for new groups
protected int _defMinHeight;                  // default minimum height for new groups
// Localizable text for the per-tab context menu entries.
protected string _closeMenuText;
protected string _prominentMenuText;
protected string _rebalanceMenuText;
protected string _movePreviousMenuText;
protected string _moveNextMenuText;
protected string _newVerticalMenuText;
protected string _newHorizontalMenuText;
protected ImageList _imageList;               // images shared by hosted tab controls
protected bool _dirty;                        // has the layout changed since last save/reset
protected bool _autoCalculateDirty;           // when true, _dirty is maintained automatically
protected bool _saveControls;                 // include hosted controls when persisting layout
protected bool _initializing;                 // true between BeginInit/EndInit (ISupportInitialize)
protected bool _atLeastOneLeaf;               // enforce that at least one leaf always exists
protected bool _autoCompact;                  // run the compacting pass automatically
protected bool _compacting;                   // re-entrancy guard while compacting
protected bool _resizeBarLock;                // when set, resize bars cannot be dragged
protected int _resizeBarVector;               // thickness/offset of the resize bar (see ResetResizeBarVector)
protected Color _resizeBarColor;              // color used to draw resize bars
// Keyboard shortcuts mirroring the context-menu commands.
protected Shortcut _closeShortcut;
protected Shortcut _prominentShortcut;
protected Shortcut _rebalanceShortcut;
protected Shortcut _movePreviousShortcut;
protected Shortcut _moveNextShortcut;
protected Shortcut _splitVerticalShortcut;
protected Shortcut _splitHorizontalShortcut;
//protected Shortcut _nextTabShortcut;
protected CompactFlags _compactOptions;       // which CompactFlags simplifications apply
protected DisplayTabModes _displayTabMode;    // when tab headers are displayed
protected TabGroupLeaf _prominentLeaf;        // leaf shown maximized, or null
protected TabGroupLeaf _activeLeaf;           // leaf with focus/selection
protected TabGroupSequence _root;             // root of the group tree; always exists
protected VisualStyle _style;                 // visual style used for painting
// Delegates for events
internal delegate void TabControlCreatedHandler(TabbedGroups tg, Controls.TabControl tc);
internal delegate void PageCloseRequestHandler(TabbedGroups tg, TGCloseRequestEventArgs e);
internal delegate void PageContextMenuHandler(TabbedGroups tg, TGContextMenuEventArgs e);
internal delegate void GlobalSavingHandler(TabbedGroups tg, XmlTextWriter xmlOut);
internal delegate void GlobalLoadingHandler(TabbedGroups tg, XmlTextReader xmlIn);
internal delegate void PageSavingHandler(TabbedGroups tg, TGPageSavingEventArgs e);
internal delegate void PageLoadingHandler(TabbedGroups tg, TGPageLoadingEventArgs e);
internal delegate void ExternalDropHandler(TabbedGroups tg, TabGroupLeaf tgl, Controls.TabControl tc, DragProvider dp);
// Instance events raised by the control; see the matching delegate signatures above.
public event TabControlCreatedHandler TabControlCreated;
public event PageCloseRequestHandler PageCloseRequest;
public event PageContextMenuHandler PageContextMenu;
public event GlobalSavingHandler GlobalSaving;
public event GlobalLoadingHandler GlobalLoading;
public event PageSavingHandler PageSaving;
public event PageLoadingHandler PageLoading;
public event EventHandler ProminentLeafChanged;
public event EventHandler ActiveLeafChanged;
public event EventHandler DirtyChanged;
public event ExternalDropHandler ExternalDrop;
// Construct using the default IDE visual style.
public TabbedGroups()
    : this(VisualStyle.IDE)
{
}

// Construct using the caller supplied visual style.
public TabbedGroups(VisualStyle style)
{
    InternalConstruct(style);
}
// Shared one-time initialization used by both constructors.
// NOTE: the Reset* calls below establish every property default; their
// relative order matters for some (e.g. ResetAtLeastOneLeaf may create a
// leaf, so ResetDirty runs last to leave the control in a clean state).
protected void InternalConstruct(VisualStyle style)
{
// Prevent flicker with double buffering and all painting inside WM_PAINT
SetStyle(ControlStyles.DoubleBuffer, true);
SetStyle(ControlStyles.AllPaintingInWmPaint, true);
// We want to act as a drop target
this.AllowDrop = true;
// Remember parameters
_style = style;
// Define initial state
_numLeafs = 0;
_compacting = false;
_initializing = false;
// Create the root sequence that always exists
_root = new TabGroupSequence(this);
// Define default settings
ResetProminentLeaf();
ResetResizeBarVector();
ResetResizeBarColor();
ResetResizeBarLock();
ResetCompactOptions();
ResetDefaultGroupMinimumWidth();
ResetDefaultGroupMinimumHeight();
ResetActiveLeaf();
ResetAutoCompact();
ResetAtLeastOneLeaf();
ResetCloseMenuText();
ResetProminentMenuText();
ResetRebalanceMenuText();
ResetMovePreviousMenuText();
ResetMoveNextMenuText();
ResetNewVerticalMenuText();
ResetNewHorizontalMenuText();
ResetCloseShortcut();
ResetProminentShortcut();
ResetRebalanceShortcut();
ResetMovePreviousShortcut();
ResetMoveNextShortcut();
ResetSplitVerticalShortcut();
ResetSplitHorizontalShortcut();
ResetImageList();
ResetDisplayTabMode();
ResetSaveControls();
ResetAutoCalculateDirty();
ResetDirty();
// Add ourself to the application filtering list
// (to snoop for shortcut combinations; see PreFilterMessage)
Application.AddMessageFilter(this);
}
// Visual style applied to this control and propagated to all children.
[Category("TabbedGroups")]
[DefaultValue(typeof(VisualStyle), "IDE")]
public VisualStyle Style
{
get { return _style; }
set
{
if (_style != value)
{
_style = value;
// Propagate to all children
Notify(TabGroupBase.NotifyCode.StyleChanged);
}
}
}
public void ResetStyle()
{
Style = VisualStyle.IDE;
}
// Root of the group hierarchy; created once and never replaced.
[Browsable(false)]
public TabGroupSequence RootSequence
{
get { return _root; }
}
// Resize bar sizing value; -1 selects the built-in default.
[Category("TabbedGroups")]
[DefaultValue(-1)]
public int ResizeBarVector
{
get { return _resizeBarVector; }
set
{
if (_resizeBarVector != value)
{
_resizeBarVector = value;
// Propagate to all children
Notify(TabGroupBase.NotifyCode.ResizeBarVectorChanged);
}
}
}
public void ResetResizeBarVector()
{
ResizeBarVector = -1;
}
// Colour used to draw the resize bars; defaults to the control BackColor.
[Category("TabbedGroups")]
public Color ResizeBarColor
{
get { return _resizeBarColor; }
set
{
if (!_resizeBarColor.Equals(value))
{
_resizeBarColor = value;
// Propagate to all children
Notify(TabGroupBase.NotifyCode.ResizeBarColorChanged);
}
}
}
// NOTE(review): name says 'ResizeBackColor' but the property is
// 'ResizeBarColor'. The designer binds ShouldSerialize<PropertyName> by
// reflection, so this method is probably never invoked — confirm and
// consider renaming to ShouldSerializeResizeBarColor.
protected bool ShouldSerializeResizeBackColor()
{
return _resizeBarColor != base.BackColor;
}
public void ResetResizeBarColor()
{
ResizeBarColor = base.BackColor;
}
// When true the user cannot drag the resize bars.
[Category("TabbedGroups")]
[DefaultValue(false)]
public bool ResizeBarLock
{
get { return _resizeBarLock; }
set { _resizeBarLock = value; }
}
public void ResetResizeBarLock()
{
ResizeBarLock = false;
}
// Leaf shown prominently (maximized); null means normal layout.
[Category("TabbedGroups")]
public TabGroupLeaf ProminentLeaf
{
get { return _prominentLeaf; }
set
{
if (_prominentLeaf != value)
{
_prominentLeaf = value;
// Mark layout as dirty
if (_autoCalculateDirty)
_dirty = true;
// Propagate to all children
Notify(TabGroupBase.NotifyCode.ProminentChanged);
OnProminentLeafChanged(EventArgs.Empty);
}
}
}
public void ResetProminentLeaf()
{
ProminentLeaf = null;
}
// Which redundancy-removal actions the parameterless Compact() performs.
[Category("TabbedGroups")]
[DefaultValue(typeof(CompactFlags), "All")]
public CompactFlags CompactOptions
{
get { return _compactOptions; }
set { _compactOptions = value; }
}
public void ResetCompactOptions()
{
CompactOptions = CompactFlags.All;
}
// Minimum pixel width applied to newly created groups.
[Category("TabbedGroups")]
[DefaultValue(4)]
public int DefaultGroupMinimumWidth
{
get { return _defMinWidth; }
set
{
if (_defMinWidth != value)
{
_defMinWidth = value;
// Propagate to all children
Notify(TabGroupBase.NotifyCode.MinimumSizeChanged);
}
}
}
public void ResetDefaultGroupMinimumWidth()
{
DefaultGroupMinimumWidth = 4;
}
// Minimum pixel height applied to newly created groups.
[Category("TabbedGroups")]
[DefaultValue(4)]
public int DefaultGroupMinimumHeight
{
get { return _defMinHeight; }
set
{
if (_defMinHeight != value)
{
_defMinHeight = value;
// Propagate to all children
Notify(TabGroupBase.NotifyCode.MinimumSizeChanged);
}
}
}
public void ResetDefaultGroupMinimumHeight()
{
DefaultGroupMinimumHeight = 4;
}
// Localizable captions for the built-in context menu commands.
// Each property has a matching Reset* that restores the English default;
// '&' marks the menu mnemonic character.
[Localizable(true)]
[Category("Text String")]
[DefaultValue("&Close")]
public string CloseMenuText
{
get { return _closeMenuText; }
set { _closeMenuText = value; }
}
public void ResetCloseMenuText()
{
CloseMenuText = "&Close";
}
[Localizable(true)]
[Category("Text String")]
[DefaultValue("Pro&minent")]
public string ProminentMenuText
{
get { return _prominentMenuText; }
set { _prominentMenuText = value; }
}
public void ResetProminentMenuText()
{
ProminentMenuText = "Pro&minent";
}
[Localizable(true)]
[Category("Text String")]
[DefaultValue("&Rebalance")]
public string RebalanceMenuText
{
get { return _rebalanceMenuText; }
set { _rebalanceMenuText = value; }
}
public void ResetRebalanceMenuText()
{
RebalanceMenuText = "&Rebalance";
}
[Localizable(true)]
[Category("Text String")]
[DefaultValue("Move to &Previous Tab Group")]
public string MovePreviousMenuText
{
get { return _movePreviousMenuText; }
set { _movePreviousMenuText = value; }
}
public void ResetMovePreviousMenuText()
{
MovePreviousMenuText = "Move to &Previous Tab Group";
}
[Localizable(true)]
[Category("Text String")]
[DefaultValue("Move to &Next Tab Group")]
public string MoveNextMenuText
{
get { return _moveNextMenuText; }
set { _moveNextMenuText = value; }
}
public void ResetMoveNextMenuText()
{
MoveNextMenuText = "Move to &Next Tab Group";
}
[Localizable(true)]
[Category("Text String")]
[DefaultValue("New &Vertical Tab Group")]
public string NewVerticalMenuText
{
get { return _newVerticalMenuText; }
set { _newVerticalMenuText = value; }
}
public void ResetNewVerticalMenuText()
{
NewVerticalMenuText = "New &Vertical Tab Group";
}
[Localizable(true)]
[Category("Text String")]
[DefaultValue("New &Horizontal Tab Group")]
public string NewHorizontalMenuText
{
get { return _newHorizontalMenuText; }
set { _newHorizontalMenuText = value; }
}
public void ResetNewHorizontalMenuText()
{
NewHorizontalMenuText = "New &Horizontal Tab Group";
}
// Keyboard shortcuts for the built-in commands (dispatched by TestShortcut).
// Shortcut is an enum with no [DefaultValue]-friendly literal, so each
// property pairs a ShouldSerialize*/Reset* for designer serialization.
[Category("Shortcuts")]
public Shortcut CloseShortcut
{
get { return _closeShortcut; }
set { _closeShortcut = value; }
}
protected bool ShouldSerializeCloseShortcut()
{
return !_closeShortcut.Equals(Shortcut.CtrlShiftC);
}
public void ResetCloseShortcut()
{
CloseShortcut = Shortcut.CtrlShiftC;
}
[Category("Shortcuts")]
public Shortcut ProminentShortcut
{
get { return _prominentShortcut; }
set { _prominentShortcut = value; }
}
protected bool ShouldSerializeProminentShortcut()
{
return !_prominentShortcut.Equals(Shortcut.CtrlShiftT);
}
public void ResetProminentShortcut()
{
ProminentShortcut = Shortcut.CtrlShiftT;
}
[Category("Shortcuts")]
public Shortcut RebalanceShortcut
{
get { return _rebalanceShortcut; }
set { _rebalanceShortcut = value; }
}
protected bool ShouldSerializeRebalanceShortcut()
{
return !_rebalanceShortcut.Equals(Shortcut.CtrlShiftR);
}
public void ResetRebalanceShortcut()
{
RebalanceShortcut = Shortcut.CtrlShiftR;
}
[Category("Shortcuts")]
public Shortcut MovePreviousShortcut
{
get { return _movePreviousShortcut; }
set { _movePreviousShortcut = value; }
}
protected bool ShouldSerializeMovePreviousShortcut()
{
return !_movePreviousShortcut.Equals(Shortcut.CtrlShiftP);
}
public void ResetMovePreviousShortcut()
{
MovePreviousShortcut = Shortcut.CtrlShiftP;
}
[Category("Shortcuts")]
public Shortcut MoveNextShortcut
{
get { return _moveNextShortcut; }
set { _moveNextShortcut = value; }
}
protected bool ShouldSerializeMoveNextShortcut()
{
return !_moveNextShortcut.Equals(Shortcut.CtrlShiftN);
}
public void ResetMoveNextShortcut()
{
MoveNextShortcut = Shortcut.CtrlShiftN;
}
[Category("Shortcuts")]
public Shortcut SplitVerticalShortcut
{
get { return _splitVerticalShortcut; }
set { _splitVerticalShortcut = value; }
}
protected bool ShouldSerializeSplitVerticalShortcut()
{
return !_splitVerticalShortcut.Equals(Shortcut.CtrlShiftV);
}
public void ResetSplitVerticalShortcut()
{
SplitVerticalShortcut = Shortcut.CtrlShiftV;
}
[Category("Shortcuts")]
public Shortcut SplitHorizontalShortcut
{
get { return _splitHorizontalShortcut; }
set { _splitHorizontalShortcut = value; }
}
protected bool ShouldSerializeSplitHorizontalShortcut()
{
return !_splitHorizontalShortcut.Equals(Shortcut.CtrlShiftH);
}
public void ResetSplitHorizontalShortcut()
{
SplitHorizontalShortcut = Shortcut.CtrlShiftH;
}
// Image list shared by all hosted TabControls. Children are notified both
// before (ImageListChanging) and after (ImageListChanged) the swap so they
// can detach from the old list first.
[Category("TabbedGroups")]
public ImageList ImageList
{
get { return _imageList; }
set
{
if (_imageList != value)
{
// Propagate to all children
Notify(TabGroupBase.NotifyCode.ImageListChanging);
_imageList = value;
// Propagate to all children
Notify(TabGroupBase.NotifyCode.ImageListChanged);
}
}
}
protected bool ShouldSerializeImageList()
{
return _imageList != null;
}
public void ResetImageList()
{
ImageList = null;
}
// Controls when tab headers are visible on hosted TabControls.
[Category("TabbedGroups")]
[DefaultValue(typeof(DisplayTabModes), "ShowAll")]
public DisplayTabModes DisplayTabMode
{
get { return _displayTabMode; }
set
{
if (_displayTabMode != value)
{
_displayTabMode = value;
// Propagate to all children
Notify(TabGroupBase.NotifyCode.DisplayTabMode);
}
}
}
public void ResetDisplayTabMode()
{
DisplayTabMode = DisplayTabModes.ShowAll;
}
// Should page contents be written out by the SaveConfig* methods.
[Category("TabbedGroups")]
[DefaultValue(true)]
public bool SaveControls
{
get { return _saveControls; }
set { _saveControls = value; }
}
public void ResetSaveControls()
{
SaveControls = true;
}
// Has the layout changed since the last save/load. Raises DirtyChanged on
// transitions; never designer-serialized (ShouldSerializeDirty => false).
[Category("TabbedGroups")]
public bool Dirty
{
get { return _dirty; }
set
{
if (_dirty != value)
{
_dirty = value;
OnDirtyChanged(EventArgs.Empty);
}
}
}
protected bool ShouldSerializeDirty()
{
return false;
}
public void ResetDirty()
{
Dirty = false;
}
// When true, layout mutations set Dirty automatically.
[Category("TabbedGroups")]
[DefaultValue(true)]
public bool AutoCalculateDirty
{
get { return _autoCalculateDirty; }
set { _autoCalculateDirty = value; }
}
public void ResetAutoCalculateDirty()
{
AutoCalculateDirty = true;
}
// Leaf that holds the selection highlight. Setting it un-bolds the old
// leaf's selected page, bolds the new one, and re-applies the tab display
// mode when it depends on which leaf is active.
// NOTE(review): both branches assume GroupControl is a Controls.TabControl;
// a failed 'as' cast would throw NullReferenceException — confirm the
// invariant holds for all leaf types.
[Category("TabbedGroups")]
public TabGroupLeaf ActiveLeaf
{
get { return _activeLeaf; }
set
{
if (_activeLeaf != value)
{
// Mark layout as dirty
if (_autoCalculateDirty)
_dirty = true;
// Remove selection highlight from old leaf
if (_activeLeaf != null)
{
// Get access to the contained tab control
TabControl tc = _activeLeaf.GroupControl as Controls.TabControl;
// Remove bold text for the selected page
tc.BoldSelectedPage = false;
_activeLeaf = null;
}
// Set selection highlight on new active leaf
if (value != null)
{
// Get access to the contained tab control
TabControl tc = value.GroupControl as Controls.TabControl;
// Use bold text for the selected page
tc.BoldSelectedPage = true;
_activeLeaf = value;
}
// Is the tab mode dependant on the active leaf value
if ((_displayTabMode == DisplayTabModes.ShowActiveLeaf) ||
(_displayTabMode == DisplayTabModes.ShowActiveAndMouseOver))
{
// Yes, better notify a change in value so it can be applied
Notify(TabGroupBase.NotifyCode.DisplayTabMode);
}
OnActiveLeafChanged(EventArgs.Empty);
}
}
}
public void ResetActiveLeaf()
{
ActiveLeaf = null;
}
// Policy: keep at least one leaf alive. Turning it on creates a default
// leaf when none exist; turning it off compacts away redundant ones.
[Category("TabbedGroups")]
public bool AtLeastOneLeaf
{
get { return _atLeastOneLeaf; }
set
{
if (_atLeastOneLeaf != value)
{
_atLeastOneLeaf = value;
// Do always need at least one leaf?
if (_atLeastOneLeaf)
{
// Is there at least one?
if (_numLeafs == 0)
{
// No, create a default entry for the root sequence
_root.AddNewLeaf();
// Mark layout as dirty
if (_autoCalculateDirty)
_dirty = true;
}
}
else
{
// Are there some potential leaves not needed
if (_numLeafs > 0)
{
// Use compaction so only needed ones are retained
if (_autoCompact)
Compact();
}
}
}
}
}
public void ResetAtLeastOneLeaf()
{
AtLeastOneLeaf = true;
}
// Automatically call Compact() when groups become redundant.
[Category("TabbedGroups")]
[DefaultValue(true)]
public bool AutoCompact
{
    get { return _autoCompact; }
    set { _autoCompact = value; }
}

public void ResetAutoCompact()
{
    // Route through the property rather than assigning the field directly,
    // for consistency with every other Reset* method in this class (the
    // setter has no side effects today, so behaviour is unchanged, but any
    // future setter logic will no longer be bypassed).
    AutoCompact = true;
}
// Rebalance the entire hierarchy; defaults to a recursive pass.
public void Rebalance()
{
    Rebalance(true);
}

// Rebalance the root sequence, optionally recursing into children.
public void Rebalance(bool recurse)
{
    _root.Rebalance(recurse);
}

// Compact using the currently configured CompactOptions.
public void Compact()
{
    Compact(_compactOptions);
}
// Remove redundant groups according to 'flags'. Guarded against re-entrancy
// (removals can trigger nested compact requests) and suppressed entirely
// while BeginInit/EndInit initialization is in progress.
public void Compact(CompactFlags flags)
{
// When entries are removed because of compacting this may cause the container object
// to start a compacting request. Prevent this recursion by using a simple variable.
if (!_compacting)
{
// We never compact when loading/initializing the contents
if (!_initializing)
{
_compacting = true;
_root.Compact(flags);
_compacting = false;
// Compacting may have removed the last leaf; restore policy
EnforceAtLeastOneLeaf();
}
}
}
// First leaf in document order, or null when the hierarchy is empty.
public TabGroupLeaf FirstLeaf()
{
    return RecursiveFindLeafInSequence(_root, true);
}

// Last leaf in document order, or null when the hierarchy is empty.
public TabGroupLeaf LastLeaf()
{
    return RecursiveFindLeafInSequence(_root, false);
}

// Leaf following 'current' in document order, or null if none.
public TabGroupLeaf NextLeaf(TabGroupLeaf current)
{
    // A leaf is always hosted inside a sequence; bail out if not
    TabGroupSequence parent = current.Parent as TabGroupSequence;

    if (parent == null)
        return null;

    return RecursiveFindLeafInSequence(parent, current, true);
}

// Leaf preceding 'current' in document order, or null if none.
public TabGroupLeaf PreviousLeaf(TabGroupLeaf current)
{
    // A leaf is always hosted inside a sequence; bail out if not
    TabGroupSequence parent = current.Parent as TabGroupSequence;

    if (parent == null)
        return null;

    return RecursiveFindLeafInSequence(parent, current, false);
}
// Move the active-leaf highlight to the leaf nearest to 'oldLeaf', used
// when 'oldLeaf' is being removed from the hierarchy.
internal void MoveActiveToNearestFromLeaf(TabGroupBase oldLeaf)
{
// Must have a reference to begin movement
if (oldLeaf != null)
{
// Find the parent sequence of leaf, remember that a
// leaf must be contained within a sequence instance
TabGroupSequence tgs = oldLeaf.Parent as TabGroupSequence;
// Must be valid, but had better check anyway
if (tgs != null)
{
// Move relative to given base in the sequence
MoveActiveInSequence(tgs, oldLeaf);
}
}
}
// Move the active-leaf highlight away from a sequence that is being
// removed. Removing the root clears the active leaf; otherwise the search
// continues relative to the sequence's position in its parent.
internal void MoveActiveToNearestFromSequence(TabGroupSequence tgs)
{
    // Is active leaf being moved from root sequence
    if (_root == tgs)
    {
        // Then make nothing active
        ActiveLeaf = null;
    }
    else
    {
        // Find the parent sequence of given sequence
        TabGroupSequence tgsParent = tgs.Parent as TabGroupSequence;

        // BUGFIX: the original tested 'tgs' here (already known to be
        // non-null), so a failed cast left 'tgsParent' null and caused a
        // NullReferenceException inside MoveActiveInSequence. Test the
        // variable that is actually used.
        if (tgsParent != null)
        {
            // Move relative to given base in the sequence
            MoveActiveInSequence(tgsParent, tgs);
        }
    }
}
// Called whenever a leaf creates its hosted TabControl: applies the group
// wide defaults (image list, style, tab display mode), bumps the leaf
// count and raises the TabControlCreated event.
public virtual void OnTabControlCreated(Controls.TabControl tc)
{
// Remember how many leafs there are
_numLeafs++;
// Define default values
tc.Appearance = IDE.Controls.TabControl.VisualAppearance.MultiDocument;
tc.BoldSelectedPage = false;
tc.IDEPixelBorder = true;
tc.ImageList = _imageList;
tc.Style = _style;
// Apply the current display tab mode setting
// (other DisplayTabModes values are handled dynamically elsewhere)
switch(_displayTabMode)
{
case TabbedGroups.DisplayTabModes.ShowAll:
tc.HideTabsMode = IDE.Controls.TabControl.HideTabsModes.ShowAlways;
break;
case TabbedGroups.DisplayTabModes.HideAll:
tc.HideTabsMode = IDE.Controls.TabControl.HideTabsModes.HideAlways;
break;
}
// Has anyone registered for the event?
if (TabControlCreated != null)
TabControlCreated(this, tc);
}
// Standard event raisers: each checks for a subscriber before invoking so
// an unsubscribed event is a no-op. Virtual so derived classes can extend.
public virtual void OnPageCloseRequested(TGCloseRequestEventArgs e)
{
// Has anyone registered for the event?
if (PageCloseRequest != null)
PageCloseRequest(this, e);
}
public virtual void OnPageContextMenu(TGContextMenuEventArgs e)
{
// Has anyone registered for the event?
if (PageContextMenu != null)
PageContextMenu(this, e);
}
public virtual void OnGlobalSaving(XmlTextWriter xmlOut)
{
// Has anyone registered for the event?
if (GlobalSaving != null)
GlobalSaving(this, xmlOut);
}
public virtual void OnGlobalLoading(XmlTextReader xmlIn)
{
// Has anyone registered for the event?
if (GlobalLoading != null)
GlobalLoading(this, xmlIn);
}
public virtual void OnPageSaving(TGPageSavingEventArgs e)
{
// Has anyone registered for the event?
if (PageSaving != null)
PageSaving(this, e);
}
public virtual void OnPageLoading(TGPageLoadingEventArgs e)
{
// Has anyone registered for the event?
if (PageLoading != null)
PageLoading(this, e);
}
public virtual void OnProminentLeafChanged(EventArgs e)
{
// Has anyone registered for the event?
if (ProminentLeafChanged != null)
ProminentLeafChanged(this, e);
}
public virtual void OnActiveLeafChanged(EventArgs e)
{
// Has anyone registered for the event?
if (ActiveLeafChanged != null)
ActiveLeafChanged(this, e);
}
public virtual void OnDirtyChanged(EventArgs e)
{
// Has anyone registered for the event?
if (DirtyChanged != null)
DirtyChanged(this, e);
}
public virtual void OnExternalDrop(TabGroupLeaf tgl, Controls.TabControl tc, DragProvider dp)
{
// Has anyone registered for the event?
if (ExternalDrop != null)
ExternalDrop(this, tgl, tc, dp);
}
// ISupportInitialize-style batching: between BeginInit and EndInit,
// compacting and repositioning are suppressed (see Compact).
public void BeginInit()
{
_initializing = true;
}
public void EndInit()
{
_initializing = false;
// Inform the root sequence to reposition itself
_root.Reposition();
}
// True while BeginInit/EndInit initialization is in progress.
public bool Initializing
{
get { return _initializing; }
}
// Serialize the current configuration to a byte array using Unicode.
public byte[] SaveConfigToArray()
{
    return SaveConfigToArray(Encoding.Unicode);
}

// Serialize the current configuration to a byte array with the given
// text encoding.
public byte[] SaveConfigToArray(Encoding encoding)
{
    // Create a memory based stream; 'using' guarantees disposal even if
    // serialization throws
    using (MemoryStream ms = new MemoryStream())
    {
        // Save into the memory stream
        SaveConfigToStream(ms, encoding);

        // BUGFIX: the original returned ms.GetBuffer(), which exposes the
        // stream's internal buffer including unused trailing capacity, so
        // callers received garbage bytes after the XML. ToArray() returns
        // exactly the bytes written.
        return ms.ToArray();
    }
}
// Serialize the current configuration to a file using Unicode.
public void SaveConfigToFile(string filename)
{
    SaveConfigToFile(filename, Encoding.Unicode);
}

// Serialize the current configuration to a file with the given encoding.
public void SaveConfigToFile(string filename, Encoding encoding)
{
    // Create/Overwrite existing file; 'using' ensures the handle is
    // released even when serialization throws (the original leaked the
    // FileStream on the exception path)
    using (FileStream fs = new FileStream(filename, FileMode.Create))
    {
        // Save into the file stream
        SaveConfigToStream(fs, encoding);
    }
}
// Write the whole configuration as XML to 'stream': a versioned
// <TabbedGroups> root carrying the active leaf id, a CustomGlobalData
// section filled by the GlobalSaving event, then the recursive layout
// written by the root sequence. Closes the writer (and so the stream).
public void SaveConfigToStream(Stream stream, Encoding encoding)
{
XmlTextWriter xmlOut = new XmlTextWriter(stream, encoding);
// Use indenting for readability
xmlOut.Formatting = Formatting.Indented;
// Always begin file with identification and warning
xmlOut.WriteStartDocument();
xmlOut.WriteComment(" IDE, The User Interface library for .NET (www.dotnetmagic.com) ");
xmlOut.WriteComment(" Modifying this generated file will probably render it invalid ");
// Associate a version number with the root element so that future version of the code
// will be able to be backwards compatible or at least recognise out of date versions
xmlOut.WriteStartElement("TabbedGroups");
xmlOut.WriteAttributeString("FormatVersion", "1");
// Record the active leaf by its unique id; -1 means no active leaf
// (LoadConfigFromStream reads these two attributes positionally)
if (_activeLeaf != null)
xmlOut.WriteAttributeString("ActiveLeaf", _activeLeaf.Unique.ToString());
else
xmlOut.WriteAttributeString("ActiveLeaf", "-1");
// Give handlers chance to embed custom data
xmlOut.WriteStartElement("CustomGlobalData");
OnGlobalSaving(xmlOut);
xmlOut.WriteEndElement();
// Save the root sequence
_root.SaveToXml(xmlOut);
// Terminate the root element and document
xmlOut.WriteEndElement();
xmlOut.WriteEndDocument();
// This should flush all actions and close the file
xmlOut.Close();
// Saved, so cannot be dirty any more
if (_autoCalculateDirty)
_dirty = false;
}
// Load a configuration previously produced by SaveConfigToArray.
public void LoadConfigFromArray(byte[] buffer)
{
    // Wrap the bytes in a read-only memory stream; 'using' ensures
    // disposal even when LoadConfigFromStream throws (it raises
    // ArgumentException on malformed input)
    using (MemoryStream ms = new MemoryStream(buffer))
    {
        LoadConfigFromStream(ms);
    }
}

// Load a configuration previously produced by SaveConfigToFile.
public void LoadConfigFromFile(string filename)
{
    // Open existing file; 'using' releases the handle on the exception
    // path too (the original leaked the FileStream when parsing failed)
    using (FileStream fs = new FileStream(filename, FileMode.Open))
    {
        LoadConfigFromStream(fs);
    }
}
// Rebuild the entire group hierarchy from XML written by
// SaveConfigToStream. Throws ArgumentException on any structural problem.
// The finally block always runs so that an active leaf is reinstated and
// EndInit re-enables compacting even when parsing fails part-way.
public void LoadConfigFromStream(Stream stream)
{
XmlTextReader xmlIn = new XmlTextReader(stream);
// Ignore whitespace, not interested
xmlIn.WhitespaceHandling = WhitespaceHandling.None;
// Moves the reader to the root element.
xmlIn.MoveToContent();
// Double check this has the correct element name
if (xmlIn.Name != "TabbedGroups")
throw new ArgumentException("Root element must be 'TabbedGroups'");
// Load the format version number
// NOTE(review): attributes are read positionally (0 = FormatVersion,
// 1 = ActiveLeaf), mirroring the write order in SaveConfigToStream;
// a file missing ActiveLeaf yields null and Convert.ToInt32(null) == 0.
string version = xmlIn.GetAttribute(0);
string rawActiveLeaf = xmlIn.GetAttribute(1);
// Convert format version from string to double
int formatVersion = (int)Convert.ToDouble(version);
int activeLeaf = Convert.ToInt32(rawActiveLeaf);
// We can only load 1 upward version formats
if (formatVersion < 1)
throw new ArgumentException("Can only load Version 1 and upwards TabbedGroups Configuration files");
try
{
// Prevent compacting and reposition of children
BeginInit();
// Remove all existing contents
_root.Clear();
// Read to custom data element
if (!xmlIn.Read())
throw new ArgumentException("An element was expected but could not be read in");
if (xmlIn.Name != "CustomGlobalData")
throw new ArgumentException("Expected 'CustomData' element was not found");
bool finished = xmlIn.IsEmptyElement;
// Give handlers chance to reload custom saved data
OnGlobalLoading(xmlIn);
// Read everything until we get the end of custom data marker
while(!finished)
{
// Check it has the expected name
if (xmlIn.NodeType == XmlNodeType.EndElement)
finished = (xmlIn.Name == "CustomGlobalData");
if (!finished)
{
if (!xmlIn.Read())
throw new ArgumentException("An element was expected but could not be read in");
}
}
// Read the next well known element
if (!xmlIn.Read())
throw new ArgumentException("An element was expected but could not be read in");
// Is it the expected element?
if (xmlIn.Name != "Sequence")
throw new ArgumentException("Element 'Sequence' was expected but not found");
// Reload the root sequence
_root.LoadFromXml(xmlIn);
// Move past the end element
if (!xmlIn.Read())
throw new ArgumentException("Could not read in next expected node");
// Check it has the expected name
if (xmlIn.NodeType != XmlNodeType.EndElement)
throw new ArgumentException("EndElement expected but not found");
}
finally
{
TabGroupLeaf newActive = null;
// Reset the active leaf correctly: walk all leaves looking for the
// saved unique id, falling back to the first leaf found
TabGroupLeaf current = FirstLeaf();
while(current != null)
{
// Default to the first leaf if we cannot find a match
if (newActive == null)
newActive = current;
// Find an exact match?
if (current.Unique == activeLeaf)
{
newActive = current;
break;
}
current = NextLeaf(current);
}
// Reinstate the active leaf indication
if (newActive != null)
ActiveLeaf = newActive;
// Allow normal operation
EndInit();
}
xmlIn.Close();
// Just loaded, so cannot be dirty
if (_autoCalculateDirty)
_dirty = false;
}
// Depth-first search for the first (forwards) or last (backwards) leaf
// inside the given sequence; null when the sequence holds no leaf.
protected TabGroupLeaf RecursiveFindLeafInSequence(TabGroupSequence tgs, bool forwards)
{
    int total = tgs.Count;

    for (int pos = 0; pos < total; pos++)
    {
        // Walk front-to-back or back-to-front as requested
        int actual = forwards ? pos : (total - pos - 1);

        // A leaf entry is an immediate hit
        if (tgs[actual].IsLeaf)
            return tgs[actual] as TabGroupLeaf;

        // Otherwise recurse into the child sequence
        TabGroupLeaf found = RecursiveFindLeafInSequence(tgs[actual] as TabGroupSequence, forwards);

        if (found != null)
            return found;
    }

    // No leaf anywhere inside this sequence
    return null;
}
// Find the leaf nearest to 'tgb' inside 'tgs', searching only entries
// after it (forwards) or before it (backwards); when the sequence is
// exhausted the search climbs to the parent sequence, so this walks the
// whole tree in document order starting from 'tgb'.
protected TabGroupLeaf RecursiveFindLeafInSequence(TabGroupSequence tgs, TabGroupBase tgb, bool forwards)
{
int count = tgs.Count;
int index = tgs.IndexOf(tgb);
// Are we looking for entries after the provided one?
if (forwards)
{
for(int i=index+1; i<count; i++)
{
// Is this the needed leaf node?
if (tgs[i].IsLeaf)
return tgs[i] as TabGroupLeaf;
else
{
// Need to make a recursive check inside group
TabGroupLeaf leaf = RecursiveFindLeafInSequence(tgs[i] as TabGroupSequence, forwards);
if (leaf != null)
return leaf;
}
}
}
else
{
// Now try each entry before that given
for(int i=index-1; i>=0; i--)
{
// Is this the needed leaf node?
if (tgs[i].IsLeaf)
return tgs[i] as TabGroupLeaf;
else
{
// Need to make a recursive check inside group
TabGroupLeaf leaf = RecursiveFindLeafInSequence(tgs[i] as TabGroupSequence, forwards);
if (leaf != null)
return leaf;
}
}
}
// Still no luck, try our own parent
if (tgs.Parent != null)
return RecursiveFindLeafInSequence(tgs.Parent as TabGroupSequence, tgs, forwards);
else
return null;
}
// Make active the leaf nearest to 'child' within 'tgs': first search the
// entries after it, then those before it, then climb to the parent
// sequence. Sets ActiveLeaf as a side effect when a leaf is found.
protected void MoveActiveInSequence(TabGroupSequence tgs, TabGroupBase child)
{
int count = tgs.Count;
int index = tgs.IndexOf(child);
// First try each entry after that given
for(int i=index+1; i<count; i++)
{
// Is this the needed leaf node?
if (tgs[i].IsLeaf)
{
// Make it active, and finish
ActiveLeaf = tgs[i] as TabGroupLeaf;
return;
}
else
{
// Need to make a recursive check inside group
if (RecursiveActiveInSequence(tgs[i] as TabGroupSequence, true))
return;
}
}
// Now try each entry before that given
for(int i=index-1; i>=0; i--)
{
// Is this the needed leaf node?
if (tgs[i].IsLeaf)
{
// Make it active, and finish
ActiveLeaf = tgs[i] as TabGroupLeaf;
return;
}
else
{
// Need to make a recursive check inside group
if (RecursiveActiveInSequence(tgs[i] as TabGroupSequence, false))
return;
}
}
// Still no luck, try our own parent
if (tgs.Parent != null)
MoveActiveInSequence(tgs.Parent as TabGroupSequence, tgs);
}
// Activate the first (forwards) or last (backwards) leaf found inside the
// given sequence. Returns true when a leaf was made active.
protected bool RecursiveActiveInSequence(TabGroupSequence tgs, bool forwards)
{
    int total = tgs.Count;

    for (int pos = 0; pos < total; pos++)
    {
        // Walk front-to-back or back-to-front as requested
        int actual = forwards ? pos : (total - pos - 1);

        if (tgs[actual].IsLeaf)
        {
            // Found one - make it the active leaf and stop searching
            ActiveLeaf = tgs[actual] as TabGroupLeaf;
            return true;
        }

        // Recurse into the child sequence
        if (RecursiveActiveInSequence(tgs[actual] as TabGroupSequence, forwards))
            return true;
    }

    // No leaf anywhere inside this sequence
    return false;
}
// Broadcast a change notification to the whole hierarchy.
protected void Notify(TabGroupBase.NotifyCode notifyCode)
{
    // Nothing to notify until the root sequence exists
    if (_root == null)
        return;

    _root.Notify(notifyCode);
}
// Re-establish the AtLeastOneLeaf policy after removals: when enabled and
// no leaves remain, create a default leaf and make it active. Skipped
// while a Compact() is in progress to avoid fighting the removal pass.
internal void EnforceAtLeastOneLeaf()
{
// Should not add items during compacting operation
if (!_compacting)
{
// Ensure we enforce policy of at least one leaf
if (_atLeastOneLeaf)
{
// Is there at least one?
if (_numLeafs == 0)
{
// No, create a default entry for the root sequence
_root.AddNewLeaf();
// Update the active leaf (field set directly; no events raised)
_activeLeaf = FirstLeaf();
// Mark layout as dirty
if (_autoCalculateDirty)
_dirty = true;
}
}
}
}
// Called when a group (leaf or whole sub-tree) is removed: decrements the
// leaf counter once per leaf in the removed hierarchy and marks the
// layout dirty.
internal void GroupRemoved(TabGroupBase tgb)
{
// Decrease count of leafs entries for each leaf that exists
// which in the hierarchy that is being removed
if (tgb.IsLeaf)
_numLeafs--;
else
{
TabGroupSequence tgs = tgb as TabGroupSequence;
// Recurse into processing each child item
for(int i=0; i<tgs.Count; i++)
GroupRemoved(tgs[i]);
}
// Mark layout as dirty
if (_autoCalculateDirty)
_dirty = true;
}
// IMessageFilter hook (registered in InternalConstruct): intercepts
// keyboard messages while our Form is active. Ctrl(+Shift)+Tab cycles
// tabs; other key combinations are packed into a Shortcut value and
// dispatched via TestShortcut. Returns true to swallow the message.
public bool PreFilterMessage(ref Message msg)
{
Form parentForm = this.FindForm();
// Only interested if the Form we are on is activate (i.e. contains focus)
if ((parentForm != null) && (parentForm == Form.ActiveForm) && parentForm.ContainsFocus)
{
switch(msg.Msg)
{
case (int)Win32.Msgs.WM_KEYDOWN:
// Ignore keyboard input if the control is disabled
if (this.Enabled)
{
// Find up/down state of shift and control keys
ushort shiftKey = User32.GetKeyState((int)Win32.VirtualKeys.VK_SHIFT);
ushort controlKey = User32.GetKeyState((int)Win32.VirtualKeys.VK_CONTROL);
// Basic code we are looking for is the key pressed
int code = (int)msg.WParam;
// Is SHIFT pressed? (high bit of GetKeyState = key is down)
bool shiftPressed = (((int)shiftKey & 0x00008000) != 0);
// Is CONTROL pressed?
bool controlPressed = (((int)controlKey & 0x00008000) != 0);
// Was the TAB key pressed?
if ((code == (int)Win32.VirtualKeys.VK_TAB) && controlPressed)
{
if (shiftPressed)
return SelectPreviousTab();
else
return SelectNextTab();
}
else
{
// Plus the modifier for SHIFT...
// (0x10000/0x20000/0x40000 match the Shortcut enum's
// Shift/Control/Alt modifier encodings)
if (shiftPressed)
code += 0x00010000;
// Plus the modifier for CONTROL
if (controlPressed)
code += 0x00020000;
// Construct shortcut from keystate and keychar
Shortcut sc = (Shortcut)(code);
// Search for a matching command
return TestShortcut(sc);
}
}
break;
case (int)Win32.Msgs.WM_SYSKEYDOWN:
// Ignore keyboard input if the control is disabled
if (this.Enabled)
{
// Ignore the bare ALT key itself
if ((int)msg.WParam != (int)Win32.VirtualKeys.VK_MENU)
{
// Construct shortcut from ALT + keychar
Shortcut sc = (Shortcut)(0x00040000 + (int)msg.WParam);
// Search for a matching command
return TestShortcut(sc);
}
}
break;
default:
break;
}
}
return false;
}
// Dispatch a keyboard Shortcut against the built-in commands. Most
// commands require an active leaf with a selected tab; Rebalance only
// needs an active leaf. Returns true when a command was executed.
protected bool TestShortcut(Shortcut sc)
{
bool result = false;
// Must have an active leaf for shortcuts to operate against
if (_activeLeaf != null)
{
Controls.TabControl tc = _activeLeaf.GroupControl as Controls.TabControl;
// Must have an active tab for these shortcuts to work against
if (tc.SelectedTab != null)
{
// Close selected page requested?
if (sc.Equals(_closeShortcut))
{
_activeLeaf.OnClose(_activeLeaf, EventArgs.Empty);
result = true;
}
// Toggle the prominence state?
if (sc.Equals(_prominentShortcut))
{
_activeLeaf.OnToggleProminent(_activeLeaf, EventArgs.Empty);
result = true;
}
// Move page to the next group?
if (sc.Equals(_moveNextShortcut))
{
_activeLeaf.OnMoveNext(_activeLeaf, EventArgs.Empty);
result = true;
}
// Move page to the previous group?
if (sc.Equals(_movePreviousShortcut))
{
_activeLeaf.OnMovePrevious(_activeLeaf, EventArgs.Empty);
result = true;
}
// Cannot split a group unless at least two entries exist
if (tc.TabPages.Count > 1)
{
// A split direction is only allowed when it matches the root
// sequence direction (or the root has at most one entry)
bool allowVert = false;
bool allowHorz = false;
if (_root.Count <= 1)
{
allowVert = true;
allowHorz = true;
}
else
{
if (_root.Direction == Direction.Vertical)
allowVert = true;
else
allowHorz = true;
}
// NOTE(review): the vertical shortcut invokes NewHorizontalGroup
// (and vice versa) and each is gated by the opposite allow flag.
// This cross-mapping may be intentional ('vertical tab group' =
// groups split along the horizontal axis) — confirm against the
// context menu wiring before changing.
// Create two vertical groups
if (allowHorz && sc.Equals(_splitVerticalShortcut))
{
_activeLeaf.NewHorizontalGroup(_activeLeaf, false);
result = true;
}
// Create two horizontal groups
if (allowVert && sc.Equals(_splitHorizontalShortcut))
{
_activeLeaf.NewVerticalGroup(_activeLeaf, false);
result = true;
}
}
}
// Request to rebalance all spacing
if (sc.Equals(_rebalanceShortcut))
{
_activeLeaf.OnRebalance(_activeLeaf, EventArgs.Empty);
result = true;
}
}
return result;
}
// Ctrl+Tab handler: advance the selection to the next page, moving into
// the next leaf (wrapping to the first) when the current leaf's pages are
// exhausted. Always returns true so the keystroke is swallowed.
protected bool SelectNextTab()
{
// If no active leaf...
if (_activeLeaf == null)
SelectFirstPage();
else
{
// Once we leave the starting leaf we may select a leaf's first page
bool selectFirst = false;
TabGroupLeaf startLeaf = _activeLeaf;
TabGroupLeaf thisLeaf = startLeaf;
do
{
// Access to the embedded tab control
Controls.TabControl tc = thisLeaf.GroupControl as Controls.TabControl;
// Does it have any pages?
if (tc.TabPages.Count > 0)
{
// Are we allowed to select the first page?
if (selectFirst)
{
// Do it and exit loop
tc.SelectedIndex = 0;
// Must ensure this becomes the active leaf
if (thisLeaf != _activeLeaf)
ActiveLeaf = thisLeaf;
break;
}
else
{
// Is there another page after the selected one?
if (tc.SelectedIndex < tc.TabPages.Count - 1)
{
// Select new page and exit loop
tc.SelectedIndex = tc.SelectedIndex + 1;
break;
}
}
}
selectFirst = true;
// Find the next leaf in sequence
thisLeaf = NextLeaf(thisLeaf);
// No more leafs, wrap back to first
if (thisLeaf == null)
thisLeaf = FirstLeaf();
// Back at starting leaf?
// NOTE(review): 'tc' here still refers to the leaf examined in THIS
// iteration, not to startLeaf — the wrap-to-first-page fallback
// therefore operates on the last leaf visited; confirm intended.
if (thisLeaf == startLeaf)
{
// If it was not the first page that we started from
if (tc.SelectedIndex > 0)
{
// Then we have circled all the way around, select first page
tc.SelectedIndex = 0;
}
}
} while(thisLeaf != startLeaf);
}
return true;
}
// Ctrl+Shift+Tab handler: mirror image of SelectNextTab — move the
// selection to the previous page, stepping back into the previous leaf
// (wrapping to the last) as needed. Always returns true.
protected bool SelectPreviousTab()
{
// If no active leaf...
if (_activeLeaf == null)
SelectLastPage();
else
{
// Once we leave the starting leaf we may select a leaf's last page
bool selectLast = false;
TabGroupLeaf startLeaf = _activeLeaf;
TabGroupLeaf thisLeaf = startLeaf;
do
{
// Access to the embedded tab control
Controls.TabControl tc = thisLeaf.GroupControl as Controls.TabControl;
// Does it have any pages?
if (tc.TabPages.Count > 0)
{
// Are we allowed to select the last page?
if (selectLast)
{
// Do it and exit loop
tc.SelectedIndex = tc.TabPages.Count - 1;
// Must ensure this becomes the active leaf
if (thisLeaf != _activeLeaf)
ActiveLeaf = thisLeaf;
break;
}
else
{
// Is there another page before the selected one?
if (tc.SelectedIndex > 0)
{
// Select previous page and exit loop
tc.SelectedIndex = tc.SelectedIndex - 1;
break;
}
}
}
selectLast = true;
// Find the previous leaf in sequence
thisLeaf = PreviousLeaf(thisLeaf);
// No more leafs, wrap back to first
if (thisLeaf == null)
thisLeaf = LastLeaf();
// Back at starting leaf?
// NOTE(review): as in SelectNextTab, 'tc' refers to the leaf from
// this iteration rather than startLeaf — confirm intended.
if (thisLeaf == startLeaf)
{
// If it was not the first page that we started from
if (tc.SelectedIndex == 0)
{
// Then we have circled all the way around, select last page
tc.SelectedIndex = tc.TabPages.Count - 1;
}
}
} while(thisLeaf != startLeaf);
}
return true;
}
// Activates the first leaf in sequence and selects its first page, if any.
protected void SelectFirstPage()
{
    // Make the first leaf the active one (may be null when no leaves exist)
    ActiveLeaf = FirstLeaf();

    // Nothing further to do without a leaf
    if (_activeLeaf == null)
        return;

    // Select the opening page when the leaf actually contains pages
    if (_activeLeaf.TabPages.Count > 0)
        _activeLeaf.TabPages[0].Selected = true;
}
// Activates the last leaf in sequence and selects its last page, if any.
protected void SelectLastPage()
{
    // Make the last leaf the active one (may be null when no leaves exist)
    ActiveLeaf = LastLeaf();

    // Nothing further to do without a leaf
    if (_activeLeaf == null)
        return;

    // Select the closing page when the leaf actually contains pages
    if (_activeLeaf.TabPages.Count > 0)
        _activeLeaf.TabPages[_activeLeaf.TabPages.Count - 1].Selected = true;
}
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// File System.Windows.Automation.Peers.AutomationPeer.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;
// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0067
// Disable the "this event is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108
namespace System.Windows.Automation.Peers
{
// Contract stub for System.Windows.Automation.Peers.AutomationPeer.
// Bodies are placeholders (default(T) / no-op): only the member signatures
// carry meaning for contract checking. Each abstract *Core hook is declared
// directly above the public wrapper it backs.
abstract public partial class AutomationPeer : System.Windows.Threading.DispatcherObject
{
    #region Methods and constructors
    protected AutomationPeer()
    {
    }
    protected abstract string GetAcceleratorKeyCore();
    public string GetAcceleratorKey()
    {
        return default(string);
    }
    protected abstract string GetAccessKeyCore();
    public string GetAccessKey()
    {
        return default(string);
    }
    protected abstract AutomationControlType GetAutomationControlTypeCore();
    public AutomationControlType GetAutomationControlType()
    {
        return default(AutomationControlType);
    }
    protected abstract string GetAutomationIdCore();
    public string GetAutomationId()
    {
        return default(string);
    }
    protected abstract System.Windows.Rect GetBoundingRectangleCore();
    public System.Windows.Rect GetBoundingRectangle()
    {
        return default(System.Windows.Rect);
    }
    protected abstract List<System.Windows.Automation.Peers.AutomationPeer> GetChildrenCore();
    public List<System.Windows.Automation.Peers.AutomationPeer> GetChildren()
    {
        return default(List<System.Windows.Automation.Peers.AutomationPeer>);
    }
    protected abstract string GetClassNameCore();
    public string GetClassName()
    {
        return default(string);
    }
    protected abstract System.Windows.Point GetClickablePointCore();
    public System.Windows.Point GetClickablePoint()
    {
        return default(System.Windows.Point);
    }
    protected abstract string GetHelpTextCore();
    public string GetHelpText()
    {
        return default(string);
    }
    // Virtual (not abstract) hooks keep their stub bodies.
    protected virtual new HostedWindowWrapper GetHostRawElementProviderCore()
    {
        return default(HostedWindowWrapper);
    }
    protected abstract string GetItemStatusCore();
    public string GetItemStatus()
    {
        return default(string);
    }
    protected abstract string GetItemTypeCore();
    public string GetItemType()
    {
        return default(string);
    }
    protected abstract System.Windows.Automation.Peers.AutomationPeer GetLabeledByCore();
    public System.Windows.Automation.Peers.AutomationPeer GetLabeledBy()
    {
        return default(System.Windows.Automation.Peers.AutomationPeer);
    }
    protected virtual new string GetLocalizedControlTypeCore()
    {
        return default(string);
    }
    public string GetLocalizedControlType()
    {
        return default(string);
    }
    protected abstract string GetNameCore();
    public string GetName()
    {
        return default(string);
    }
    protected abstract AutomationOrientation GetOrientationCore();
    public AutomationOrientation GetOrientation()
    {
        return default(AutomationOrientation);
    }
    public System.Windows.Automation.Peers.AutomationPeer GetParent()
    {
        return default(System.Windows.Automation.Peers.AutomationPeer);
    }
    public abstract Object GetPattern(PatternInterface patternInterface);
    protected abstract bool HasKeyboardFocusCore();
    public bool HasKeyboardFocus()
    {
        return default(bool);
    }
    public void InvalidatePeer()
    {
    }
    protected abstract bool IsContentElementCore();
    public bool IsContentElement()
    {
        return default(bool);
    }
    protected abstract bool IsControlElementCore();
    public bool IsControlElement()
    {
        return default(bool);
    }
    protected abstract bool IsEnabledCore();
    public bool IsEnabled()
    {
        return default(bool);
    }
    protected abstract bool IsKeyboardFocusableCore();
    public bool IsKeyboardFocusable()
    {
        return default(bool);
    }
    protected abstract bool IsOffscreenCore();
    public bool IsOffscreen()
    {
        return default(bool);
    }
    protected abstract bool IsPasswordCore();
    public bool IsPassword()
    {
        return default(bool);
    }
    protected abstract bool IsRequiredForFormCore();
    public bool IsRequiredForForm()
    {
        return default(bool);
    }
    public static bool ListenerExists(AutomationEvents eventId)
    {
        return default(bool);
    }
    protected System.Windows.Automation.Peers.AutomationPeer PeerFromProvider(System.Windows.Automation.Provider.IRawElementProviderSimple provider)
    {
        return default(System.Windows.Automation.Peers.AutomationPeer);
    }
    protected internal System.Windows.Automation.Provider.IRawElementProviderSimple ProviderFromPeer(System.Windows.Automation.Peers.AutomationPeer peer)
    {
        return default(System.Windows.Automation.Provider.IRawElementProviderSimple);
    }
    public void RaiseAsyncContentLoadedEvent(System.Windows.Automation.AsyncContentLoadedEventArgs args)
    {
    }
    public void RaiseAutomationEvent(AutomationEvents eventId)
    {
    }
    public void RaisePropertyChangedEvent(System.Windows.Automation.AutomationProperty property, Object oldValue, Object newValue)
    {
    }
    public void ResetChildrenCache()
    {
    }
    public void SetFocus()
    {
    }
    protected abstract void SetFocusCore();
    #endregion
    #region Properties and indexers
    public System.Windows.Automation.Peers.AutomationPeer EventsSource
    {
        get
        {
            return default(System.Windows.Automation.Peers.AutomationPeer);
        }
        set
        {
        }
    }
    internal protected virtual new bool IsHwndHost
    {
        get
        {
            return default(bool);
        }
    }
    #endregion
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Reflection;
using log4net;
using OpenMetaverse;
using OpenMetaverse.StructuredData;
namespace OpenSim.Framework
{
// Soon to be dismissed
/// <summary>
/// Legacy serializable child-agent update payload; superseded by
/// AgentPosition / AgentData, but still consumed by AgentPosition.CopyFrom.
/// Field order is preserved (type is [Serializable]).
/// </summary>
[Serializable]
public class ChildAgentDataUpdate
{
public Guid ActiveGroupID;
public Guid AgentID;
public bool alwaysrun;
// Avatar height; copied into AgentPosition.Size.Z by CopyFrom
public float AVHeight;
// Camera position; copied into AgentPosition.Center by CopyFrom
public Vector3 cameraPosition;
// Draw distance; copied into AgentPosition.Far by CopyFrom
public float drawdistance;
public float godlevel;
public uint GroupAccess;
public Vector3 Position;
public ulong regionHandle;
public byte[] throttles;
public Vector3 Velocity;
public ChildAgentDataUpdate()
{
}
}
/// <summary>
/// Contract for agent state objects that serialize to/from OSD maps for
/// inter-region transfer (implemented by AgentPosition and AgentData).
/// </summary>
public interface IAgentData
{
UUID AgentID { get; set; }
// Serialize this instance to an OSD map (the wire format).
OSDMap Pack(EntityTransferContext ctx);
// Populate this instance from a map produced by Pack.
void Unpack(OSDMap map, IScene scene, EntityTransferContext ctx);
}
/// <summary>
/// Replacement for ChildAgentDataUpdate. Used over RESTComms and LocalComms.
/// Carries the frequently-updated position/camera state of an agent. The
/// key strings used by Pack/Unpack are the wire format and must not change.
/// </summary>
public class AgentPosition : IAgentData
{
private UUID m_id;
public UUID AgentID
{
get { return m_id; }
set { m_id = value; }
}
public ulong RegionHandle;
public uint CircuitCode;
public UUID SessionID;
// Draw distance (CopyFrom fills this from ChildAgentDataUpdate.drawdistance)
public float Far;
public Vector3 Position;
public Vector3 Velocity;
// Camera position (CopyFrom fills this from ChildAgentDataUpdate.cameraPosition)
public Vector3 Center;
public Vector3 Size;
public Vector3 AtAxis;
public Vector3 LeftAxis;
public Vector3 UpAxis;
//public int GodLevel;
public OSD GodData = null;
public bool ChangedGrid;
// This probably shouldn't be here
public byte[] Throttles;
// Capability seeds of child regions, keyed by region handle
public Dictionary<ulong, string> ChildrenCapSeeds = null;
/// <summary>
/// Serialize to an OSD map. Most fields are stringified; Unpack parses the
/// same representations back.
/// </summary>
public OSDMap Pack(EntityTransferContext ctx)
{
OSDMap args = new OSDMap();
args["message_type"] = OSD.FromString("AgentPosition");
args["region_handle"] = OSD.FromString(RegionHandle.ToString());
args["circuit_code"] = OSD.FromString(CircuitCode.ToString());
args["agent_uuid"] = OSD.FromUUID(AgentID);
args["session_uuid"] = OSD.FromUUID(SessionID);
args["position"] = OSD.FromString(Position.ToString());
args["velocity"] = OSD.FromString(Velocity.ToString());
args["center"] = OSD.FromString(Center.ToString());
args["size"] = OSD.FromString(Size.ToString());
args["at_axis"] = OSD.FromString(AtAxis.ToString());
args["left_axis"] = OSD.FromString(LeftAxis.ToString());
args["up_axis"] = OSD.FromString(UpAxis.ToString());
args["far"] = OSD.FromReal(Far);
args["changed_grid"] = OSD.FromBoolean(ChangedGrid);
//args["god_level"] = OSD.FromString(GodLevel.ToString());
if(GodData != null)
{
args["god_data"] = GodData;
OSDMap g = (OSDMap)GodData;
// Set legacy value
// TODO: remove after 0.9 is superseded
if (g.ContainsKey("ViewerUiIsGod"))
args["god_level"] = g["ViewerUiIsGod"].AsBoolean() ? 200 : 0;
}
if ((Throttles != null) && (Throttles.Length > 0))
args["throttles"] = OSD.FromBinary(Throttles);
if (ChildrenCapSeeds != null && ChildrenCapSeeds.Count > 0)
{
// Each seed is emitted as a {handle, seed} pair in an array
OSDArray childrenSeeds = new OSDArray(ChildrenCapSeeds.Count);
foreach (KeyValuePair<ulong, string> kvp in ChildrenCapSeeds)
{
OSDMap pair = new OSDMap();
pair["handle"] = OSD.FromString(kvp.Key.ToString());
pair["seed"] = OSD.FromString(kvp.Value);
childrenSeeds.Add(pair);
}
args["children_seeds"] = childrenSeeds;
}
return args;
}
/// <summary>
/// Populate this instance from a map produced by Pack. Missing keys leave
/// the corresponding fields untouched; malformed values are ignored
/// (TryParse failures are not reported).
/// </summary>
public void Unpack(OSDMap args, IScene scene, EntityTransferContext ctx)
{
if (args.ContainsKey("region_handle"))
UInt64.TryParse(args["region_handle"].AsString(), out RegionHandle);
if (args["circuit_code"] != null)
UInt32.TryParse((string)args["circuit_code"].AsString(), out CircuitCode);
if (args["agent_uuid"] != null)
AgentID = args["agent_uuid"].AsUUID();
if (args["session_uuid"] != null)
SessionID = args["session_uuid"].AsUUID();
if (args["position"] != null)
Vector3.TryParse(args["position"].AsString(), out Position);
if (args["velocity"] != null)
Vector3.TryParse(args["velocity"].AsString(), out Velocity);
if (args["center"] != null)
Vector3.TryParse(args["center"].AsString(), out Center);
if (args["size"] != null)
Vector3.TryParse(args["size"].AsString(), out Size);
if (args["at_axis"] != null)
Vector3.TryParse(args["at_axis"].AsString(), out AtAxis);
if (args["left_axis"] != null)
Vector3.TryParse(args["left_axis"].AsString(), out LeftAxis);
if (args["up_axis"] != null)
Vector3.TryParse(args["up_axis"].AsString(), out UpAxis);
if (args["changed_grid"] != null)
ChangedGrid = args["changed_grid"].AsBoolean();
//if (args["god_level"] != null)
// Int32.TryParse(args["god_level"].AsString(), out GodLevel);
if (args.ContainsKey("god_data") && args["god_data"] != null)
GodData = args["god_data"];
if (args["far"] != null)
Far = (float)(args["far"].AsReal());
if (args["throttles"] != null)
Throttles = args["throttles"].AsBinary();
if (args.ContainsKey("children_seeds") && (args["children_seeds"] != null) &&
(args["children_seeds"].Type == OSDType.Array))
{
OSDArray childrenSeeds = (OSDArray)(args["children_seeds"]);
ChildrenCapSeeds = new Dictionary<ulong, string>();
foreach (OSD o in childrenSeeds)
{
if (o.Type == OSDType.Map)
{
ulong handle = 0;
string seed = "";
OSDMap pair = (OSDMap)o;
// A pair with an unparseable handle is skipped entirely
if (pair["handle"] != null)
if (!UInt64.TryParse(pair["handle"].AsString(), out handle))
continue;
if (pair["seed"] != null)
seed = pair["seed"].AsString();
if (!ChildrenCapSeeds.ContainsKey(handle))
ChildrenCapSeeds.Add(handle, seed);
}
}
}
}
/// <summary>
/// Soon to be decommissioned
/// </summary>
/// <param name="cAgent">Legacy update to copy position/camera state from.</param>
/// <param name="sid">Session id to assign to this instance.</param>
public void CopyFrom(ChildAgentDataUpdate cAgent, UUID sid)
{
AgentID = new UUID(cAgent.AgentID);
SessionID = sid;
// next: ???
Size = new Vector3();
Size.Z = cAgent.AVHeight;
Center = cAgent.cameraPosition;
Far = cAgent.drawdistance;
Position = cAgent.Position;
RegionHandle = cAgent.regionHandle;
Throttles = cAgent.throttles;
Velocity = cAgent.Velocity;
}
}
/// <summary>
/// Group membership record carried with an agent transfer: group id, powers
/// mask and whether group notices are accepted.
/// </summary>
public class AgentGroupData
{
    public UUID GroupID;
    public ulong GroupPowers;
    public bool AcceptNotices;

    // Direct initialization.
    public AgentGroupData(UUID id, ulong powers, bool notices)
    {
        GroupID = id;
        GroupPowers = powers;
        AcceptNotices = notices;
    }

    // Deserializing constructor; see UnpackUpdateMessage for expected keys.
    public AgentGroupData(OSDMap args)
    {
        UnpackUpdateMessage(args);
    }

    /// <summary>Serialize to an OSD map; key names are the wire format.</summary>
    public OSDMap PackUpdateMessage()
    {
        OSDMap map = new OSDMap();
        map["group_id"] = OSD.FromUUID(GroupID);
        map["group_powers"] = OSD.FromString(GroupPowers.ToString());
        map["accept_notices"] = OSD.FromBoolean(AcceptNotices);
        return map;
    }

    /// <summary>Populate this instance from a map produced by PackUpdateMessage.
    /// Missing keys leave fields untouched.</summary>
    public void UnpackUpdateMessage(OSDMap args)
    {
        if (args["group_id"] != null)
            GroupID = args["group_id"].AsUUID();
        if (args["group_powers"] != null)
            UInt64.TryParse(args["group_powers"].AsString(), out GroupPowers);
        if (args["accept_notices"] != null)
            AcceptNotices = args["accept_notices"].AsBoolean();
    }
}
/// <summary>
/// Controller record carried with an agent transfer: an object id, an item
/// id and two control-flag words ("ignore" and "event").
/// </summary>
public class ControllerData
{
    public UUID ObjectID;
    public UUID ItemID;
    public uint IgnoreControls;
    public uint EventControls;

    // Direct initialization.
    public ControllerData(UUID obj, UUID item, uint ignore, uint ev)
    {
        ObjectID = obj;
        ItemID = item;
        IgnoreControls = ignore;
        EventControls = ev;
    }

    // Deserializing constructor; see UnpackUpdateMessage for expected keys.
    public ControllerData(OSDMap args)
    {
        UnpackUpdateMessage(args);
    }

    /// <summary>Serialize to an OSD map; key names are the wire format.</summary>
    public OSDMap PackUpdateMessage()
    {
        OSDMap map = new OSDMap();
        map["object"] = OSD.FromUUID(ObjectID);
        map["item"] = OSD.FromUUID(ItemID);
        map["ignore"] = OSD.FromInteger(IgnoreControls);
        map["event"] = OSD.FromInteger(EventControls);
        return map;
    }

    /// <summary>Populate this instance from a map produced by PackUpdateMessage.
    /// Missing keys leave fields untouched.</summary>
    public void UnpackUpdateMessage(OSDMap args)
    {
        if (args["object"] != null)
            ObjectID = args["object"].AsUUID();
        if (args["item"] != null)
            ItemID = args["item"].AsUUID();
        if (args["ignore"] != null)
            IgnoreControls = (uint)args["ignore"].AsInteger();
        if (args["event"] != null)
            EventControls = (uint)args["event"].AsInteger();
    }
}
/// <summary>
/// Full agent state exchanged between simulators. Pack/Unpack define the OSD
/// wire format: key strings must not change. Unpack is lenient -- missing
/// keys leave the corresponding fields untouched.
/// Fixes: Unpack previously parsed "left_axis" and "up_axis" into AtAxis
/// (copy-paste bug), so LeftAxis/UpAxis were never restored and AtAxis was
/// clobbered.
/// </summary>
public class AgentData : IAgentData
{
    private UUID m_id;
    public UUID AgentID
    {
        get { return m_id; }
        set { m_id = value; }
    }
    public UUID RegionID;
    public uint CircuitCode;
    public UUID SessionID;
    public Vector3 Position;
    public Vector3 Velocity;
    public Vector3 Center;
    public Vector3 Size;
    public Vector3 AtAxis;
    public Vector3 LeftAxis;
    public Vector3 UpAxis;
    /// <summary>
    /// Signal on a V2 teleport that Scene.IncomingChildAgentDataUpdate(AgentData ad) should wait for the
    /// scene presence to become root (triggered when the viewer sends a CompleteAgentMovement UDP packet after
    /// establishing the connection triggered by it's receipt of a TeleportFinish EQ message).
    /// </summary>
    public bool SenderWantsToWaitForRoot;
    public float Far;
    public float Aspect;
    //public int[] Throttles;
    public byte[] Throttles;
    public uint LocomotionState;
    public Quaternion HeadRotation;
    public Quaternion BodyRotation;
    public uint ControlFlags;
    public float EnergyLevel;
    public OSD GodData = null;
    //public Byte GodLevel;
    public bool AlwaysRun;
    public UUID PreyAgent;
    public Byte AgentAccess;
    public UUID ActiveGroupID;
    public string ActiveGroupName;
    public string ActiveGroupTitle = null;
    public UUID agentCOF;
    public byte CrossingFlags;
    public byte CrossExtraFlags;
    public Dictionary<ulong, string> ChildrenCapSeeds = null;
    public Animation[] Anims;
    public Animation DefaultAnim = null;
    public Animation AnimState = null;
    public Byte MotionState = 0;
    public UUID ParentPart;
    public Vector3 SitOffset;
    // Appearance
    public AvatarAppearance Appearance;
    // DEBUG ON
    private static readonly ILog m_log =
        LogManager.GetLogger(
        MethodBase.GetCurrentMethod().DeclaringType);
    // DEBUG OFF
    // Scripted
    public ControllerData[] Controllers;
    public string CallbackURI;
    // These two must have the same Count
    public List<ISceneObject> AttachmentObjects;
    public List<string> AttachmentObjectStates;
    public Dictionary<string, UUID> MovementAnimationOverRides = new Dictionary<string, UUID>();

    /// <summary>
    /// Serialize to an OSD map. Optional sections (throttles, god data,
    /// seeds, animations, appearance, controllers, attachments) are only
    /// emitted when present.
    /// </summary>
    public virtual OSDMap Pack(EntityTransferContext ctx)
    {
        // m_log.InfoFormat("[CHILDAGENTDATAUPDATE] Pack data");
        OSDMap args = new OSDMap();
        args["message_type"] = OSD.FromString("AgentData");
        args["region_id"] = OSD.FromString(RegionID.ToString());
        args["circuit_code"] = OSD.FromString(CircuitCode.ToString());
        args["agent_uuid"] = OSD.FromUUID(AgentID);
        args["session_uuid"] = OSD.FromUUID(SessionID);
        args["position"] = OSD.FromString(Position.ToString());
        args["velocity"] = OSD.FromString(Velocity.ToString());
        args["center"] = OSD.FromString(Center.ToString());
        args["size"] = OSD.FromString(Size.ToString());
        args["at_axis"] = OSD.FromString(AtAxis.ToString());
        args["left_axis"] = OSD.FromString(LeftAxis.ToString());
        args["up_axis"] = OSD.FromString(UpAxis.ToString());
        //backwards compatibility
        args["changed_grid"] = OSD.FromBoolean(SenderWantsToWaitForRoot);
        args["wait_for_root"] = OSD.FromBoolean(SenderWantsToWaitForRoot);
        args["far"] = OSD.FromReal(Far);
        args["aspect"] = OSD.FromReal(Aspect);
        if ((Throttles != null) && (Throttles.Length > 0))
            args["throttles"] = OSD.FromBinary(Throttles);
        args["locomotion_state"] = OSD.FromString(LocomotionState.ToString());
        args["head_rotation"] = OSD.FromString(HeadRotation.ToString());
        args["body_rotation"] = OSD.FromString(BodyRotation.ToString());
        args["control_flags"] = OSD.FromString(ControlFlags.ToString());
        args["energy_level"] = OSD.FromReal(EnergyLevel);
        //args["god_level"] = OSD.FromString(GodLevel.ToString());
        if (GodData != null)
        {
            args["god_data"] = GodData;
            OSDMap g = (OSDMap)GodData;
            // Set legacy god_level value for older peers
            if (g.ContainsKey("ViewerUiIsGod"))
                args["god_level"] = g["ViewerUiIsGod"].AsBoolean() ? 200 : 0;
        }
        args["always_run"] = OSD.FromBoolean(AlwaysRun);
        args["prey_agent"] = OSD.FromUUID(PreyAgent);
        args["agent_access"] = OSD.FromString(AgentAccess.ToString());
        args["agent_cof"] = OSD.FromUUID(agentCOF);
        args["crossingflags"] = OSD.FromInteger(CrossingFlags);
        if (CrossingFlags != 0)
            args["crossExtraFlags"] = OSD.FromInteger(CrossExtraFlags);
        args["active_group_id"] = OSD.FromUUID(ActiveGroupID);
        args["active_group_name"] = OSD.FromString(ActiveGroupName);
        if (ActiveGroupTitle != null)
            args["active_group_title"] = OSD.FromString(ActiveGroupTitle);
        if (ChildrenCapSeeds != null && ChildrenCapSeeds.Count > 0)
        {
            // Each seed is emitted as a {handle, seed} pair in an array
            OSDArray childrenSeeds = new OSDArray(ChildrenCapSeeds.Count);
            foreach (KeyValuePair<ulong, string> kvp in ChildrenCapSeeds)
            {
                OSDMap pair = new OSDMap();
                pair["handle"] = OSD.FromString(kvp.Key.ToString());
                pair["seed"] = OSD.FromString(kvp.Value);
                childrenSeeds.Add(pair);
            }
            args["children_seeds"] = childrenSeeds;
        }
        if ((Anims != null) && (Anims.Length > 0))
        {
            OSDArray anims = new OSDArray(Anims.Length);
            foreach (Animation aanim in Anims)
                anims.Add(aanim.PackUpdateMessage());
            args["animations"] = anims;
        }
        if (DefaultAnim != null)
        {
            args["default_animation"] = DefaultAnim.PackUpdateMessage();
        }
        if (AnimState != null)
        {
            args["animation_state"] = AnimState.PackUpdateMessage();
        }
        if (MovementAnimationOverRides.Count > 0)
        {
            OSDArray AOs = new OSDArray(MovementAnimationOverRides.Count);
            foreach (KeyValuePair<string, UUID> kvp in MovementAnimationOverRides)
            {
                OSDMap ao = new OSDMap(2);
                ao["state"] = OSD.FromString(kvp.Key);
                ao["uuid"] = OSD.FromUUID(kvp.Value);
                AOs.Add(ao);
            }
            args["movementAO"] = AOs;
        }
        if (MotionState != 0)
        {
            args["motion_state"] = OSD.FromInteger(MotionState);
        }
        if (Appearance != null)
            args["packed_appearance"] = Appearance.Pack(ctx);
        if ((Controllers != null) && (Controllers.Length > 0))
        {
            OSDArray controls = new OSDArray(Controllers.Length);
            foreach (ControllerData ctl in Controllers)
                controls.Add(ctl.PackUpdateMessage());
            args["controllers"] = controls;
        }
        if (!string.IsNullOrEmpty(CallbackURI))
            args["callback_uri"] = OSD.FromString(CallbackURI);
        // Attachment objects for fatpack messages
        if (AttachmentObjects != null)
        {
            int i = 0;
            OSDArray attObjs = new OSDArray(AttachmentObjects.Count);
            foreach (ISceneObject so in AttachmentObjects)
            {
                OSDMap info = new OSDMap(4);
                info["sog"] = OSD.FromString(so.ToXml2());
                info["extra"] = OSD.FromString(so.ExtraToXmlString());
                info["modified"] = OSD.FromBoolean(so.HasGroupChanged);
                try
                {
                    // AttachmentObjectStates must parallel AttachmentObjects
                    info["state"] = OSD.FromString(AttachmentObjectStates[i++]);
                }
                catch (IndexOutOfRangeException)
                {
                    m_log.WarnFormat("[CHILD AGENT DATA]: scripts list is shorter than object list.");
                }
                attObjs.Add(info);
            }
            args["attach_objects"] = attObjs;
        }
        args["parent_part"] = OSD.FromUUID(ParentPart);
        args["sit_offset"] = OSD.FromString(SitOffset.ToString());
        return args;
    }

    /// <summary>
    /// Deserialization of agent data.
    /// Avoiding reflection makes it painful to write, but that's the price!
    /// Missing keys leave fields untouched; malformed values are silently
    /// ignored (TryParse failures are not reported).
    /// </summary>
    /// <param name="args">Map produced by Pack (possibly by an older peer).</param>
    /// <param name="scene">Used to deserialize attachment scene objects.</param>
    /// <param name="ctx">Transfer context forwarded to appearance unpacking.</param>
    public virtual void Unpack(OSDMap args, IScene scene, EntityTransferContext ctx)
    {
        //m_log.InfoFormat("[CHILDAGENTDATAUPDATE] Unpack data");
        if (args.ContainsKey("region_id"))
            UUID.TryParse(args["region_id"].AsString(), out RegionID);
        if (args["circuit_code"] != null)
            UInt32.TryParse((string)args["circuit_code"].AsString(), out CircuitCode);
        if (args["agent_uuid"] != null)
            AgentID = args["agent_uuid"].AsUUID();
        if (args["session_uuid"] != null)
            SessionID = args["session_uuid"].AsUUID();
        if (args["position"] != null)
            Vector3.TryParse(args["position"].AsString(), out Position);
        if (args["velocity"] != null)
            Vector3.TryParse(args["velocity"].AsString(), out Velocity);
        if (args["center"] != null)
            Vector3.TryParse(args["center"].AsString(), out Center);
        if (args["size"] != null)
            Vector3.TryParse(args["size"].AsString(), out Size);
        if (args["at_axis"] != null)
            Vector3.TryParse(args["at_axis"].AsString(), out AtAxis);
        if (args["left_axis"] != null)
            Vector3.TryParse(args["left_axis"].AsString(), out LeftAxis); // was 'out AtAxis' (copy-paste bug)
        if (args["up_axis"] != null)
            Vector3.TryParse(args["up_axis"].AsString(), out UpAxis); // was 'out AtAxis' (copy-paste bug)
        if (args.ContainsKey("wait_for_root") && args["wait_for_root"] != null)
            SenderWantsToWaitForRoot = args["wait_for_root"].AsBoolean();
        if (args["far"] != null)
            Far = (float)(args["far"].AsReal());
        if (args["aspect"] != null)
            Aspect = (float)args["aspect"].AsReal();
        if (args["throttles"] != null)
            Throttles = args["throttles"].AsBinary();
        if (args["locomotion_state"] != null)
            UInt32.TryParse(args["locomotion_state"].AsString(), out LocomotionState);
        if (args["head_rotation"] != null)
            Quaternion.TryParse(args["head_rotation"].AsString(), out HeadRotation);
        if (args["body_rotation"] != null)
            Quaternion.TryParse(args["body_rotation"].AsString(), out BodyRotation);
        if (args["control_flags"] != null)
            UInt32.TryParse(args["control_flags"].AsString(), out ControlFlags);
        if (args["energy_level"] != null)
            EnergyLevel = (float)(args["energy_level"].AsReal());
        //if (args["god_level"] != null)
        //    Byte.TryParse(args["god_level"].AsString(), out GodLevel);
        if (args.ContainsKey("god_data") && args["god_data"] != null)
            GodData = args["god_data"];
        if (args["always_run"] != null)
            AlwaysRun = args["always_run"].AsBoolean();
        if (args["prey_agent"] != null)
            PreyAgent = args["prey_agent"].AsUUID();
        if (args["agent_access"] != null)
            Byte.TryParse(args["agent_access"].AsString(), out AgentAccess);
        if (args.ContainsKey("agent_cof") && args["agent_cof"] != null)
            agentCOF = args["agent_cof"].AsUUID();
        if (args.ContainsKey("crossingflags") && args["crossingflags"] != null)
            CrossingFlags = (byte)args["crossingflags"].AsInteger();
        if (CrossingFlags != 0)
        {
            if (args.ContainsKey("crossExtraFlags") && args["crossExtraFlags"] != null)
                CrossExtraFlags = (byte)args["crossExtraFlags"].AsInteger();
        }
        if (args.ContainsKey("active_group_id") && args["active_group_id"] != null)
            ActiveGroupID = args["active_group_id"].AsUUID();
        if (args.ContainsKey("active_group_name") && args["active_group_name"] != null)
            ActiveGroupName = args["active_group_name"].AsString();
        if (args.ContainsKey("active_group_title") && args["active_group_title"] != null)
            ActiveGroupTitle = args["active_group_title"].AsString();
        if (args.ContainsKey("children_seeds") && (args["children_seeds"] != null) &&
            (args["children_seeds"].Type == OSDType.Array))
        {
            OSDArray childrenSeeds = (OSDArray)(args["children_seeds"]);
            ChildrenCapSeeds = new Dictionary<ulong, string>();
            foreach (OSD o in childrenSeeds)
            {
                if (o.Type == OSDType.Map)
                {
                    ulong handle = 0;
                    string seed = "";
                    OSDMap pair = (OSDMap)o;
                    // A pair with an unparseable handle is skipped entirely
                    if (pair["handle"] != null)
                        if (!UInt64.TryParse(pair["handle"].AsString(), out handle))
                            continue;
                    if (pair["seed"] != null)
                        seed = pair["seed"].AsString();
                    if (!ChildrenCapSeeds.ContainsKey(handle))
                        ChildrenCapSeeds.Add(handle, seed);
                }
            }
        }
        if ((args["animations"] != null) && (args["animations"]).Type == OSDType.Array)
        {
            OSDArray anims = (OSDArray)(args["animations"]);
            Anims = new Animation[anims.Count];
            int i = 0;
            foreach (OSD o in anims)
            {
                if (o.Type == OSDType.Map)
                {
                    Anims[i++] = new Animation((OSDMap)o);
                }
            }
        }
        if (args["default_animation"] != null)
        {
            try
            {
                DefaultAnim = new Animation((OSDMap)args["default_animation"]);
            }
            catch
            {
                DefaultAnim = null;
            }
        }
        if (args["animation_state"] != null)
        {
            try
            {
                AnimState = new Animation((OSDMap)args["animation_state"]);
            }
            catch
            {
                AnimState = null;
            }
        }
        MovementAnimationOverRides.Clear();
        if (args["movementAO"] != null && args["movementAO"].Type == OSDType.Array)
        {
            OSDArray AOs = (OSDArray)(args["movementAO"]);
            int count = AOs.Count;
            for (int i = 0; i < count; i++)
            {
                OSDMap ao = (OSDMap)AOs[i];
                if (ao["state"] != null && ao["uuid"] != null)
                {
                    string state = ao["state"].AsString();
                    UUID id = ao["uuid"].AsUUID();
                    MovementAnimationOverRides[state] = id;
                }
            }
        }
        if (args.ContainsKey("motion_state"))
            MotionState = (byte)args["motion_state"].AsInteger();
        //if ((args["agent_textures"] != null) && (args["agent_textures"]).Type == OSDType.Array)
        //{
        //    OSDArray textures = (OSDArray)(args["agent_textures"]);
        //    AgentTextures = new UUID[textures.Count];
        //    int i = 0;
        //    foreach (OSD o in textures)
        //        AgentTextures[i++] = o.AsUUID();
        //}
        // packed_appearance should contain all appearance information
        if (args.ContainsKey("packed_appearance") && (args["packed_appearance"]).Type == OSDType.Map)
        {
            m_log.WarnFormat("[CHILDAGENTDATAUPDATE] got packed appearance");
            Appearance = new AvatarAppearance((OSDMap)args["packed_appearance"]);
        }
        else
        {
            // if missing try the old pack method
            m_log.WarnFormat("[CHILDAGENTDATAUPDATE] No packed appearance, checking old method");
            Appearance = new AvatarAppearance();
            // The code to unpack textures, visuals, wearables and attachments
            // should be removed; packed appearance contains the full appearance
            // This is retained for backward compatibility only
            if (args["texture_entry"] != null)
            {
                byte[] rawtextures = args["texture_entry"].AsBinary();
                Primitive.TextureEntry textures = new Primitive.TextureEntry(rawtextures, 0, rawtextures.Length);
                Appearance.SetTextureEntries(textures);
            }
            if (args["visual_params"] != null)
                Appearance.SetVisualParams(args["visual_params"].AsBinary());
            if ((args["wearables"] != null) && (args["wearables"]).Type == OSDType.Array)
            {
                OSDArray wears = (OSDArray)(args["wearables"]);
                for (int i = 0; i < wears.Count / 2; i++)
                {
                    AvatarWearable awear = new AvatarWearable((OSDArray)wears[i]);
                    Appearance.SetWearable(i, awear);
                }
            }
            if ((args["attachments"] != null) && (args["attachments"]).Type == OSDType.Array)
            {
                OSDArray attachs = (OSDArray)(args["attachments"]);
                foreach (OSD o in attachs)
                {
                    if (o.Type == OSDType.Map)
                    {
                        // We know all of these must end up as attachments so we
                        // append rather than replace to ensure multiple attachments
                        // per point continues to work
                        // m_log.DebugFormat("[CHILDAGENTDATAUPDATE]: Appending attachments for {0}", AgentID);
                        Appearance.AppendAttachment(new AvatarAttachment((OSDMap)o));
                    }
                }
            }
            // end of code to remove
        }
        if ((args["controllers"] != null) && (args["controllers"]).Type == OSDType.Array)
        {
            OSDArray controls = (OSDArray)(args["controllers"]);
            Controllers = new ControllerData[controls.Count];
            int i = 0;
            foreach (OSD o in controls)
            {
                if (o.Type == OSDType.Map)
                {
                    Controllers[i++] = new ControllerData((OSDMap)o);
                }
            }
        }
        if (args["callback_uri"] != null)
            CallbackURI = args["callback_uri"].AsString();
        // Attachment objects
        if (args["attach_objects"] != null && args["attach_objects"].Type == OSDType.Array)
        {
            OSDArray attObjs = (OSDArray)(args["attach_objects"]);
            AttachmentObjects = new List<ISceneObject>();
            AttachmentObjectStates = new List<string>();
            foreach (OSD o in attObjs)
            {
                if (o.Type == OSDType.Map)
                {
                    OSDMap info = (OSDMap)o;
                    ISceneObject so = scene.DeserializeObject(info["sog"].AsString());
                    so.ExtraFromXmlString(info["extra"].AsString());
                    so.HasGroupChanged = info["modified"].AsBoolean();
                    AttachmentObjects.Add(so);
                    AttachmentObjectStates.Add(info["state"].AsString());
                }
            }
        }
        if (args["parent_part"] != null)
            ParentPart = args["parent_part"].AsUUID();
        if (args["sit_offset"] != null)
            Vector3.TryParse(args["sit_offset"].AsString(), out SitOffset);
    }

    public AgentData()
    {
    }

    public AgentData(Hashtable hash)
    {
        //UnpackUpdateMessage(hash);
    }

    /// <summary>Write a short human-readable summary to the console (debug aid).</summary>
    public void Dump()
    {
        System.Console.WriteLine("------------ AgentData ------------");
        System.Console.WriteLine("UUID: " + AgentID);
        System.Console.WriteLine("Region: " + RegionID);
        System.Console.WriteLine("Position: " + Position);
    }
}
/// <summary>
/// Subclass of AgentData whose Pack/Unpack simply delegate to the base
/// implementation; presumably kept as a distinct type so transfer code can
/// distinguish a "complete" update by type -- confirm against callers.
/// </summary>
public class CompleteAgentData : AgentData
{
public override OSDMap Pack(EntityTransferContext ctx)
{
return base.Pack(ctx);
}
public override void Unpack(OSDMap map, IScene scene, EntityTransferContext ctx)
{
base.Unpack(map, scene, ctx);
}
}
}
| |
using FluentAssertions;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Neo.Network.P2P;
using Neo.SmartContract;
using Neo.Wallets;
using System;
using System.Collections.Generic;
using System.Net;
using System.Numerics;
using System.Security.Cryptography;
namespace Neo.UnitTests
{
[TestClass]
public class UT_Helper
{
[TestMethod]
public void GetHashData()
{
TestVerifiable verifiable = new TestVerifiable();
byte[] res = verifiable.GetHashData();
res.Length.Should().Be(8);
byte[] requiredData = new byte[] { 7, 116, 101, 115, 116, 83, 116, 114 };
for (int i = 0; i < requiredData.Length; i++)
{
res[i].Should().Be(requiredData[i]);
}
}
[TestMethod]
public void Sign()
{
TestVerifiable verifiable = new TestVerifiable();
byte[] res = verifiable.Sign(new KeyPair(TestUtils.GetByteArray(32, 0x42)));
res.Length.Should().Be(64);
}
[TestMethod]
public void ToScriptHash()
{
byte[] testByteArray = TestUtils.GetByteArray(64, 0x42);
UInt160 res = testByteArray.ToScriptHash();
res.Should().Be(UInt160.Parse("2d3b96ae1bcc5a585e075e3b81920210dec16302"));
}
[TestMethod]
public void TestGetLowestSetBit()
{
var big1 = new BigInteger(0);
big1.GetLowestSetBit().Should().Be(-1);
var big2 = new BigInteger(512);
big2.GetLowestSetBit().Should().Be(9);
}
[TestMethod]
public void TestGetBitLength()
{
var b1 = new BigInteger(100);
b1.GetBitLength().Should().Be(7);
var b2 = new BigInteger(-100);
b2.GetBitLength().Should().Be(7);
}
[TestMethod]
public void TestHexToBytes()
{
string nullStr = null;
nullStr.HexToBytes().ToHexString().Should().Be(new byte[0].ToHexString());
string emptyStr = "";
emptyStr.HexToBytes().ToHexString().Should().Be(new byte[0].ToHexString());
string str1 = "hab";
Action action = () => str1.HexToBytes();
action.Should().Throw<FormatException>();
string str2 = "0102";
byte[] bytes = str2.HexToBytes();
bytes.ToHexString().Should().Be(new byte[] { 0x01, 0x02 }.ToHexString());
}
[TestMethod]
public void TestNextBigIntegerForRandom()
{
Random ran = new Random();
Action action1 = () => ran.NextBigInteger(-1);
action1.Should().Throw<ArgumentException>();
ran.NextBigInteger(0).Should().Be(0);
ran.NextBigInteger(8).Should().NotBeNull();
ran.NextBigInteger(9).Should().NotBeNull();
}
[TestMethod]
public void TestNextBigIntegerForRandomNumberGenerator()
{
var ran = RandomNumberGenerator.Create();
Action action1 = () => ran.NextBigInteger(-1);
action1.Should().Throw<ArgumentException>();
ran.NextBigInteger(0).Should().Be(0);
ran.NextBigInteger(8).Should().NotBeNull();
ran.NextBigInteger(9).Should().NotBeNull();
}
[TestMethod]
public void TestToInt64()
{
byte[] bytes = new byte[] { 0x01, 0x02, 0x03, 0x04 };
var ret = bytes.ToInt64(0);
ret.GetType().Should().Be(typeof(long));
ret.Should().Be(67305985);
}
[TestMethod]
public void TestToUInt16()
{
byte[] bytes = new byte[] { 0x01, 0x02, 0x03, 0x04 };
var ret = bytes.ToUInt16(0);
ret.GetType().Should().Be(typeof(ushort));
ret.Should().Be(513);
}
[TestMethod]
public void TestToUInt64()
{
byte[] bytes = new byte[] { 0x01, 0x02, 0x03, 0x04 };
var ret = bytes.ToUInt64(0);
ret.GetType().Should().Be(typeof(ulong));
ret.Should().Be(67305985);
}
[TestMethod]
public void TestUnmapForIPAddress()
{
var addr = new IPAddress(new byte[] { 127, 0, 0, 1 });
addr.Unmap().Should().Be(addr);
var addr2 = addr.MapToIPv6();
addr2.Unmap().Should().Be(addr);
}
[TestMethod]
public void TestUnmapForIPEndPoin()
{
var addr = new IPAddress(new byte[] { 127, 0, 0, 1 });
var endPoint = new IPEndPoint(addr, 8888);
endPoint.Unmap().Should().Be(endPoint);
var addr2 = addr.MapToIPv6();
var endPoint2 = new IPEndPoint(addr2, 8888);
endPoint2.Unmap().Should().Be(endPoint);
}
[TestMethod]
public void TestWeightedAverage()
{
var foo1 = new Foo
{
Value = 1,
Weight = 2
};
var foo2 = new Foo
{
Value = 2,
Weight = 3
};
var list = new List<Foo>
{
foo1,foo2
};
list.WeightedAverage(p => p.Value, p => p.Weight).Should().Be(new BigInteger(1));
var foo3 = new Foo
{
Value = 1,
Weight = 0
};
var foo4 = new Foo
{
Value = 2,
Weight = 0
};
var list2 = new List<Foo>
{
foo3, foo4
};
list2.WeightedAverage(p => p.Value, p => p.Weight).Should().Be(BigInteger.Zero);
}
[TestMethod]
public void WeightFilter()
{
var w1 = new Woo
{
Value = 1
};
var w2 = new Woo
{
Value = 2
};
var list = new List<Woo>
{
w1, w2
};
var ret = list.WeightedFilter(0.3, 0.6, p => p.Value, (p, w) => new Result
{
Info = p,
Weight = w
});
var sum = BigInteger.Zero;
foreach (Result res in ret)
{
sum = BigInteger.Add(res.Weight, sum);
}
sum.Should().Be(BigInteger.Zero);
var w3 = new Woo
{
Value = 3
};
var list2 = new List<Woo>
{
w1, w2, w3
};
var ret2 = list2.WeightedFilter(0.3, 0.4, p => p.Value, (p, w) => new Result
{
Info = p,
Weight = w
});
sum = BigInteger.Zero;
foreach (Result res in ret2)
{
sum = BigInteger.Add(res.Weight, sum);
}
sum.Should().Be(BigInteger.Zero);
CheckArgumentOutOfRangeException(-1, 0.4, p => p.Value, list2);
CheckArgumentOutOfRangeException(0.2, 1.4, p => p.Value, list2);
CheckArgumentOutOfRangeException(0.8, 0.3, p => p.Value, list2);
CheckArgumentOutOfRangeException(0.3, 0.8, p => p.Value, list2);
CheckArgumentNullException(0.3, 0.6, null, list2);
CheckArgumentNullException(0.3, 0.4, p => p.Value, null);
list2.WeightedFilter(0.3, 0.3, p => p.Value, (p, w) => new Result
{
Info = p,
Weight = w
}).WeightedAverage(p => p.Weight, p => p.Weight).Should().Be(0);
var list3 = new List<Woo>();
list3.WeightedFilter(0.3, 0.6, p => p.Value, (p, w) => new Result
{
Info = p,
Weight = w
}).WeightedAverage(p => p.Weight, p => p.Weight).Should().Be(0);
}
private static void CheckArgumentOutOfRangeException(double start, double end, Func<Woo, BigInteger> func, List<Woo> list)
{
Action action = () => list.WeightedFilter(start, end, func, (p, w) => new Result
{
Info = p,
Weight = w
}).WeightedAverage(p => p.Weight, p => p.Weight);
action.Should().Throw<ArgumentOutOfRangeException>();
}
private static void CheckArgumentNullException(double start, double end, Func<Woo, BigInteger> func, List<Woo> list)
{
Action action = () => list.WeightedFilter(start, end, func, (p, w) => new Result
{
Info = p,
Weight = w
}).WeightedAverage(p => p.Weight, p => p.Weight);
action.Should().Throw<ArgumentNullException>();
}
}
class Foo
{
public int Weight { set; get; }
public int Value { set; get; }
}
class Woo
{
public int Value { set; get; }
}
class Result
{
public Woo Info { set; get; }
public BigInteger Weight { set; get; }
}
}
| |
using System;
using System.Collections.Generic;
using Xunit;
namespace Peddler {
public class MaybeDefaultComparableGeneratorTests : MaybeDefaultDistinctGeneratorTests {
protected sealed override MaybeDefaultDistinctGenerator<T> MaybeDefaultDistinct<T>(
IComparableGenerator<T> inner) {
return this.MaybeDefaultComparable<T>(inner);
}
protected sealed override MaybeDefaultDistinctGenerator<T> MaybeDefaultDistinct<T>(
IComparableGenerator<T> inner,
T defaultValue) {
return this.MaybeDefaultComparable<T>(inner, defaultValue);
}
protected sealed override MaybeDefaultDistinctGenerator<T> MaybeDefaultDistinct<T>(
IComparableGenerator<T> inner,
decimal percentage) {
return this.MaybeDefaultComparable<T>(inner, percentage);
}
protected sealed override MaybeDefaultDistinctGenerator<T> MaybeDefaultDistinct<T>(
IComparableGenerator<T> inner,
T defaultValue,
decimal percentage) {
return this.MaybeDefaultComparable<T>(inner, defaultValue, percentage);
}
protected virtual MaybeDefaultComparableGenerator<T> MaybeDefaultComparable<T>(
IComparableGenerator<T> inner) {
var generator = new MaybeDefaultComparableGenerator<T>(inner);
Assert.Equal(default(T), generator.DefaultValue);
return generator;
}
protected virtual MaybeDefaultComparableGenerator<T> MaybeDefaultComparable<T>(
IComparableGenerator<T> inner,
T defaultValue) {
var generator = new MaybeDefaultComparableGenerator<T>(inner, defaultValue);
Assert.Equal(defaultValue, generator.DefaultValue);
return generator;
}
protected virtual MaybeDefaultComparableGenerator<T> MaybeDefaultComparable<T>(
IComparableGenerator<T> inner,
decimal percentage) {
var generator = new MaybeDefaultComparableGenerator<T>(inner, percentage);
Assert.Equal(default(T), generator.DefaultValue);
return generator;
}
protected virtual MaybeDefaultComparableGenerator<T> MaybeDefaultComparable<T>(
IComparableGenerator<T> inner,
T defaultValue,
decimal percentage) {
var generator = new MaybeDefaultComparableGenerator<T>(inner, defaultValue, percentage);
Assert.Equal(defaultValue, generator.DefaultValue);
return generator;
}
[Theory]
[MemberData(nameof(DefaultReturningGenerators))]
public void NextLessThan_InnerReturnsDefault(Object inner) {
this.InvokeGenericMethod(
nameof(NextLessThan_InnerReturnsDefaultImpl),
inner
);
}
protected void NextLessThan_InnerReturnsDefaultImpl<T>(DefaultGenerator<T> inner) {
var generator = this.MaybeDefaultComparable<T>(inner, inner.DefaultValue);
Assert.Throws<UnableToGenerateValueException>(
() => generator.NextLessThan(inner.DefaultValue)
);
}
[Theory]
[MemberData(nameof(IgnoredPercentages))]
public void NextLessThan_InnerFailsButDefaultOk_Struct(decimal percentage) {
var inner = new FakeStructGenerator(new Int32Generator(5, 10));
this.NextLessThan_InnerFailsButDefaultOk_StructImpl(
this.MaybeDefaultComparable(
inner,
percentage
)
);
this.NextLessThan_InnerFailsButDefaultOk_StructImpl(
this.MaybeDefaultComparable(
inner,
new FakeStruct { Value = -100 },
percentage
)
);
}
private void NextLessThan_InnerFailsButDefaultOk_StructImpl(
MaybeDefaultComparableGenerator<FakeStruct> generator) {
for (var attempt = 0; attempt < numberOfAttempts; attempt++) {
// 2 is less than the range of FakeStructGenerator,
// but greater then default (which is 0).
var value = generator.NextLessThan(new FakeStruct { Value = 2 });
Assert.Equal(generator.DefaultValue, value);
Assert.True(generator.EqualityComparer.Equals(generator.DefaultValue, value));
Assert.Equal(0, generator.Comparer.Compare(generator.DefaultValue, value));
}
}
[Theory]
[MemberData(nameof(IgnoredPercentages))]
public void NextLessThan_InnerFailsButDefaultOk_Class(decimal percentage) {
var inner = new FakeClassGenerator(new Int32Generator(5, 10));
this.NextLessThan_InnerFailsButDefaultOk_ClassImpl(
this.MaybeDefaultComparable(
inner,
percentage
)
);
this.NextLessThan_InnerFailsButDefaultOk_ClassImpl(
this.MaybeDefaultComparable(
inner,
new FakeClass(-100),
percentage
)
);
}
private void NextLessThan_InnerFailsButDefaultOk_ClassImpl(
MaybeDefaultComparableGenerator<FakeClass> generator) {
for (var attempt = 0; attempt < numberOfAttempts; attempt++) {
// 2 is less than the range of FakeClassGenerator,
// but greater then default (which is null).
// null is always considered "less" than non-null values.
var value = generator.NextLessThan(new FakeClass(2));
Assert.Equal(generator.DefaultValue, value);
Assert.True(generator.EqualityComparer.Equals(generator.DefaultValue, value));
Assert.Equal(0, generator.Comparer.Compare(generator.DefaultValue, value));
}
}
public static IEnumerable<object[]> NextLessThan_FiftyPercentOfDefault_Data {
get {
return new List<object[]> {
new object[] {
new FakeStructGenerator(new Int32Generator(2, 5)),
new FakeStruct { Value = 3 }
},
new object[] {
new FakeStructGenerator(new Int32Generator(2, 5)),
new FakeStruct { Value = 5 }
},
new object[] {
new FakeClassGenerator(new Int32Generator(2, 5)),
new FakeClass(3)
},
new object[] {
new FakeClassGenerator(new Int32Generator(2, 5)),
new FakeClass(5)
}
};
}
}
[Theory]
[MemberData(nameof(NextLessThan_FiftyPercentOfDefault_Data))]
public void NextLessThan_FiftyPercentOfDefault(Object inner, Object otherValue) {
this.InvokeGenericMethod(
nameof(NextLessThan_FiftyPercentOfDefaultImpl),
inner,
otherValue
);
}
protected void NextLessThan_FiftyPercentOfDefaultImpl<T>(
IComparableGenerator<T> inner,
T otherValue) {
this.NextImpl_FiftyPercentOfDefaultImpl<T>(
inner,
otherValue,
generator => generator.NextLessThan
);
}
[Theory]
[MemberData(nameof(DefaultReturningGenerators))]
public void NextLessThanOrEqualTo_InnerReturnsDefault(Object inner) {
this.InvokeGenericMethod(
nameof(NextLessThanOrEqualTo_InnerReturnsDefaultImpl),
inner
);
}
protected void NextLessThanOrEqualTo_InnerReturnsDefaultImpl<T>(
DefaultGenerator<T> inner) {
var generator = this.MaybeDefaultComparable<T>(inner, inner.DefaultValue);
for (var attempt = 0; attempt < numberOfAttempts; attempt++) {
var value = generator.NextLessThanOrEqualTo(inner.DefaultValue);
Assert.Equal(inner.DefaultValue, value);
Assert.True(generator.EqualityComparer.Equals(inner.DefaultValue, value));
Assert.Equal(0, generator.Comparer.Compare(inner.DefaultValue, value));
}
}
[Theory]
[MemberData(nameof(IgnoredPercentages))]
public void NextLessThanOrEqualTo_InnerFailsButDefaultOk_Struct(decimal percentage) {
var inner = new FakeStructGenerator(new Int32Generator(5, 10));
this.NextLessThanOrEqualTo_InnerFailsButDefaultOk_StructImpl(
this.MaybeDefaultComparable(
inner,
percentage
)
);
this.NextLessThanOrEqualTo_InnerFailsButDefaultOk_StructImpl(
this.MaybeDefaultComparable(
inner,
new FakeStruct { Value = -100 },
percentage
)
);
}
private void NextLessThanOrEqualTo_InnerFailsButDefaultOk_StructImpl(
MaybeDefaultComparableGenerator<FakeStruct> generator) {
for (var attempt = 0; attempt < numberOfAttempts; attempt++) {
// 2 is less than the range of FakeStructGenerator,
// but greater then default (which is 0).
var value = generator.NextLessThanOrEqualTo(new FakeStruct { Value = 2 });
Assert.Equal(generator.DefaultValue, value);
Assert.True(generator.EqualityComparer.Equals(generator.DefaultValue, value));
Assert.Equal(0, generator.Comparer.Compare(generator.DefaultValue, value));
// the default is consider equal to itself, so should return default(T)
value = generator.NextLessThanOrEqualTo(generator.DefaultValue);
Assert.Equal(generator.DefaultValue, value);
Assert.True(generator.EqualityComparer.Equals(generator.DefaultValue, value));
Assert.Equal(0, generator.Comparer.Compare(generator.DefaultValue, value));
}
}
[Theory]
[MemberData(nameof(IgnoredPercentages))]
public void NextLessThanOrEqualTo_InnerFailsButDefaultOk_Class(decimal percentage) {
var inner = new FakeClassGenerator(new Int32Generator(5, 10));
this.NextLessThanOrEqualTo_InnerFailsButDefaultOk_ClassImpl(
this.MaybeDefaultComparable(
inner,
percentage
)
);
this.NextLessThanOrEqualTo_InnerFailsButDefaultOk_ClassImpl(
this.MaybeDefaultComparable(
inner,
new FakeClass(-100),
percentage
)
);
}
protected void NextLessThanOrEqualTo_InnerFailsButDefaultOk_ClassImpl(
MaybeDefaultComparableGenerator<FakeClass> generator) {
for (var attempt = 0; attempt < numberOfAttempts; attempt++) {
// 2 is less than the range of FakeClassGenerator,
// but greater then default (which is null).
// null is always considered "less" than non-null values.
var value = generator.NextLessThanOrEqualTo(new FakeClass(2));
Assert.Equal(generator.DefaultValue, value);
Assert.True(generator.EqualityComparer.Equals(generator.DefaultValue, value));
Assert.Equal(0, generator.Comparer.Compare(generator.DefaultValue, value));
// the default is consider equal to itself, so should return default(T)
value = generator.NextLessThanOrEqualTo(generator.DefaultValue);
Assert.Equal(generator.DefaultValue, value);
Assert.True(generator.EqualityComparer.Equals(generator.DefaultValue, value));
Assert.Equal(0, generator.Comparer.Compare(generator.DefaultValue, value));
}
}
public static IEnumerable<object[]> NextLessThanOrEqualTo_FiftyPercentOfDefault_Data {
get {
return new List<object[]> {
new object[] {
new FakeStructGenerator(new Int32Generator(2, 5)),
new FakeStruct { Value = 3 }
},
new object[] {
new FakeStructGenerator(new Int32Generator(2, 5)),
new FakeStruct { Value = 5 }
},
new object[] {
new FakeStructGenerator(new Int32Generator(2, 5)),
new FakeStruct { Value = 2 }
},
new object[] {
new FakeClassGenerator(new Int32Generator(2, 5)),
new FakeClass(3)
},
new object[] {
new FakeClassGenerator(new Int32Generator(2, 5)),
new FakeClass(5)
},
new object[] {
new FakeClassGenerator(new Int32Generator(2, 5)),
new FakeClass(2)
}
};
}
}
[Theory]
[MemberData(nameof(NextLessThanOrEqualTo_FiftyPercentOfDefault_Data))]
public void NextLessThanOrEqualTo_FiftyPercentOfDefault(Object inner, Object otherValue) {
this.InvokeGenericMethod(
nameof(NextLessThanOrEqualTo_FiftyPercentOfDefaultImpl),
inner,
otherValue
);
}
protected void NextLessThanOrEqualTo_FiftyPercentOfDefaultImpl<T>(
IComparableGenerator<T> inner,
T otherValue) {
this.NextImpl_FiftyPercentOfDefaultImpl<T>(
inner,
otherValue,
generator => generator.NextLessThanOrEqualTo
);
}
[Theory]
[MemberData(nameof(DefaultReturningGenerators))]
public void NextGreaterThanOrEqualTo_InnerReturnsDefault(Object inner) {
this.InvokeGenericMethod(
nameof(NextGreaterThanOrEqualTo_InnerReturnsDefaultImpl),
inner
);
}
protected void NextGreaterThanOrEqualTo_InnerReturnsDefaultImpl<T>(
DefaultGenerator<T> inner) {
var generator = this.MaybeDefaultComparable<T>(inner, inner.DefaultValue);
for (var attempt = 0; attempt < numberOfAttempts; attempt++) {
var value = generator.NextGreaterThanOrEqualTo(inner.DefaultValue);
Assert.Equal(inner.DefaultValue, value);
Assert.True(generator.EqualityComparer.Equals(inner.DefaultValue, value));
Assert.Equal(0, generator.Comparer.Compare(inner.DefaultValue, value));
}
}
[Theory]
[MemberData(nameof(IgnoredPercentages))]
public void NextGreaterThanOrEqualTo_InnerFailsButDefaultOk(decimal percentage) {
var inner = new FakeStructGenerator(new Int32Generator(-10, -5));
this.NextGreaterThanOrEqualTo_InnerFailsButDefaultOkImpl(
this.MaybeDefaultComparable(
inner,
percentage
)
);
this.NextGreaterThanOrEqualTo_InnerFailsButDefaultOkImpl(
this.MaybeDefaultComparable(
inner,
new FakeStruct { Value = 100 },
percentage
)
);
}
protected void NextGreaterThanOrEqualTo_InnerFailsButDefaultOkImpl(
MaybeDefaultComparableGenerator<FakeStruct> generator) {
for (var attempt = 0; attempt < numberOfAttempts; attempt++) {
// -5 is less than the range of FakeStructGenerator,
// but greater then default (which is 0).
var value = generator.NextGreaterThanOrEqualTo(new FakeStruct { Value = -2 });
Assert.Equal(generator.DefaultValue, value);
Assert.True(generator.EqualityComparer.Equals(generator.DefaultValue, value));
Assert.Equal(0, generator.Comparer.Compare(generator.DefaultValue, value));
// the default is consider equal to itself, so should return default(T)
value = generator.NextGreaterThanOrEqualTo(generator.DefaultValue);
Assert.Equal(generator.DefaultValue, value);
Assert.True(generator.EqualityComparer.Equals(generator.DefaultValue, value));
Assert.Equal(0, generator.Comparer.Compare(generator.DefaultValue, value));
}
}
public static IEnumerable<object[]> NextGreaterThanOrEqualTo_FiftyPercentOfDefault_Data {
get {
return new List<object[]> {
new object[] {
new FakeStructGenerator(new Int32Generator(-4, -1)),
new FakeStruct { Value = -5 }
},
new object[] {
new FakeStructGenerator(new Int32Generator(-4, -1)),
new FakeStruct { Value = -3 }
},
new object[] {
new FakeStructGenerator(new Int32Generator(-4, -1)),
new FakeStruct { Value = -2 }
}
};
}
}
[Theory]
[MemberData(nameof(NextGreaterThanOrEqualTo_FiftyPercentOfDefault_Data))]
public void NextGreaterThanOrEqualTo_FiftyPercentOfDefault(Object inner, Object otherValue) {
this.InvokeGenericMethod(
nameof(NextGreaterThanOrEqualTo_FiftyPercentOfDefaultImpl),
inner,
otherValue
);
}
protected void NextGreaterThanOrEqualTo_FiftyPercentOfDefaultImpl<T>(
IComparableGenerator<T> inner,
T otherValue) {
this.NextImpl_FiftyPercentOfDefaultImpl<T>(
inner,
otherValue,
generator => generator.NextGreaterThanOrEqualTo
);
}
[Theory]
[MemberData(nameof(DefaultReturningGenerators))]
public void NextGreaterThan_InnerReturnsDefault(Object inner) {
this.InvokeGenericMethod(
nameof(NextGreaterThan_InnerReturnsDefaultImpl),
inner
);
}
protected void NextGreaterThan_InnerReturnsDefaultImpl<T>(DefaultGenerator<T> inner) {
var generator = this.MaybeDefaultComparable<T>(inner, inner.DefaultValue);
Assert.Throws<UnableToGenerateValueException>(
() => generator.NextGreaterThan(inner.DefaultValue)
);
}
[Theory]
[MemberData(nameof(IgnoredPercentages))]
public void NextGreaterThan_InnerFailsButDefaultOk(decimal percentage) {
var inner = new FakeStructGenerator(new Int32Generator(-10, -5));
var generator = this.MaybeDefaultComparable(inner, percentage);
this.NextGreaterThan_InnerFailsButDefaultOkImpl(
this.MaybeDefaultComparable(
inner,
percentage
)
);
this.NextGreaterThan_InnerFailsButDefaultOkImpl(
this.MaybeDefaultComparable(
inner,
new FakeStruct { Value = 5 },
percentage
)
);
}
private void NextGreaterThan_InnerFailsButDefaultOkImpl(
MaybeDefaultComparableGenerator<FakeStruct> generator) {
for (var attempt = 0; attempt < numberOfAttempts; attempt++) {
// -5 is less than the range of FakeStructGenerator,
// but greater then default (which is 0).
var value = generator.NextGreaterThan(new FakeStruct { Value = -2 });
Assert.Equal(generator.DefaultValue, value);
Assert.True(generator.EqualityComparer.Equals(generator.DefaultValue, value));
Assert.Equal(0, generator.Comparer.Compare(generator.DefaultValue, value));
}
}
public static IEnumerable<object[]> NextGreaterThan_FiftyPercentOfDefault_Data {
get {
return new List<object[]> {
new object[] {
new FakeStructGenerator(new Int32Generator(-4, -1)),
new FakeStruct { Value = -5 }
},
new object[] {
new FakeStructGenerator(new Int32Generator(-4, -1)),
new FakeStruct { Value = -3 }
}
};
}
}
[Theory]
[MemberData(nameof(NextGreaterThan_FiftyPercentOfDefault_Data))]
public void NextGreaterThan_FiftyPercentOfDefault(Object inner, Object otherValue) {
this.InvokeGenericMethod(
nameof(NextGreaterThan_FiftyPercentOfDefaultImpl),
inner,
otherValue
);
}
protected void NextGreaterThan_FiftyPercentOfDefaultImpl<T>(
IComparableGenerator<T> inner,
T otherValue) {
this.NextImpl_FiftyPercentOfDefaultImpl<T>(
inner,
otherValue,
generator => generator.NextGreaterThan
);
}
private void NextImpl_FiftyPercentOfDefaultImpl<T>(
IComparableGenerator<T> inner,
T otherValue,
Func<IComparableGenerator<T>, Func<T, T>> getNextImpl) {
const decimal percentage = 0.5m;
var generator = this.MaybeDefaultComparable<T>(inner, percentage);
var nextImpl = getNextImpl(generator);
var hasDefault = false;
var hasNonDefault = false;
for (var attempt = 0; attempt < extendedNumberOfAttempts; attempt++) {
var value = nextImpl(otherValue);
if (!hasDefault) {
hasDefault = inner.EqualityComparer.Equals(value, generator.DefaultValue);
}
if (!hasNonDefault) {
hasNonDefault = !inner.EqualityComparer.Equals(value, generator.DefaultValue);
}
if (hasDefault && hasNonDefault) {
break;
}
}
Assert.True(
hasDefault,
$"After {extendedNumberOfAttempts:N0} attempts with a {percentage * 100}% " +
$"percentage chance of generating default values, the generator did not " +
$"generate a default value. The randomization approach is unbalanced."
);
Assert.True(
hasNonDefault,
$"After {extendedNumberOfAttempts:N0} attempts with a {percentage * 100}% " +
$"percentage chance of generating default values, the generator did not " +
$"generate a non-default value. The randomization approach is unbalanced."
);
}
}
}
| |
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the MIT License (MIT).
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Windows.Graphics.Imaging;
using Windows.Media;
using Windows.Media.Capture;
using Windows.Media.FaceAnalysis;
using Windows.Media.MediaProperties;
using Windows.System.Threading;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Media;
using Windows.UI.Xaml.Media.Imaging;
using Windows.UI.Xaml.Navigation;
using Windows.UI.Xaml.Shapes;
namespace SDKTemplate
{
/// <summary>
/// Page for demonstrating FaceTracking.
/// </summary>
public sealed partial class TrackFacesInWebcam : Page
{
        /// <summary>
        /// Brush for drawing the bounding box around each identified face.
        /// </summary>
        private readonly SolidColorBrush lineBrush = new SolidColorBrush(Windows.UI.Colors.Yellow);
        /// <summary>
        /// Thickness of the face bounding box lines.
        /// </summary>
        private readonly double lineThickness = 2.0;
        /// <summary>
        /// Transparent fill for the bounding box.
        /// </summary>
        private readonly SolidColorBrush fillBrush = new SolidColorBrush(Windows.UI.Colors.Transparent);
        /// <summary>
        /// Reference back to the "root" page of the app.
        /// </summary>
        private MainPage rootPage;
        /// <summary>
        /// Holds the current scenario state value (Idle or Streaming).
        /// </summary>
        private ScenarioState currentState;
        /// <summary>
        /// References a MediaCapture instance; is null when not in Streaming state.
        /// </summary>
        private MediaCapture mediaCapture;
        /// <summary>
        /// Cache of properties from the current MediaCapture device which is used for capturing the preview frame.
        /// Populated in StartWebcamStreaming and read by ProcessCurrentVideoFrame.
        /// </summary>
        private VideoEncodingProperties videoProperties;
        /// <summary>
        /// References a FaceTracker instance; created asynchronously in OnNavigatedTo.
        /// </summary>
        private FaceTracker faceTracker;
        /// <summary>
        /// A periodic timer to execute FaceTracker on preview frames.
        /// </summary>
        private ThreadPoolTimer frameProcessingTimer;
        /// <summary>
        /// Semaphore to ensure FaceTracking logic only executes one at a time.
        /// NOTE(review): constructed with initialCount 1 and no maxCount, so the extra
        /// Release() in StartWebcamStreaming can raise the count above 1 — confirm
        /// streaming is never (re)started without an intervening acquire.
        /// </summary>
        private SemaphoreSlim frameProcessingSemaphore = new SemaphoreSlim(1);
        /// <summary>
        /// Initializes a new instance of the <see cref="TrackFacesInWebcam"/> class.
        /// Starts in the Idle state and subscribes to the app's Suspending event so
        /// the camera can be released if the app is suspended while streaming.
        /// </summary>
        public TrackFacesInWebcam()
        {
            this.InitializeComponent();
            this.currentState = ScenarioState.Idle;
            App.Current.Suspending += this.OnSuspending;
        }
        /// <summary>
        /// Values for identifying and controlling scenario states. Checked by the
        /// frame-processing timer callback before doing any work.
        /// </summary>
        private enum ScenarioState
        {
            /// <summary>
            /// Display is blank - default state.
            /// </summary>
            Idle,
            /// <summary>
            /// Webcam is actively engaged and a live video stream is displayed.
            /// </summary>
            Streaming
        }
        /// <summary>
        /// Responds when we navigate to this page: caches the root page reference
        /// and creates the FaceTracker on first navigation.
        /// </summary>
        /// <param name="e">Event data</param>
        protected override async void OnNavigatedTo(NavigationEventArgs e)
        {
            this.rootPage = MainPage.Current;
            // The 'await' operation can only be used from within an async method but class constructors
            // cannot be labeled as async, and so we'll initialize FaceTracker here.
            if (this.faceTracker == null)
            {
                this.faceTracker = await FaceTracker.CreateAsync();
            }
        }
        /// <summary>
        /// Responds to App Suspend event to stop/release MediaCapture object if it's running and return to Idle state.
        /// </summary>
        /// <param name="sender">The source of the Suspending event</param>
        /// <param name="e">Event data</param>
        private void OnSuspending(object sender, Windows.ApplicationModel.SuspendingEventArgs e)
        {
            if (this.currentState == ScenarioState.Streaming)
            {
                // Take a deferral so suspension does not complete before the state
                // change (and the camera teardown it triggers) has been started.
                var deferral = e.SuspendingOperation.GetDeferral();
                try
                {
                    this.ChangeScenarioState(ScenarioState.Idle);
                }
                finally
                {
                    deferral.Complete();
                }
            }
        }
        /// <summary>
        /// Initializes a new MediaCapture instance and starts the Preview streaming to the CamPreview UI element.
        /// Also caches the preview stream's encoding properties and starts the periodic frame-processing timer.
        /// </summary>
        /// <returns>Async Task object returning true if initialization and streaming were successful and false if an exception occurred.</returns>
        private async Task<bool> StartWebcamStreaming()
        {
            bool successful = true;
            try
            {
                this.mediaCapture = new MediaCapture();
                // For this scenario, we only need Video (not microphone) so specify this in the initializer.
                // NOTE: the appxmanifest only declares "webcam" under capabilities and if this is changed to include
                // microphone (default constructor) you must add "microphone" to the manifest or initialization will fail.
                MediaCaptureInitializationSettings settings = new MediaCaptureInitializationSettings();
                settings.StreamingCaptureMode = StreamingCaptureMode.Video;
                await this.mediaCapture.InitializeAsync(settings);
                this.mediaCapture.Failed += this.MediaCapture_CameraStreamFailed;
                // Cache the media properties as we'll need them later (frame dimensions in ProcessCurrentVideoFrame).
                var deviceController = this.mediaCapture.VideoDeviceController;
                this.videoProperties = deviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview) as VideoEncodingProperties;
                // Immediately start streaming to our CaptureElement UI.
                // NOTE: CaptureElement's Source must be set before streaming is started.
                this.CamPreview.Source = this.mediaCapture;
                await this.mediaCapture.StartPreviewAsync();
                // Ensure the Semaphore is in the signalled state.
                // NOTE(review): the field is created as SemaphoreSlim(1) (count already 1, no
                // max count), so this Release() appears to raise the count to 2 and would then
                // allow two concurrent timer callbacks in ProcessCurrentVideoFrame — confirm
                // whether another path leaves the semaphore acquired before this method runs.
                this.frameProcessingSemaphore.Release();
                // Use a 66 millisecond interval for our timer, i.e. 15 frames per second
                TimeSpan timerInterval = TimeSpan.FromMilliseconds(66);
                this.frameProcessingTimer = Windows.System.Threading.ThreadPoolTimer.CreatePeriodicTimer(new Windows.System.Threading.TimerElapsedHandler(ProcessCurrentVideoFrame), timerInterval);
            }
            catch (System.UnauthorizedAccessException)
            {
                // If the user has disabled their webcam this exception is thrown; provide a descriptive message to inform the user of this fact.
                this.rootPage.NotifyUser("Webcam is disabled or access to the webcam is disabled for this app.\nEnsure Privacy Settings allow webcam usage.", NotifyType.ErrorMessage);
                successful = false;
            }
            catch (Exception ex)
            {
                this.rootPage.NotifyUser(ex.ToString(), NotifyType.ErrorMessage);
                successful = false;
            }
            return successful;
        }
        /// <summary>
        /// Safely stops webcam streaming (if running) and releases MediaCapture object.
        /// NOTE(review): async void means callers cannot await completion or observe
        /// failures; acceptable here only because all exceptions are handled locally.
        /// </summary>
        private async void ShutdownWebCam()
        {
            // Cancel the timer first so no new frame-processing callbacks are scheduled
            // while the capture pipeline is being torn down.
            if(this.frameProcessingTimer != null)
            {
                this.frameProcessingTimer.Cancel();
            }
            if (this.mediaCapture != null)
            {
                if (this.mediaCapture.CameraStreamState == Windows.Media.Devices.CameraStreamState.Streaming)
                {
                    try
                    {
                        await this.mediaCapture.StopPreviewAsync();
                    }
                    catch(Exception)
                    {
                        ; // Since we're going to destroy the MediaCapture object there's nothing to do here
                    }
                }
                this.mediaCapture.Dispose();
            }
            // Clear the references so the Idle state holds no camera resources.
            this.frameProcessingTimer = null;
            this.CamPreview.Source = null;
            this.mediaCapture = null;
            this.CameraStreamingButton.IsEnabled = true;
        }
        /// <summary>
        /// This method is invoked by a ThreadPoolTimer to execute the FaceTracker and Visualization logic at approximately 15 frames per second.
        /// </summary>
        /// <remarks>
        /// Keep in mind this method is called from a Timer and not synchronized with the camera stream. Also, the processing time of FaceTracker
        /// will vary depending on the size of each frame and the number of faces being tracked. That is, a large image with several tracked faces may
        /// take longer to process.
        /// </remarks>
        /// <param name="timer">Timer object invoking this call</param>
        private async void ProcessCurrentVideoFrame(ThreadPoolTimer timer)
        {
            if (this.currentState != ScenarioState.Streaming)
            {
                return;
            }
            // If a lock is being held it means we're still waiting for processing work on the previous frame to complete.
            // In this situation, don't wait on the semaphore but exit immediately.
            if (!frameProcessingSemaphore.Wait(0))
            {
                return;
            }
            try
            {
                IList<DetectedFace> faces = null;
                // Create a VideoFrame object specifying the pixel format we want our capture image to be (NV12 bitmap in this case).
                // GetPreviewFrame will convert the native webcam frame into this format.
                const BitmapPixelFormat InputPixelFormat = BitmapPixelFormat.Nv12;
                using (VideoFrame previewFrame = new VideoFrame(InputPixelFormat, (int)this.videoProperties.Width, (int)this.videoProperties.Height))
                {
                    await this.mediaCapture.GetPreviewFrameAsync(previewFrame);
                    // The returned VideoFrame should be in the supported NV12 format but we need to verify this.
                    if (FaceDetector.IsBitmapPixelFormatSupported(previewFrame.SoftwareBitmap.BitmapPixelFormat))
                    {
                        faces = await this.faceTracker.ProcessNextFrameAsync(previewFrame);
                    }
                    else
                    {
                        throw new System.NotSupportedException("PixelFormat '" + InputPixelFormat.ToString() + "' is not supported by FaceDetector");
                    }
                    // Create our visualization using the frame dimensions and face results but run it on the UI thread.
                    var previewFrameSize = new Windows.Foundation.Size(previewFrame.SoftwareBitmap.PixelWidth, previewFrame.SoftwareBitmap.PixelHeight);
                    // Fire-and-forget on purpose: drawing must happen on the UI thread and
                    // frame processing does not need to block on it.
                    var ignored = this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                    {
                        this.SetupVisualization(previewFrameSize, faces);
                    });
                }
            }
            catch (Exception ex)
            {
                var ignored = this.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                {
                    this.rootPage.NotifyUser(ex.ToString(), NotifyType.ErrorMessage);
                });
            }
            finally
            {
                // Always release so the next timer tick can process a frame.
                frameProcessingSemaphore.Release();
            }
        }
/// <summary>
/// Takes the webcam image and FaceTracker results and assembles the visualization onto the Canvas.
/// </summary>
/// <param name="framePizelSize">Width and height (in pixels) of the video capture frame</param>
/// <param name="foundFaces">List of detected faces; output from FaceTracker</param>
private void SetupVisualization(Windows.Foundation.Size framePizelSize, IList<DetectedFace> foundFaces)
{
    this.VisualizationCanvas.Children.Clear();

    double canvasWidth = this.VisualizationCanvas.ActualWidth;
    double canvasHeight = this.VisualizationCanvas.ActualHeight;

    // Nothing to draw unless we are streaming, have results, and the canvas has been laid out.
    if (this.currentState != ScenarioState.Streaming || foundFaces == null || canvasWidth == 0 || canvasHeight == 0)
    {
        return;
    }

    // Frame-to-canvas scale factors; face boxes are reported in frame pixels.
    double scaleX = framePizelSize.Width / canvasWidth;
    double scaleY = framePizelSize.Height / canvasHeight;

    foreach (DetectedFace face in foundFaces)
    {
        // Build one rectangle per face, scaled from frame coordinates to canvas coordinates.
        var faceBox = new Rectangle
        {
            Width = (uint)(face.FaceBox.Width / scaleX),
            Height = (uint)(face.FaceBox.Height / scaleY),
            Fill = this.fillBrush,
            Stroke = this.lineBrush,
            StrokeThickness = this.lineThickness,
            Margin = new Thickness((uint)(face.FaceBox.X / scaleX), (uint)(face.FaceBox.Y / scaleY), 0, 0)
        };
        this.VisualizationCanvas.Children.Add(faceBox);
    }
}
/// <summary>
/// Manages the scenario's internal state. Invokes the internal methods and updates the UI according to the
/// passed in state value. Handles failures and resets the state if necessary.
/// </summary>
/// <param name="newState">State to switch to</param>
private async void ChangeScenarioState(ScenarioState newState)
{
    // Disable UI while state change is in progress
    this.CameraStreamingButton.IsEnabled = false;
    switch (newState)
    {
        case ScenarioState.Idle:
            this.ShutdownWebCam();
            this.VisualizationCanvas.Children.Clear();
            this.CameraStreamingButton.Content = "Start Streaming";
            this.currentState = newState;
            // Fix: re-enable the button once the transition completes. Previously only the
            // Streaming branch re-enabled it, leaving the button permanently disabled
            // after stopping the stream.
            this.CameraStreamingButton.IsEnabled = true;
            break;
        case ScenarioState.Streaming:
            if (!await this.StartWebcamStreaming())
            {
                // Startup failed; fall back to Idle (which re-enables the button).
                this.ChangeScenarioState(ScenarioState.Idle);
                break;
            }
            this.VisualizationCanvas.Children.Clear();
            this.CameraStreamingButton.Content = "Stop Streaming";
            this.currentState = newState;
            this.CameraStreamingButton.IsEnabled = true;
            break;
    }
}
/// <summary>
/// Handles MediaCapture stream failures by shutting down streaming and returning to Idle state.
/// </summary>
/// <param name="sender">The source of the event, i.e. our MediaCapture object</param>
/// <param name="args">Event data</param>
private void MediaCapture_CameraStreamFailed(MediaCapture sender, object args)
{
    // MediaCapture is not Agile and so we cannot invoke its methods on this caller's thread;
    // marshal the transition back to Idle onto the UI dispatcher instead.
    var dispatchTask = this.Dispatcher.RunAsync(
        Windows.UI.Core.CoreDispatcherPriority.Normal,
        () => ChangeScenarioState(ScenarioState.Idle));
}
/// <summary>
/// Handles "streaming" button clicks to start/stop webcam streaming.
/// </summary>
/// <param name="sender">Button user clicked</param>
/// <param name="e">Event data</param>
private void CameraStreamingButton_Click(object sender, RoutedEventArgs e)
{
    // Clear any previous status message; this was duplicated in both branches before.
    this.rootPage.NotifyUser(string.Empty, NotifyType.StatusMessage);

    // Toggle between the two scenario states.
    this.ChangeScenarioState(
        this.currentState == ScenarioState.Streaming ? ScenarioState.Idle : ScenarioState.Streaming);
}
}
}
| |
using UnityEngine;
using Buildron.Domain.Builds;
using Buildron.Domain.Users;
using Buildron.Domain.Mods;
using Buildron.ClassicMods.UserMod;
using UnityEngine.UI;
[RequireComponent(typeof(UserAnimationController))]
public class UserController : MonoBehaviour, IUserController
{
    #region Fields
    private Vector3 m_targetPosition;
    private bool m_canWalk;
    private bool m_canAnimate;
    private IUser m_data;
    private Vector3 m_spawnPosition;
    private bool m_alreadyAwake;
    private GameObject m_body;
    private bool m_photoAlreadySet;
    private UserAnimationController m_animationController;
    private BuildStatus? m_currentStatus;
    #endregion

    #region Editor properties
    // Materials swapped on the body renderer when the camera zooms in/out.
    public Material VisibleMaterial;
    public Material InvisibleMaterial;
    #endregion

    #region Properties
    /// <summary>
    /// Gets or sets the user this controller represents.
    /// </summary>
    public IUser Model
    {
        get
        {
            return m_data;
        }
        set
        {
            m_data = value;
            m_animationController.Data = value;
            UpdateFromData();
            // NOTE(review): setting Model more than once would subscribe PhotoUpdated
            // repeatedly; currently the model appears to be assigned only on creation.
            m_data.PhotoUpdated += (sender, e) => {
                UpdateUserPhoto();
            };
        }
    }

    public Rigidbody Rigidbody { get; private set; }
    public Collider CenterCollider { get; private set; }
    public Collider TopCollider { get; private set; }
    public Collider LeftCollider { get; private set; }
    public Collider RightCollider { get; private set; }
    public Collider BottomCollider { get; private set; }
    #endregion

    #region Life cycle
    // Starts the walk-in behavior once both Awake has run and a model is assigned.
    private void UpdateFromData()
    {
        if (m_alreadyAwake && m_data != null)
        {
            UpdateUserPhoto();
            m_canWalk = true;
            m_canAnimate = true;
            // Walk 8 units forward (z) from the spawn point.
            m_targetPosition = new Vector3(m_spawnPosition.x, m_spawnPosition.y, m_spawnPosition.z + 8f);
        }
    }

    private void Awake()
    {
        m_animationController = gameObject.GetComponent<UserAnimationController>();
        m_body = transform.FindChild("rootJoint").gameObject;
        CenterCollider = transform.FindChild("Edges/Center").GetComponent<Collider>();
        TopCollider = transform.FindChild("Edges/Top").GetComponent<Collider>();
        RightCollider = transform.FindChild("Edges/Right").GetComponent<Collider>();
        BottomCollider = transform.FindChild("Edges/Bottom").GetComponent<Collider>();
        LeftCollider = transform.FindChild("Edges/Left").GetComponent<Collider>();
        Rigidbody = transform.FindChild("Edges").GetComponent<Rigidbody>();
        MarkAsVisible();
        Messenger.Register(gameObject,
            "OnCameraZoomIn",
            "OnCameraZoomOut");
    }

    private void Start()
    {
        m_spawnPosition = transform.position;
        m_alreadyAwake = true;
        UpdateFromData();
        Mod.Context.BuildUpdated += delegate (object sender, BuildUpdatedEventArgs e)
        {
            UpdateFromData();
        };
    }

    // Applies the user's photo sprite once it becomes available (only once).
    private void UpdateUserPhoto()
    {
        var photo = m_data.Photo;
        if (!m_photoAlreadySet && photo != null)
        {
            m_photoAlreadySet = true;
            var photoHolder = transform.FindChild("Canvas/Photo").GetComponent<Image>();
            photoHolder.enabled = true;
            photoHolder.sprite = photo.ToSprite();
        }
    }

    private void Update()
    {
        if (m_canWalk)
        {
            if (Vector3.Distance(transform.position, m_targetPosition) > 2)
            {
                if (!GetComponent<Animation>().isPlaying)
                {
                    GetComponent<Animation>().CrossFade("walk");
                }
                transform.position = Vector3.Lerp(transform.position, m_targetPosition, Time.deltaTime * 0.4f);
            }
            else if (m_canAnimate)
            {
                m_animationController.Play();
                m_canWalk = false;
            }
        }
    }

    private void MarkAsVisible()
    {
        m_body.GetComponent<Renderer>().material = VisibleMaterial;
    }

    private void MarkAsInvisible()
    {
        m_body.GetComponent<Renderer>().material = InvisibleMaterial;
    }

    private void OnCollisionEnter(Collision collision)
    {
        // Stop walking when bumping into another user.
        if (collision.gameObject.tag.Equals("User"))
        {
            m_canWalk = false;
            GetComponent<Animation>().Play("idle");
        }
    }

    private void OnCameraZoomIn()
    {
        MarkAsInvisible();
    }

    private void OnCameraZoomOut()
    {
        MarkAsVisible();
    }
    #endregion

    #region Methods
    /// <summary>
    /// Checks whether a game object for the given user already exists.
    /// </summary>
    public static bool ExistsGameObject(IUser buildUser)
    {
        // Fix: game objects are named with the lower-cased user name (see CreateGameObject),
        // so the lookup must go through GetGameObject which lower-cases the name.
        // Previously this used GameObject.Find(buildUser.UserName) and missed any user
        // whose name contained uppercase characters.
        return GetGameObject(buildUser) != null;
    }

    /// <summary>
    /// Gets the game object for the given user, or null if none exists.
    /// </summary>
    public static GameObject GetGameObject(IUser buildUser)
    {
        return GetGameObject(buildUser.UserName);
    }

    /// <summary>
    /// Gets the game object for the given user name (case-insensitive), or null if none exists.
    /// </summary>
    public static GameObject GetGameObject(string userName)
    {
        return GameObject.Find(userName.ToLowerInvariant());
    }

    /// <summary>
    /// Gets all user game objects (tagged "User").
    /// </summary>
    public static GameObject[] GetAllGameObjects()
    {
        return GameObject.FindGameObjectsWithTag("User");
    }

    /// <summary>
    /// Gets the existing game object for the user or instantiates a new one from the prefab.
    /// </summary>
    public static GameObject CreateGameObject(IUser buildUser)
    {
        // Fix: use the case-normalized lookup so an existing object is found and not duplicated.
        var go = GetGameObject(buildUser);
        if (go == null)
        {
            var userPrefab = Mod.Context.Assets.Load ("UserPrefab");
            go = Mod.Context.GameObjects.Create(userPrefab);
            var controller = go.GetComponent<UserController>();
            controller.Model = buildUser;
            go.name = buildUser.UserName.ToLowerInvariant();
        }
        return go;
    }
    #endregion
}
| |
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. See License.txt in the project root for license information.
namespace System.Data.Entity.Interception
{
using System.Data.Common;
using System.Data.Entity.Core;
using System.Data.Entity.Core.Common;
using System.Data.Entity.Core.EntityClient;
using System.Data.Entity.Core.Objects;
using System.Data.Entity.Infrastructure;
using System.Data.Entity.Infrastructure.DependencyResolution;
using System.Data.Entity.Infrastructure.Interception;
using System.Data.Entity.SqlServer;
using System.Data.Entity.TestHelpers;
using System.Data.SqlClient;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Moq;
using Xunit;
public class CommitFailureTests : FunctionalTestBase
{
[Fact]
public void No_TransactionHandler_and_no_ExecutionStrategy_throws_CommitFailedException_on_commit_fail()
{
    // Commit genuinely fails (rolled back): without a handler the failure surfaces
    // as CommitFailedException and only the seed row is persisted.
    Execute_commit_failure_test(
        c => Assert.Throws<DataException>(() => c()).InnerException.ValidateMessage("CommitFailed"),
        c => Assert.Throws<CommitFailedException>(() => c()).ValidateMessage("CommitFailed"),
        expectedBlogs: 1,
        useTransactionHandler: false,
        useExecutionStrategy: false,
        rollbackOnFail: true);
}
[Fact]
public void No_TransactionHandler_and_no_ExecutionStrategy_throws_CommitFailedException_on_false_commit_fail()
{
    // "False" failure: the commit actually succeeded on the server, so both rows persist
    // (expectedBlogs: 2), yet the exception is still reported to the caller.
    Execute_commit_failure_test(
        c => Assert.Throws<DataException>(() => c()).InnerException.ValidateMessage("CommitFailed"),
        c => Assert.Throws<CommitFailedException>(() => c()).ValidateMessage("CommitFailed"),
        expectedBlogs: 2,
        useTransactionHandler: false,
        useExecutionStrategy: false,
        rollbackOnFail: false);
}
[Fact]
[UseDefaultExecutionStrategy]
public void TransactionHandler_and_no_ExecutionStrategy_rethrows_original_exception_on_commit_fail()
{
    // With a CommitFailureHandler but no retrying strategy the original TimeoutException
    // is rethrown (wrapped in EntityException on SaveChanges).
    Execute_commit_failure_test(
        c => Assert.Throws<TimeoutException>(() => c()),
        c =>
        {
            var exception = Assert.Throws<EntityException>(() => c());
            Assert.IsType<TimeoutException>(exception.InnerException);
        },
        expectedBlogs: 1,
        useTransactionHandler: true,
        useExecutionStrategy: false,
        rollbackOnFail: true);
}
[Fact]
public void TransactionHandler_and_no_ExecutionStrategy_does_not_throw_on_false_commit_fail()
{
    // The handler detects (via the transaction log) that the commit actually succeeded,
    // so no exception escapes and both rows persist.
    Execute_commit_failure_test(
        c => c(),
        c => c(),
        expectedBlogs: 2,
        useTransactionHandler: true,
        useExecutionStrategy: false,
        rollbackOnFail: false);
}
[Fact]
public void No_TransactionHandler_and_ExecutionStrategy_throws_CommitFailedException_on_commit_fail()
{
    // A retrying strategy alone cannot recover a failed commit without the handler's
    // transaction log; the failure still surfaces as CommitFailedException.
    Execute_commit_failure_test(
        c => Assert.Throws<DataException>(() => c()).InnerException.ValidateMessage("CommitFailed"),
        c => Assert.Throws<CommitFailedException>(() => c()).ValidateMessage("CommitFailed"),
        expectedBlogs: 1,
        useTransactionHandler: false,
        useExecutionStrategy: true,
        rollbackOnFail: true);
}
[Fact]
public void No_TransactionHandler_and_ExecutionStrategy_throws_CommitFailedException_on_false_commit_fail()
{
    // False failure + strategy but no handler: the exception still escapes even though
    // the data was committed (expectedBlogs: 2).
    Execute_commit_failure_test(
        c => Assert.Throws<DataException>(() => c()).InnerException.ValidateMessage("CommitFailed"),
        c => Assert.Throws<CommitFailedException>(() => c()).ValidateMessage("CommitFailed"),
        expectedBlogs: 2,
        useTransactionHandler: false,
        useExecutionStrategy: true,
        rollbackOnFail: false);
}
[Fact]
public void TransactionHandler_and_ExecutionStrategy_retries_on_commit_fail()
{
    // Handler + retrying strategy: the failed commit is retried transparently,
    // so no exception escapes and both rows end up persisted.
    Execute_commit_failure_test(
        c => c(),
        c => c(),
        expectedBlogs: 2,
        useTransactionHandler: true,
        useExecutionStrategy: true,
        rollbackOnFail: true);
}
/// <summary>
/// Shared driver for the commit-failure scenarios above: installs a transaction
/// interceptor that fails the first commit, optionally installs a
/// CommitFailureHandler and/or a retrying execution strategy, then verifies the
/// caller-supplied expectations and the exact number of Committing calls.
/// </summary>
/// <param name="verifyInitialization">Asserts the outcome of the first (failing) query.</param>
/// <param name="verifySaveChanges">Asserts the outcome of SaveChanges under a failing commit.</param>
/// <param name="expectedBlogs">Row count expected after the scenario completes.</param>
/// <param name="useTransactionHandler">Whether to register a CommitFailureHandler.</param>
/// <param name="useExecutionStrategy">Whether to register a retrying execution strategy.</param>
/// <param name="rollbackOnFail">Whether the induced failure actually rolls the commit back.</param>
private void Execute_commit_failure_test(
    Action<Action> verifyInitialization, Action<Action> verifySaveChanges, int expectedBlogs, bool useTransactionHandler,
    bool useExecutionStrategy, bool rollbackOnFail)
{
    // Mock with CallBase so real interceptor behavior runs while calls are recorded.
    var failingTransactionInterceptorMock = new Mock<FailingTransactionInterceptor> { CallBase = true };
    var failingTransactionInterceptor = failingTransactionInterceptorMock.Object;
    DbInterception.Add(failingTransactionInterceptor);
    if (useTransactionHandler)
    {
        MutableResolver.AddResolver<Func<TransactionHandler>>(
            new TransactionHandlerResolver(() => new CommitFailureHandler(), null, null));
    }
    var isSqlAzure = DatabaseTestHelpers.IsSqlAzure(ModelHelpers.BaseConnectionString);
    if (useExecutionStrategy)
    {
        MutableResolver.AddResolver<Func<IDbExecutionStrategy>>(
            key =>
            (Func<IDbExecutionStrategy>)
            (() => isSqlAzure
                ? new TestSqlAzureExecutionStrategy()
                : (IDbExecutionStrategy)
                new SqlAzureExecutionStrategy(maxRetryCount: 2, maxDelay: TimeSpan.FromMilliseconds(1))));
    }
    try
    {
        using (var context = new BlogContextCommit())
        {
            context.Database.Delete();
            // Fail the very first commit, which happens during database initialization.
            failingTransactionInterceptor.ShouldFailTimes = 1;
            failingTransactionInterceptor.ShouldRollBack = rollbackOnFail;
            verifyInitialization(() => context.Blogs.Count());
            failingTransactionInterceptor.ShouldFailTimes = 0;
            Assert.Equal(1, context.Blogs.Count());
            // Arm a second failure for the SaveChanges commit.
            failingTransactionInterceptor.ShouldFailTimes = 1;
            context.Blogs.Add(new BlogContext.Blog());
            verifySaveChanges(() => context.SaveChanges());
            // Expected Committing invocations depend on handler/strategy/rollback combination.
            var expectedCommitCount = useTransactionHandler
                ? useExecutionStrategy
                    ? 6
                    : rollbackOnFail
                        ? 4
                        : 3
                : 4;
            failingTransactionInterceptorMock.Verify(
                m => m.Committing(It.IsAny<DbTransaction>(), It.IsAny<DbTransactionInterceptionContext>()),
                isSqlAzure
                    ? Times.AtLeast(expectedCommitCount)
                    : Times.Exactly(expectedCommitCount));
        }
        // Fresh context: verify the persisted row count and that the transaction log was pruned.
        using (var context = new BlogContextCommit())
        {
            Assert.Equal(expectedBlogs, context.Blogs.Count());
            using (var transactionContext = new TransactionContext(context.Database.Connection))
            {
                using (var infoContext = GetInfoContext(transactionContext))
                {
                    Assert.True(
                        !infoContext.TableExists("__Transactions")
                        || !transactionContext.Transactions.Any());
                }
            }
        }
    }
    finally
    {
        // Always undo the globally-registered interceptor and resolvers.
        DbInterception.Remove(failingTransactionInterceptor);
        MutableResolver.ClearResolvers();
    }
    DbDispatchersHelpers.AssertNoInterceptors();
}
[Fact]
public void TransactionHandler_and_ExecutionStrategy_does_not_retry_on_false_commit_fail()
{
    // Synchronous SaveChanges variant of the false-failure no-retry scenario.
    MutableResolver.AddResolver<Func<TransactionHandler>>(
        new TransactionHandlerResolver(() => new CommitFailureHandler(), null, null));
    TransactionHandler_and_ExecutionStrategy_does_not_retry_on_false_commit_fail_implementation(
        context => context.SaveChanges());
}
#if !NET40
[Fact]
public void TransactionHandler_and_ExecutionStrategy_does_not_retry_on_false_commit_fail_async()
{
    // Async SaveChangesAsync variant; only compiled when async support exists (not .NET 4.0).
    MutableResolver.AddResolver<Func<TransactionHandler>>(
        new TransactionHandlerResolver(() => new CommitFailureHandler(), null, null));
    TransactionHandler_and_ExecutionStrategy_does_not_retry_on_false_commit_fail_implementation(
        context => context.SaveChangesAsync().Wait());
}
#endif
[Fact]
public void TransactionHandler_and_ExecutionStrategy_does_not_retry_on_false_commit_fail_with_custom_TransactionContext()
{
    // Uses MyTransactionContext, which remaps the log table/column; verifies the
    // custom table name and the remapped "Time" column type are actually used.
    MutableResolver.AddResolver<Func<TransactionHandler>>(
        new TransactionHandlerResolver(() => new CommitFailureHandler(c => new MyTransactionContext(c)), null, null));
    TransactionHandler_and_ExecutionStrategy_does_not_retry_on_false_commit_fail_implementation(
        context =>
        {
            context.SaveChanges();
            using (var infoContext = GetInfoContext(context))
            {
                Assert.True(infoContext.TableExists("MyTransactions"));
                var column = infoContext.Columns.Single(c => c.Name == "Time");
                Assert.Equal("datetime2", column.Type);
            }
        });
}
/// <summary>
/// TransactionContext variant that stores the transaction log in a custom
/// "MyTransactions" table with the CreationTime column remapped to "Time" (datetime2).
/// </summary>
public class MyTransactionContext : TransactionContext
{
    public MyTransactionContext(DbConnection connection)
        : base(connection)
    {
    }

    protected override void OnModelCreating(DbModelBuilder modelBuilder)
    {
        modelBuilder.Entity<TransactionRow>()
            .ToTable("MyTransactions")
            .HasKey(e => e.Id)
            .Property(e => e.CreationTime).HasColumnName("Time").HasColumnType("datetime2");
    }
}
/// <summary>
/// Shared driver: induces two non-rollback ("false") commit failures and verifies
/// that the handler recognizes the commits as succeeded, no retry storm occurs
/// (exactly 3 Committing calls), and both rows persist.
/// </summary>
/// <param name="runAndVerify">Performs the save and any scenario-specific assertions.</param>
private void TransactionHandler_and_ExecutionStrategy_does_not_retry_on_false_commit_fail_implementation(
    Action<BlogContextCommit> runAndVerify)
{
    var failingTransactionInterceptorMock = new Mock<FailingTransactionInterceptor> { CallBase = true };
    var failingTransactionInterceptor = failingTransactionInterceptorMock.Object;
    DbInterception.Add(failingTransactionInterceptor);
    var isSqlAzure = DatabaseTestHelpers.IsSqlAzure(ModelHelpers.BaseConnectionString);
    MutableResolver.AddResolver<Func<IDbExecutionStrategy>>(
        key =>
        (Func<IDbExecutionStrategy>)
        (() => isSqlAzure
            ? new TestSqlAzureExecutionStrategy()
            : (IDbExecutionStrategy)new SqlAzureExecutionStrategy(maxRetryCount: 2, maxDelay: TimeSpan.FromMilliseconds(1))));
    try
    {
        using (var context = new BlogContextCommit())
        {
            failingTransactionInterceptor.ShouldFailTimes = 0;
            context.Database.Delete();
            Assert.Equal(1, context.Blogs.Count());
            // Two false failures: commits succeed server-side but report an error.
            failingTransactionInterceptor.ShouldFailTimes = 2;
            failingTransactionInterceptor.ShouldRollBack = false;
            context.Blogs.Add(new BlogContext.Blog());
            runAndVerify(context);
            failingTransactionInterceptorMock.Verify(
                m => m.Committing(It.IsAny<DbTransaction>(), It.IsAny<DbTransactionInterceptionContext>()),
                isSqlAzure
                    ? Times.AtLeast(3)
                    : Times.Exactly(3));
        }
        // Fresh context: both rows persisted and the transaction log was cleaned up.
        using (var context = new BlogContextCommit())
        {
            Assert.Equal(2, context.Blogs.Count());
            using (var transactionContext = new TransactionContext(context.Database.Connection))
            {
                using (var infoContext = GetInfoContext(transactionContext))
                {
                    Assert.True(
                        !infoContext.TableExists("__Transactions")
                        || !transactionContext.Transactions.Any());
                }
            }
        }
    }
    finally
    {
        DbInterception.Remove(failingTransactionInterceptorMock.Object);
        MutableResolver.ClearResolvers();
    }
    DbDispatchersHelpers.AssertNoInterceptors();
}
[Fact]
public void CommitFailureHandler_Dispose_does_not_use_ExecutionStrategy()
{
    // Dispose must not trigger extra strategy executions: the count stays at the
    // 3 calls already recorded during setup.
    CommitFailureHandler_with_ExecutionStrategy_test(
        (c, executionStrategyMock) =>
        {
            c.TransactionHandler.Dispose();
            executionStrategyMock.Verify(e => e.Execute(It.IsAny<Func<int>>()), Times.Exactly(3));
        });
}
[Fact]
public void CommitFailureHandler_Dispose_catches_exceptions()
{
    // Delete the log rows out from under the handler so its final prune fails;
    // Dispose must swallow that failure rather than throw.
    CommitFailureHandler_with_ExecutionStrategy_test(
        (c, executionStrategyMock) =>
        {
            using (var transactionContext = new TransactionContext(((EntityConnection)c.Connection).StoreConnection))
            {
                foreach (var tran in transactionContext.Set<TransactionRow>().ToList())
                {
                    transactionContext.Transactions.Remove(tran);
                }
                transactionContext.SaveChanges();
            }
            c.TransactionHandler.Dispose();
        });
}
[Fact]
public void CommitFailureHandler_prunes_transactions_after_set_amount()
{
    // Pruning without any induced commit failure.
    CommitFailureHandler_prunes_transactions_after_set_amount_implementation(false);
}
[Fact]
public void CommitFailureHandler_prunes_transactions_after_set_amount_and_handles_false_failure()
{
    // Pruning while a false commit failure occurs mid-sequence.
    CommitFailureHandler_prunes_transactions_after_set_amount_implementation(true);
}
/// <summary>
/// Saves enough times to reach the handler's PruningLimit, confirms the log grew
/// to that size, then saves twice more and confirms the log was pruned to 1 row.
/// </summary>
/// <param name="shouldThrow">When true, injects a false commit failure before the prune-triggering saves.</param>
private void CommitFailureHandler_prunes_transactions_after_set_amount_implementation(bool shouldThrow)
{
    var failingTransactionInterceptor = new FailingTransactionInterceptor();
    DbInterception.Add(failingTransactionInterceptor);
    MutableResolver.AddResolver<Func<TransactionHandler>>(
        new TransactionHandlerResolver(() => new MyCommitFailureHandler(c => new TransactionContext(c)), null, null));
    try
    {
        using (var context = new BlogContextCommit())
        {
            context.Database.Delete();
            Assert.Equal(1, context.Blogs.Count());
            var objectContext = ((IObjectContextAdapter)context).ObjectContext;
            var transactionHandler = (MyCommitFailureHandler)objectContext.TransactionHandler;
            // Fill the transaction log exactly up to the pruning threshold.
            for (var i = 0; i < transactionHandler.PruningLimit; i++)
            {
                context.Blogs.Add(new BlogContext.Blog());
                context.SaveChanges();
            }
            AssertTransactionHistoryCount(context, transactionHandler.PruningLimit);
            if (shouldThrow)
            {
                failingTransactionInterceptor.ShouldFailTimes = 1;
                failingTransactionInterceptor.ShouldRollBack = false;
            }
            // Crossing the limit triggers pruning down to the most recent entry.
            context.Blogs.Add(new BlogContext.Blog());
            context.SaveChanges();
            context.Blogs.Add(new BlogContext.Blog());
            context.SaveChanges();
            AssertTransactionHistoryCount(context, 1);
            Assert.Equal(1, transactionHandler.TransactionContext.ChangeTracker.Entries<TransactionRow>().Count());
        }
    }
    finally
    {
        DbInterception.Remove(failingTransactionInterceptor);
        MutableResolver.ClearResolvers();
    }
    DbDispatchersHelpers.AssertNoInterceptors();
}
[Fact]
public void CommitFailureHandler_ClearTransactionHistory_uses_ExecutionStrategy()
{
    // ClearTransactionHistory must run through the execution strategy:
    // the Execute count rises from the 3 setup calls to 4.
    CommitFailureHandler_with_ExecutionStrategy_test(
        (c, executionStrategyMock) =>
        {
            ((MyCommitFailureHandler)c.TransactionHandler).ClearTransactionHistory();
            executionStrategyMock.Verify(e => e.Execute(It.IsAny<Func<int>>()), Times.Exactly(4));
            Assert.Empty(((MyCommitFailureHandler)c.TransactionHandler).TransactionContext.ChangeTracker.Entries<TransactionRow>());
        });
}
[Fact]
public void CommitFailureHandler_ClearTransactionHistory_does_not_catch_exceptions()
{
    // Unlike Dispose, ClearTransactionHistory must propagate failures; after the
    // induced failure the call succeeds and empties the log.
    var failingTransactionInterceptor = new FailingTransactionInterceptor();
    DbInterception.Add(failingTransactionInterceptor);
    try
    {
        CommitFailureHandler_with_ExecutionStrategy_test(
            (c, executionStrategyMock) =>
            {
                // Swap in a non-retrying strategy so the induced failure is not retried away.
                MutableResolver.AddResolver<Func<IDbExecutionStrategy>>(
                    key => (Func<IDbExecutionStrategy>)(() => new SimpleExecutionStrategy()));
                failingTransactionInterceptor.ShouldFailTimes = 1;
                failingTransactionInterceptor.ShouldRollBack = true;
                Assert.Throws<EntityException>(
                    () => ((MyCommitFailureHandler)c.TransactionHandler).ClearTransactionHistory());
                MutableResolver.ClearResolvers();
                AssertTransactionHistoryCount(c, 1);
                ((MyCommitFailureHandler)c.TransactionHandler).ClearTransactionHistory();
                AssertTransactionHistoryCount(c, 0);
            });
    }
    finally
    {
        DbInterception.Remove(failingTransactionInterceptor);
    }
}
[Fact]
public void CommitFailureHandler_PruneTransactionHistory_uses_ExecutionStrategy()
{
    // PruneTransactionHistory must run through the execution strategy:
    // the Execute count rises from the 3 setup calls to 4.
    CommitFailureHandler_with_ExecutionStrategy_test(
        (c, executionStrategyMock) =>
        {
            ((MyCommitFailureHandler)c.TransactionHandler).PruneTransactionHistory();
            executionStrategyMock.Verify(e => e.Execute(It.IsAny<Func<int>>()), Times.Exactly(4));
            Assert.Empty(((MyCommitFailureHandler)c.TransactionHandler).TransactionContext.ChangeTracker.Entries<TransactionRow>());
        });
}
[Fact]
public void CommitFailureHandler_PruneTransactionHistory_does_not_catch_exceptions()
{
    // PruneTransactionHistory must propagate failures; after the induced failure
    // the call succeeds and empties the log.
    var failingTransactionInterceptor = new FailingTransactionInterceptor();
    DbInterception.Add(failingTransactionInterceptor);
    try
    {
        CommitFailureHandler_with_ExecutionStrategy_test(
            (c, executionStrategyMock) =>
            {
                // Swap in a non-retrying strategy so the induced failure is not retried away.
                MutableResolver.AddResolver<Func<IDbExecutionStrategy>>(
                    key => (Func<IDbExecutionStrategy>)(() => new SimpleExecutionStrategy()));
                failingTransactionInterceptor.ShouldFailTimes = 1;
                failingTransactionInterceptor.ShouldRollBack = true;
                Assert.Throws<EntityException>(
                    () => ((MyCommitFailureHandler)c.TransactionHandler).PruneTransactionHistory());
                MutableResolver.ClearResolvers();
                AssertTransactionHistoryCount(c, 1);
                ((MyCommitFailureHandler)c.TransactionHandler).PruneTransactionHistory();
                AssertTransactionHistoryCount(c, 0);
            });
    }
    finally
    {
        DbInterception.Remove(failingTransactionInterceptor);
    }
}
#if !NET40
[Fact]
public void CommitFailureHandler_ClearTransactionHistoryAsync_uses_ExecutionStrategy()
{
    // Async clear must run through the strategy's ExecuteAsync exactly once.
    CommitFailureHandler_with_ExecutionStrategy_test(
        (c, executionStrategyMock) =>
        {
            ((MyCommitFailureHandler)c.TransactionHandler).ClearTransactionHistoryAsync().Wait();
            executionStrategyMock.Verify(
                e => e.ExecuteAsync(It.IsAny<Func<Task<int>>>(), It.IsAny<CancellationToken>()), Times.Once());
            Assert.Empty(((MyCommitFailureHandler)c.TransactionHandler).TransactionContext.ChangeTracker.Entries<TransactionRow>());
        });
}
[Fact]
public void CommitFailureHandler_ClearTransactionHistoryAsync_does_not_catch_exceptions()
{
    // Async clear must propagate failures (unwrapped from AggregateException);
    // a subsequent call succeeds and empties the log.
    var failingTransactionInterceptor = new FailingTransactionInterceptor();
    DbInterception.Add(failingTransactionInterceptor);
    try
    {
        CommitFailureHandler_with_ExecutionStrategy_test(
            (c, executionStrategyMock) =>
            {
                // Swap in a non-retrying strategy so the induced failure is not retried away.
                MutableResolver.AddResolver<Func<IDbExecutionStrategy>>(
                    key => (Func<IDbExecutionStrategy>)(() => new SimpleExecutionStrategy()));
                failingTransactionInterceptor.ShouldFailTimes = 1;
                failingTransactionInterceptor.ShouldRollBack = true;
                Assert.Throws<EntityException>(
                    () => ExceptionHelpers.UnwrapAggregateExceptions(
                        () => ((MyCommitFailureHandler)c.TransactionHandler).ClearTransactionHistoryAsync().Wait()));
                MutableResolver.ClearResolvers();
                AssertTransactionHistoryCount(c, 1);
                ((MyCommitFailureHandler)c.TransactionHandler).ClearTransactionHistoryAsync().Wait();
                AssertTransactionHistoryCount(c, 0);
            });
    }
    finally
    {
        DbInterception.Remove(failingTransactionInterceptor);
    }
}
[Fact]
public void CommitFailureHandler_PruneTransactionHistoryAsync_uses_ExecutionStrategy()
{
    // Async prune must run through the strategy's ExecuteAsync exactly once.
    CommitFailureHandler_with_ExecutionStrategy_test(
        (c, executionStrategyMock) =>
        {
            ((MyCommitFailureHandler)c.TransactionHandler).PruneTransactionHistoryAsync().Wait();
            executionStrategyMock.Verify(
                e => e.ExecuteAsync(It.IsAny<Func<Task<int>>>(), It.IsAny<CancellationToken>()), Times.Once());
            Assert.Empty(((MyCommitFailureHandler)c.TransactionHandler).TransactionContext.ChangeTracker.Entries<TransactionRow>());
        });
}
[Fact]
public void CommitFailureHandler_PruneTransactionHistoryAsync_does_not_catch_exceptions()
{
    // Async prune must propagate failures (unwrapped from AggregateException);
    // a subsequent call succeeds and empties the log.
    var failingTransactionInterceptor = new FailingTransactionInterceptor();
    DbInterception.Add(failingTransactionInterceptor);
    try
    {
        CommitFailureHandler_with_ExecutionStrategy_test(
            (c, executionStrategyMock) =>
            {
                // Swap in a non-retrying strategy so the induced failure is not retried away.
                MutableResolver.AddResolver<Func<IDbExecutionStrategy>>(
                    key => (Func<IDbExecutionStrategy>)(() => new SimpleExecutionStrategy()));
                failingTransactionInterceptor.ShouldFailTimes = 1;
                failingTransactionInterceptor.ShouldRollBack = true;
                Assert.Throws<EntityException>(
                    () => ExceptionHelpers.UnwrapAggregateExceptions(
                        () => ((MyCommitFailureHandler)c.TransactionHandler).PruneTransactionHistoryAsync().Wait()));
                MutableResolver.ClearResolvers();
                AssertTransactionHistoryCount(c, 1);
                ((MyCommitFailureHandler)c.TransactionHandler).PruneTransactionHistoryAsync().Wait();
                AssertTransactionHistoryCount(c, 0);
            });
    }
    finally
    {
        DbInterception.Remove(failingTransactionInterceptor);
    }
}
#endif
/// <summary>
/// Shared driver for the MyCommitFailureHandler scenarios: registers the handler and a
/// mocked execution strategy, performs one save (leaving exactly one log row and 3
/// recorded Execute calls), then delegates to the scenario callback and finally
/// asserts the log is empty.
/// </summary>
/// <param name="pruneAndVerify">Scenario-specific actions and assertions.</param>
private void CommitFailureHandler_with_ExecutionStrategy_test(
    Action<ObjectContext, Mock<TestSqlAzureExecutionStrategy>> pruneAndVerify)
{
    MutableResolver.AddResolver<Func<TransactionHandler>>(
        new TransactionHandlerResolver(() => new MyCommitFailureHandler(c => new TransactionContext(c)), null, null));
    var executionStrategyMock = new Mock<TestSqlAzureExecutionStrategy> { CallBase = true };
    MutableResolver.AddResolver<Func<IDbExecutionStrategy>>(
        key => (Func<IDbExecutionStrategy>)(() => executionStrategyMock.Object));
    try
    {
        using (var context = new BlogContextCommit())
        {
            context.Database.Delete();
            Assert.Equal(1, context.Blogs.Count());
            context.Blogs.Add(new BlogContext.Blog());
            context.SaveChanges();
            AssertTransactionHistoryCount(context, 1);
            // Baseline: setup performed exactly 3 synchronous Execute calls and no async ones.
            executionStrategyMock.Verify(e => e.Execute(It.IsAny<Func<int>>()), Times.Exactly(3));
#if !NET40
            executionStrategyMock.Verify(
                e => e.ExecuteAsync(It.IsAny<Func<Task<int>>>(), It.IsAny<CancellationToken>()), Times.Never());
#endif
            var objectContext = ((IObjectContextAdapter)context).ObjectContext;
            pruneAndVerify(objectContext, executionStrategyMock);
            using (var transactionContext = new TransactionContext(context.Database.Connection))
            {
                Assert.Equal(0, transactionContext.Transactions.Count());
            }
        }
    }
    finally
    {
        MutableResolver.ClearResolvers();
    }
}
// Convenience overload: unwrap the underlying ObjectContext and defer to the
// ObjectContext-based assertion.
private void AssertTransactionHistoryCount(DbContext context, int count)
{
    var objectContext = ((IObjectContextAdapter)context).ObjectContext;
    AssertTransactionHistoryCount(objectContext, count);
}
// Opens an independent TransactionContext over the same store connection and asserts
// the number of persisted transaction-log rows matches the expectation.
private void AssertTransactionHistoryCount(ObjectContext context, int count)
{
    var storeConnection = ((EntityConnection)context.Connection).StoreConnection;
    using (var historyContext = new TransactionContext(storeConnection))
    {
        Assert.Equal(count, historyContext.Transactions.Count());
    }
}
/// <summary>
/// Pass-through execution strategy: runs each operation exactly once with no retries.
/// Used to keep induced failures from being retried away in the tests above.
/// </summary>
public class SimpleExecutionStrategy : IDbExecutionStrategy
{
    public bool RetriesOnFailure
    {
        get { return false; }
    }

    public virtual void Execute(Action operation)
    {
        operation();
    }

    public virtual TResult Execute<TResult>(Func<TResult> operation)
    {
        return operation();
    }
#if !NET40
    public virtual Task ExecuteAsync(Func<Task> operation, CancellationToken cancellationToken)
    {
        return operation();
    }

    public virtual Task<TResult> ExecuteAsync<TResult>(Func<Task<TResult>> operation, CancellationToken cancellationToken)
    {
        return operation();
    }
#endif
}
/// <summary>
/// Test subclass of CommitFailureHandler that exposes protected members
/// (MarkTransactionForPruning, TransactionContext, PruningLimit) for assertions.
/// </summary>
public class MyCommitFailureHandler : CommitFailureHandler
{
    public MyCommitFailureHandler(Func<DbConnection, TransactionContext> transactionContextFactory)
        : base(transactionContextFactory)
    {
    }

    public new void MarkTransactionForPruning(TransactionRow transaction)
    {
        base.MarkTransactionForPruning(transaction);
    }

    public new TransactionContext TransactionContext
    {
        get { return base.TransactionContext; }
    }

    // virtual so Moq-based tests could override the threshold if needed.
    public new virtual int PruningLimit
    {
        get { return base.PruningLimit; }
    }
}
[Fact]
[UseDefaultExecutionStrategy]
public void CommitFailureHandler_supports_nested_transactions()
{
    // An outer explicit transaction on one context with an inner committed transaction
    // on a second context must both persist: 1 seed + 2 added = 3 rows.
    MutableResolver.AddResolver<Func<TransactionHandler>>(
        new TransactionHandlerResolver(() => new CommitFailureHandler(), null, null));
    try
    {
        using (var context = new BlogContextCommit())
        {
            context.Database.Delete();
            Assert.Equal(1, context.Blogs.Count());
            context.Blogs.Add(new BlogContext.Blog());
            using (var transaction = context.Database.BeginTransaction())
            {
                using (var innerContext = new BlogContextCommit())
                {
                    using (var innerTransaction = innerContext.Database.BeginTransaction())
                    {
                        Assert.Equal(1, innerContext.Blogs.Count());
                        innerContext.Blogs.Add(new BlogContext.Blog());
                        innerContext.SaveChanges();
                        innerTransaction.Commit();
                    }
                }
                context.SaveChanges();
                transaction.Commit();
            }
        }
        using (var context = new BlogContextCommit())
        {
            Assert.Equal(3, context.Blogs.Count());
        }
    }
    finally
    {
        MutableResolver.ClearResolvers();
    }
    DbDispatchersHelpers.AssertNoInterceptors();
}
[Fact]
public void BuildDatabaseInitializationScript_can_be_used_to_initialize_the_database()
{
    // With a TransactionContext whose initializer is disabled, the first SaveChanges
    // fails because the log table is missing; running the script produced by
    // BuildDatabaseInitializationScript creates it and SaveChanges then succeeds.
    MutableResolver.AddResolver<Func<TransactionHandler>>(
        new TransactionHandlerResolver(() => new CommitFailureHandler(), null, null));
    MutableResolver.AddResolver<Func<IDbExecutionStrategy>>(
        key => (Func<IDbExecutionStrategy>)(() => new TestSqlAzureExecutionStrategy()));
    try
    {
        using (var context = new BlogContextCommit())
        {
            context.Database.Delete();
            Assert.Equal(1, context.Blogs.Count());
        }
        MutableResolver.AddResolver<Func<TransactionHandler>>(
            new TransactionHandlerResolver(() => new CommitFailureHandler(c => new TransactionContextNoInit(c)), null, null));
        using (var context = new BlogContextCommit())
        {
            context.Blogs.Add(new BlogContext.Blog());
            Assert.Throws<EntityException>(() => context.SaveChanges());
            context.Database.ExecuteSqlCommand(
                TransactionalBehavior.DoNotEnsureTransaction,
                ((IObjectContextAdapter)context).ObjectContext.TransactionHandler.BuildDatabaseInitializationScript());
            context.SaveChanges();
        }
        using (var context = new BlogContextCommit())
        {
            Assert.Equal(2, context.Blogs.Count());
        }
    }
    finally
    {
        MutableResolver.ClearResolvers();
    }
    DbDispatchersHelpers.AssertNoInterceptors();
}
[Fact]
public void BuildDatabaseInitializationScript_can_be_used_to_initialize_the_database_if_no_migration_generator()
{
    // Force provider resolution to the plain SqlClient provider (which has no migration
    // SQL generator registered through the resolver) and rerun the scenario above.
    var mockDbProviderServiceResolver = new Mock<IDbDependencyResolver>();
    mockDbProviderServiceResolver
        .Setup(r => r.GetService(It.IsAny<Type>(), It.IsAny<string>()))
        .Returns(SqlProviderServices.Instance);
    MutableResolver.AddResolver<DbProviderServices>(mockDbProviderServiceResolver.Object);
    var mockDbProviderFactoryResolver = new Mock<IDbDependencyResolver>();
    mockDbProviderFactoryResolver
        .Setup(r => r.GetService(It.IsAny<Type>(), It.IsAny<string>()))
        .Returns(SqlClientFactory.Instance);
    MutableResolver.AddResolver<DbProviderFactory>(mockDbProviderFactoryResolver.Object);
    BuildDatabaseInitializationScript_can_be_used_to_initialize_the_database();
}
[Fact]
public void FromContext_returns_the_current_handler()
{
    // Both FromContext overloads (ObjectContext and DbContext) must return the
    // same registered CommitFailureHandler instance.
    MutableResolver.AddResolver<Func<TransactionHandler>>(
        new TransactionHandlerResolver(() => new CommitFailureHandler(), null, null));
    try
    {
        using (var context = new BlogContextCommit())
        {
            context.Database.Delete();
            var commitFailureHandler = CommitFailureHandler.FromContext(((IObjectContextAdapter)context).ObjectContext);
            Assert.IsType<CommitFailureHandler>(commitFailureHandler);
            Assert.Same(commitFailureHandler, CommitFailureHandler.FromContext(context));
        }
    }
    finally
    {
        MutableResolver.ClearResolvers();
    }
}
[Fact]
public void TransactionHandler_is_disposed_even_if_the_context_is_not()
{
    // Verifies via weak references that dropping the DbContext (without Dispose)
    // still allows the context, its ObjectContext, and the TransactionHandler to be
    // garbage collected. Do not restructure: keeping strong references out of locals
    // is what makes the objects collectible.
    var context = new BlogContextCommit();
    context.Database.Delete();
    Assert.Equal(1, context.Blogs.Count());
    var weakDbContext = new WeakReference(context);
    var weakObjectContext = new WeakReference(((IObjectContextAdapter)context).ObjectContext);
    var weakTransactionHandler = new WeakReference(((IObjectContextAdapter)context).ObjectContext.TransactionHandler);
    context = null;
    GC.Collect();
    GC.WaitForPendingFinalizers();
    Assert.False(weakDbContext.IsAlive);
    Assert.False(weakObjectContext.IsAlive);
    DbDispatchersHelpers.AssertNoInterceptors();
    // Need a second pass as the TransactionHandler is removed from the interceptors in the ObjectContext finalizer
    GC.Collect();
    Assert.False(weakTransactionHandler.IsAlive);
}
// TransactionContext variant whose database initializer is disabled, so the
// transaction table is never created automatically by EF. Tests use it to
// exercise the "initialization script must be run manually" path.
public class TransactionContextNoInit : TransactionContext
{
static TransactionContextNoInit()
{
// Opt out of automatic database initialization for this context type.
Database.SetInitializer<TransactionContextNoInit>(null);
}
public TransactionContextNoInit(DbConnection connection)
: base(connection)
{
}
protected override void OnModelCreating(DbModelBuilder modelBuilder)
{
// Map transaction rows to a table name specific to this context type.
modelBuilder.Entity<TransactionRow>()
.ToTable("TransactionContextNoInit");
}
}
// Test interceptor that fakes commit failures: for the first ShouldFailTimes
// commits it actually commits (or rolls back, per ShouldRollBack) and then
// reports a TimeoutException to the caller — simulating a commit whose
// outcome is unknown to the client.
public class FailingTransactionInterceptor : IDbTransactionInterceptor
{
// Remaining number of commits to fail; refilled from _shouldFailTimes.
private int _timesToFail;
private int _shouldFailTimes;
public int ShouldFailTimes
{
get { return _shouldFailTimes; }
set
{
// Setting the quota also resets the live countdown.
_shouldFailTimes = value;
_timesToFail = value;
}
}
// When true the interceptor rolls the transaction back instead of committing
// before reporting the fake failure.
public bool ShouldRollBack;
public FailingTransactionInterceptor()
{
_timesToFail = ShouldFailTimes;
}
public void ConnectionGetting(DbTransaction transaction, DbTransactionInterceptionContext<DbConnection> interceptionContext)
{
}
public void ConnectionGot(DbTransaction transaction, DbTransactionInterceptionContext<DbConnection> interceptionContext)
{
}
public void IsolationLevelGetting(
DbTransaction transaction, DbTransactionInterceptionContext<IsolationLevel> interceptionContext)
{
}
public void IsolationLevelGot(DbTransaction transaction, DbTransactionInterceptionContext<IsolationLevel> interceptionContext)
{
}
public virtual void Committing(DbTransaction transaction, DbTransactionInterceptionContext interceptionContext)
{
if (_timesToFail-- > 0)
{
// Complete (or roll back) the real transaction, then pretend it failed.
if (ShouldRollBack)
{
transaction.Rollback();
}
else
{
transaction.Commit();
}
interceptionContext.Exception = new TimeoutException();
}
else
{
// Quota exhausted: let this commit proceed and re-arm the countdown.
_timesToFail = ShouldFailTimes;
}
}
public void Committed(DbTransaction transaction, DbTransactionInterceptionContext interceptionContext)
{
if (interceptionContext.Exception != null)
{
// A commit that genuinely threw also counts against the failure quota.
_timesToFail--;
}
}
public void Disposing(DbTransaction transaction, DbTransactionInterceptionContext interceptionContext)
{
}
public void Disposed(DbTransaction transaction, DbTransactionInterceptionContext interceptionContext)
{
}
public void RollingBack(DbTransaction transaction, DbTransactionInterceptionContext interceptionContext)
{
}
public void RolledBack(DbTransaction transaction, DbTransactionInterceptionContext interceptionContext)
{
}
}
// BlogContext whose static ctor installs BlogInitializer — presumably the
// initializer that seeds the test database (defined elsewhere; verify there).
public class BlogContextCommit : BlogContext
{
static BlogContextCommit()
{
Database.SetInitializer<BlogContextCommit>(new BlogInitializer());
}
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using Microsoft.CodeAnalysis.CSharp.Extensions;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Structure;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.CSharp.Structure
{
internal static class CSharpStructureHelpers
{
public const string Ellipsis = "...";
public const string MultiLineCommentSuffix = "*/";
public const int MaxXmlDocCommentBannerLength = 120;
private static readonly char[] s_newLineCharacters = new char[] { '\r', '\n' };
private static int GetCollapsibleStart(SyntaxToken firstToken)
{
    // Where should the collapsible region begin? Default: right after this token.
    var collapsibleStart = firstToken.Span.End;

    // If the *next* token carries leading comments, start after the last of them.
    var followingToken = firstToken.GetNextToken();
    if (followingToken.Kind() != SyntaxKind.None && followingToken.HasLeadingTrivia)
    {
        var lastLeadingComment = followingToken.LeadingTrivia.GetLastComment();
        if (lastLeadingComment.HasValue)
        {
            collapsibleStart = lastLeadingComment.Value.Span.End;
        }
    }

    // Trailing comment/whitespace on this token itself takes precedence.
    if (firstToken.HasTrailingTrivia)
    {
        var lastTrailing = firstToken.TrailingTrivia.GetLastCommentOrWhitespace();
        if (lastTrailing.HasValue)
        {
            collapsibleStart = lastTrailing.Value.Span.End;
        }
    }

    return collapsibleStart;
}
private static int GetCollapsibleEnd(SyntaxToken lastToken)
{
    // End at the token itself when it has trailing comments; otherwise stop at
    // the first new-line in its trailing trivia.
    var collapsibleEnd = lastToken.Span.End;
    if (lastToken.HasTrailingTrivia)
    {
        var hasTrailingComment = lastToken.TrailingTrivia.Any(
            SyntaxKind.SingleLineCommentTrivia, SyntaxKind.MultiLineCommentTrivia);
        if (!hasTrailingComment)
        {
            var firstNewLine = lastToken.TrailingTrivia.GetFirstNewLine();
            if (firstNewLine.HasValue)
            {
                collapsibleEnd = firstNewLine.Value.SpanStart;
            }
        }
    }
    return collapsibleEnd;
}
// Returns the token that should terminate an inline method block: normally the
// node's own last token, extended to a following semicolon when one directly
// follows — unless that semicolon is the first separator of a for-statement,
// in which case no usable token exists and default is returned.
public static SyntaxToken GetLastInlineMethodBlockToken(SyntaxNode node)
{
    var lastToken = node.GetLastToken(includeZeroWidth: true);
    if (lastToken.Kind() == SyntaxKind.None)
    {
        return default(SyntaxToken);
    }

    // If the next token is a semicolon, and we aren't in the initializer of a
    // for-loop, use that token as the end.
    // (A SemicolonToken can never be None, so the former "!= None" guard was
    // redundant and has been dropped.)
    var nextToken = lastToken.GetNextToken(includeSkipped: true);
    if (nextToken.Kind() == SyntaxKind.SemicolonToken)
    {
        var forStatement = nextToken.GetAncestor<ForStatementSyntax>();
        if (forStatement != null && forStatement.FirstSemicolonToken == nextToken)
        {
            return default(SyntaxToken);
        }

        lastToken = nextToken;
    }

    return lastToken;
}
// Builds the collapsed banner for a comment: "<prefix> <body> ..." where the
// body is the comment text with its prefix stripped and whitespace trimmed.
private static string CreateCommentBannerTextWithPrefix(string text, string prefix)
{
    Contract.ThrowIfNull(text);
    Contract.ThrowIfNull(prefix);
    var body = text.Substring(prefix.Length).Trim();
    return string.Concat(prefix, " ", body, " ", Ellipsis);
}
// Produces the banner text shown when a comment region is collapsed.
// Single-line comments keep their full text; multi-line comments keep only
// their first line (with a trailing "*/" stripped when the comment is one line).
private static string GetCommentBannerText(SyntaxTrivia comment)
{
Contract.ThrowIfFalse(comment.IsSingleLineComment() || comment.IsMultiLineComment());
if (comment.IsSingleLineComment())
{
return CreateCommentBannerTextWithPrefix(comment.ToString(), "//");
}
else if (comment.IsMultiLineComment())
{
var lineBreakStart = comment.ToString().IndexOfAny(s_newLineCharacters);
var text = comment.ToString();
if (lineBreakStart >= 0)
{
// Multi-line comment spanning several lines: banner is its first line only.
text = text.Substring(0, lineBreakStart);
}
else
{
// Single-line "/* ... */": drop the closing "*/" if properly terminated.
text = text.Length >= "/**/".Length && text.EndsWith(MultiLineCommentSuffix)
? text.Substring(0, text.Length - MultiLineCommentSuffix.Length)
: text;
}
return CreateCommentBannerTextWithPrefix(text, "/*");
}
else
{
// Unreachable given the contract above; defensive fallback.
return string.Empty;
}
}
// Creates a collapsible, auto-collapsing span covering a run of comments,
// from the start of the first comment to the end of the last.
private static BlockSpan CreateCommentBlockSpan(
SyntaxTrivia startComment, SyntaxTrivia endComment)
{
var span = TextSpan.FromBounds(startComment.SpanStart, endComment.Span.End);
return new BlockSpan(
isCollapsible: true,
textSpan: span,
hintSpan: span,
type: BlockTypes.Comment,
bannerText: GetCommentBannerText(startComment),
autoCollapse: true);
}
// For testing purposes only: runs the comment-span collector over a trivia
// list and returns the results as an immutable array.
internal static ImmutableArray<BlockSpan> CreateCommentBlockSpan(
SyntaxTriviaList triviaList)
{
var result = ArrayBuilder<BlockSpan>.GetInstance();
CollectCommentBlockSpans(triviaList, result);
return result.ToImmutableAndFree();
}
// Scans a trivia list and appends one BlockSpan per comment region to 'spans':
// contiguous single-line comments (separated only by whitespace/newlines) are
// grouped into a single span; each multi-line comment gets its own span.
public static void CollectCommentBlockSpans(
SyntaxTriviaList triviaList, ArrayBuilder<BlockSpan> spans)
{
if (triviaList.Count > 0)
{
// Current open group of single-line comments: first and last member.
SyntaxTrivia? startComment = null;
SyntaxTrivia? endComment = null;
Action completeSingleLineCommentGroup = () =>
{
if (startComment != null)
{
var singleLineCommentGroupRegion = CreateCommentBlockSpan(startComment.Value, endComment.Value);
spans.Add(singleLineCommentGroupRegion);
startComment = null;
endComment = null;
}
};
// Iterate through trivia and collect the following:
// 1. Groups of contiguous single-line comments that are only separated by whitespace
// 2. Multi-line comments
foreach (var trivia in triviaList)
{
if (trivia.IsSingleLineComment())
{
startComment = startComment ?? trivia;
endComment = trivia;
}
else if (trivia.IsMultiLineComment())
{
completeSingleLineCommentGroup();
var multilineCommentRegion = CreateCommentBlockSpan(trivia, trivia);
spans.Add(multilineCommentRegion);
}
else if (!trivia.MatchesKind(SyntaxKind.WhitespaceTrivia,
SyntaxKind.EndOfLineTrivia,
SyntaxKind.EndOfFileToken))
{
// Any other trivia kind breaks the current single-line group.
completeSingleLineCommentGroup();
}
}
// Flush a group still open at the end of the list.
completeSingleLineCommentGroup();
}
}
// Collects comment regions attached to a node. Comments preceding a node live
// in its leading trivia, so that is all we need to scan.
public static void CollectCommentBlockSpans(
    SyntaxNode node, ArrayBuilder<BlockSpan> spans)
{
    if (node == null)
    {
        throw new ArgumentNullException(nameof(node));
    }

    CollectCommentBlockSpans(node.GetLeadingTrivia(), spans);
}
// Core overload: text span doubles as its own hint span.
private static BlockSpan CreateBlockSpan(
TextSpan textSpan, string bannerText, bool autoCollapse,
string type, bool isCollapsible)
{
return CreateBlockSpan(
textSpan, textSpan, bannerText, autoCollapse, type, isCollapsible);
}
// Core overload: constructs the BlockSpan directly from explicit spans.
private static BlockSpan CreateBlockSpan(
TextSpan textSpan, TextSpan hintSpan,
string bannerText, bool autoCollapse,
string type, bool isCollapsible)
{
return new BlockSpan(
textSpan: textSpan,
hintSpan: hintSpan,
bannerText: bannerText,
autoCollapse: autoCollapse,
type: type,
isCollapsible: isCollapsible);
}
// Span covering an entire node.
public static BlockSpan CreateBlockSpan(
SyntaxNode node, string bannerText, bool autoCollapse,
string type, bool isCollapsible)
{
return CreateBlockSpan(
node.Span,
bannerText,
autoCollapse,
type,
isCollapsible);
}
// Span from just after 'syntaxToken' through the node's last token.
public static BlockSpan? CreateBlockSpan(
SyntaxNode node, SyntaxToken syntaxToken,
string bannerText, bool autoCollapse,
string type, bool isCollapsible)
{
return CreateBlockSpan(
node, syntaxToken, node.GetLastToken(),
bannerText, autoCollapse, type, isCollapsible);
}
// Span from just after 'startToken' through 'endPos'. Returns null when the
// start token is missing (no sensible region can be produced).
public static BlockSpan? CreateBlockSpan(
SyntaxNode node, SyntaxToken startToken,
int endPos, string bannerText, bool autoCollapse,
string type, bool isCollapsible)
{
// If the SyntaxToken is actually missing, don't attempt to create an outlining region.
if (startToken.IsMissing)
{
return null;
}
// The collapsed span starts after 'startToken' so it collapses properly,
// while the hint span begins at the start of the next token so the
// indentation shown in the tooltip is accurate.
var span = TextSpan.FromBounds(GetCollapsibleStart(startToken), endPos);
var hintSpan = GetHintSpan(node, endPos);
return CreateBlockSpan(
span,
hintSpan,
bannerText,
autoCollapse,
type,
isCollapsible);
}
private static TextSpan GetHintSpan(SyntaxNode node, int endPos)
{
// Don't include attributes in the BlockSpan for a node. When the user
// hovers over the indent-guide we don't want to show them the line with
// the attributes, we want to show them the line with the start of the
// actual structure.
foreach (var child in node.ChildNodesAndTokens())
{
// The first non-attribute child marks the start of the real structure.
if (child.Kind() != SyntaxKind.AttributeList)
{
return TextSpan.FromBounds(child.SpanStart, endPos);
}
}
// Node consists only of attribute lists (or has no children): fall back
// to the node's own start.
return TextSpan.FromBounds(node.SpanStart, endPos);
}
// Span from just after 'startToken' through the collapsible end of 'endToken'.
public static BlockSpan? CreateBlockSpan(
SyntaxNode node, SyntaxToken startToken,
SyntaxToken endToken, string bannerText, bool autoCollapse,
string type, bool isCollapsible)
{
return CreateBlockSpan(
node, startToken, GetCollapsibleEnd(endToken),
bannerText, autoCollapse, type, isCollapsible);
}
// Whole-node region whose banner is just "...".
public static BlockSpan CreateBlockSpan(
SyntaxNode node, bool autoCollapse, string type, bool isCollapsible)
{
return CreateBlockSpan(
node,
bannerText: Ellipsis,
autoCollapse: autoCollapse,
type: type,
isCollapsible: isCollapsible);
}
// Adds everything after 'syntaxToken' up to and including the end
// of node as a region. The snippet to display is just "..."
public static BlockSpan? CreateBlockSpan(
SyntaxNode node, SyntaxToken syntaxToken,
bool autoCollapse, string type, bool isCollapsible)
{
return CreateBlockSpan(
node, syntaxToken,
bannerText: Ellipsis,
autoCollapse: autoCollapse,
type: type,
isCollapsible: isCollapsible);
}
// Adds everything after 'startToken' up to and including the collapsible
// end of 'endToken' as a region. The snippet to display is just "..."
public static BlockSpan? CreateBlockSpan(
SyntaxNode node, SyntaxToken startToken, SyntaxToken endToken,
bool autoCollapse, string type, bool isCollapsible)
{
return CreateBlockSpan(
node, startToken, endToken,
bannerText: Ellipsis,
autoCollapse: autoCollapse,
type: type,
isCollapsible: isCollapsible);
}
// Adds the span surrounding the syntax list as a region. The
// snippet shown is the text from the first line of the first
// node in the list. Returns null for an empty list.
public static BlockSpan? CreateBlockSpan(
IEnumerable<SyntaxNode> syntaxList, bool autoCollapse,
string type, bool isCollapsible)
{
if (syntaxList.IsEmpty())
{
return null;
}
var end = GetCollapsibleEnd(syntaxList.Last().GetLastToken());
// Collapsed text begins after the first token's full span; clamp so the
// span never has negative length.
var spanStart = syntaxList.First().GetFirstToken().FullSpan.End;
var spanEnd = end >= spanStart
? end
: spanStart;
// Hint span starts at the first node itself (includes its first line).
var hintSpanStart = syntaxList.First().SpanStart;
var hintSpanEnd = end >= hintSpanStart
? end
: hintSpanStart;
return CreateBlockSpan(
textSpan: TextSpan.FromBounds(spanStart, spanEnd),
hintSpan: TextSpan.FromBounds(hintSpanStart, hintSpanEnd),
bannerText: Ellipsis,
autoCollapse: autoCollapse,
type: type,
isCollapsible: isCollapsible);
}
}
}
| |
/*
XML-RPC.NET library
Copyright (c) 2001-2009, Charles Cook <charlescook@cookcomputing.com>
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*/
namespace CookComputing.XmlRpc
{
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Text.RegularExpressions;
// XML-RPC wire types a CLR type can map to. tInvalid marks types with no
// XML-RPC representation; tVoid marks a void return; tHashtable is the
// XmlRpcStruct case; tMultiDimArray covers arrays of rank > 1.
public enum XmlRpcType
{
tInvalid,
tInt32,
tBoolean,
tString,
tDouble,
tDateTime,
tBase64,
tStruct,
tHashtable,
tArray,
tMultiDimArray,
tVoid
}
public class XmlRpcServiceInfo
{
// Builds the service description for a type: service name/doc from
// [XmlRpcService], plus one XmlRpcMethodInfo per [XmlRpcMethod]-attributed
// method found on implemented interfaces and on the type's own hierarchy.
public static XmlRpcServiceInfo CreateServiceInfo(Type type)
{
XmlRpcServiceInfo svcInfo = new XmlRpcServiceInfo();
// extract service info
XmlRpcServiceAttribute svcAttr = (XmlRpcServiceAttribute)
Attribute.GetCustomAttribute(type, typeof(XmlRpcServiceAttribute));
if (svcAttr != null && svcAttr.Description != "")
svcInfo.doc = svcAttr.Description;
if (svcAttr != null && svcAttr.Name != "")
svcInfo.Name = svcAttr.Name;
else
svcInfo.Name = type.Name;
// extract method info
var methods = new Dictionary<string, XmlRpcMethodInfo>();
foreach (Type itf in type.GetInterfaces())
{
XmlRpcServiceAttribute itfAttr = (XmlRpcServiceAttribute)
Attribute.GetCustomAttribute(itf, typeof(XmlRpcServiceAttribute));
if (itfAttr != null)
svcInfo.doc = itfAttr.Description;
#if (!COMPACT_FRAMEWORK)
// Use the interface map so we record the implementing methods.
InterfaceMapping imap = type.GetInterfaceMap(itf);
foreach (MethodInfo mi in imap.InterfaceMethods)
{
ExtractMethodInfo(methods, mi, itf);
}
#else
foreach (MethodInfo mi in itf.GetMethods())
{
ExtractMethodInfo(methods, mi, itf);
}
#endif
}
foreach (MethodInfo mi in type.GetMethods())
{
// Walk the override chain base-first so base definitions are
// processed before the most-derived override.
var mthds = new List<MethodInfo>();
mthds.Add(mi);
MethodInfo curMi = mi;
while (true)
{
MethodInfo baseMi = curMi.GetBaseDefinition();
if (baseMi.DeclaringType == curMi.DeclaringType)
break;
mthds.Insert(0, baseMi);
curMi = baseMi;
}
foreach (MethodInfo mthd in mthds)
{
ExtractMethodInfo(methods, mthd, type);
}
}
// Expose the collected methods as a sorted array.
svcInfo.methodInfos = new XmlRpcMethodInfo[methods.Count];
methods.Values.CopyTo(svcInfo.methodInfos, 0);
Array.Sort(svcInfo.methodInfos);
return svcInfo;
}
// Records one [XmlRpcMethod]-attributed method in 'methods', keyed by its
// XML-RPC name. Methods without the attribute are ignored; a duplicate
// XML-RPC name raises XmlRpcDupXmlRpcMethodNames.
static void ExtractMethodInfo(Dictionary<string, XmlRpcMethodInfo> methods,
MethodInfo mi, Type type)
{
XmlRpcMethodAttribute attr = (XmlRpcMethodAttribute)
Attribute.GetCustomAttribute(mi,
typeof(XmlRpcMethodAttribute));
if (attr == null)
return;
XmlRpcMethodInfo mthdInfo = new XmlRpcMethodInfo();
mthdInfo.MethodInfo = mi;
mthdInfo.XmlRpcName = GetXmlRpcMethodName(mi);
mthdInfo.MiName = mi.Name;
mthdInfo.Doc = attr.Description;
// Hidden when flagged as either an introspection method or hidden.
mthdInfo.IsHidden = attr.IntrospectionMethod | attr.Hidden;
// extract parameters information
var parmList = new List<XmlRpcParameterInfo>();
ParameterInfo[] parms = mi.GetParameters();
foreach (ParameterInfo parm in parms)
{
XmlRpcParameterInfo parmInfo = new XmlRpcParameterInfo();
parmInfo.Name = parm.Name;
parmInfo.Type = parm.ParameterType;
parmInfo.XmlRpcType = GetXmlRpcTypeString(parm.ParameterType);
// retrieve optional attributed info
parmInfo.Doc = "";
XmlRpcParameterAttribute pattr = (XmlRpcParameterAttribute)
Attribute.GetCustomAttribute(parm,
typeof(XmlRpcParameterAttribute));
if (pattr != null)
{
parmInfo.Doc = pattr.Description;
parmInfo.XmlRpcName = pattr.Name;
}
parmInfo.IsParams = Attribute.IsDefined(parm,
typeof(ParamArrayAttribute));
parmList.Add(parmInfo);
}
mthdInfo.Parameters = parmList.ToArray();
// extract return type information
mthdInfo.ReturnType = mi.ReturnType;
mthdInfo.ReturnXmlRpcType = GetXmlRpcTypeString(mi.ReturnType);
object[] orattrs = mi.ReturnTypeCustomAttributes.GetCustomAttributes(
typeof(XmlRpcReturnValueAttribute), false);
if (orattrs.Length > 0)
{
mthdInfo.ReturnDoc = ((XmlRpcReturnValueAttribute)orattrs[0]).Description;
}
if (methods.ContainsKey(mthdInfo.XmlRpcName))
{
throw new XmlRpcDupXmlRpcMethodNames(String.Format("Method "
+ "{0} in type {1} has duplicate XmlRpc method name {2}",
mi.Name, type.Name, mthdInfo.XmlRpcName));
}
else
methods.Add(mthdInfo.XmlRpcName, mthdInfo);
}
// Resolves an XML-RPC method name to its reflected MethodInfo, or null when
// no registered method uses that name.
public MethodInfo GetMethodInfo(string xmlRpcMethodName)
{
    for (int i = 0; i < methodInfos.Length; i++)
    {
        if (methodInfos[i].XmlRpcName == xmlRpcMethodName)
        {
            return methodInfos[i].MethodInfo;
        }
    }
    return null;
}
// True when the method carries [XmlRpcMethod] and is neither hidden nor an
// introspection method. (Simplified: the old flag/extra-cast/"== true" dance
// computed exactly this.)
static bool IsVisibleXmlRpcMethod(MethodInfo mi)
{
    XmlRpcMethodAttribute mattr = (XmlRpcMethodAttribute)Attribute.GetCustomAttribute(
        mi, typeof(XmlRpcMethodAttribute));
    if (mattr == null)
        return false;
    return !(mattr.Hidden || mattr.IntrospectionMethod);
}
// The XML-RPC name of a method: the Method value of its [XmlRpcMethod]
// attribute when present and non-empty, otherwise the CLR method name.
public static string GetXmlRpcMethodName(MethodInfo mi)
{
    var attr = (XmlRpcMethodAttribute)Attribute.GetCustomAttribute(
        mi, typeof(XmlRpcMethodAttribute));
    if (attr == null || attr.Method == null || attr.Method == "")
    {
        return mi.Name;
    }
    return attr.Method;
}
// Reverse lookup: XML-RPC method name -> CLR method name; null when absent.
public string GetMethodName(string XmlRpcMethodName)
{
    foreach (XmlRpcMethodInfo info in methodInfos)
    {
        if (XmlRpcMethodName == info.XmlRpcName)
        {
            return info.MiName;
        }
    }
    return null;
}
// Service-level documentation string (from the [XmlRpcService] Description).
public String Doc
{
get { return doc; }
set { doc = value; }
}
// Service name: attribute Name, falling back to the CLR type name.
public String Name
{
get { return name; }
set { name = value; }
}
// All exposed methods, sorted (see CreateServiceInfo).
public XmlRpcMethodInfo[] Methods
{
get { return methodInfos; }
}
// Finds the full method descriptor for an XML-RPC method name; null when the
// service exposes no method under that name.
public XmlRpcMethodInfo GetMethod(
    String methodName)
{
    for (int i = 0; i < methodInfos.Length; i++)
    {
        if (methodInfos[i].XmlRpcName == methodName)
        {
            return methodInfos[i];
        }
    }
    return null;
}
// Instances are created only through CreateServiceInfo.
private XmlRpcServiceInfo()
{
}
// Maps a CLR type to its XML-RPC wire type; tInvalid when the type has no
// XML-RPC representation. Fix: the duplicate checks for int?, Boolean?,
// Double? and DateTime? that followed the array branch were unreachable
// (each nullable is already matched at the top of the chain) and have been
// removed — behavior is unchanged.
public static XmlRpcType GetXmlRpcType(Type t)
{
    XmlRpcType ret;
    if (t == typeof(Int32))
        ret = XmlRpcType.tInt32;
    else if (t == typeof(int?))
        ret = XmlRpcType.tInt32;
    else if (t == typeof(Boolean))
        ret = XmlRpcType.tBoolean;
    else if (t == typeof(Boolean?))
        ret = XmlRpcType.tBoolean;
    else if (t == typeof(String))
        ret = XmlRpcType.tString;
    else if (t == typeof(Double))
        ret = XmlRpcType.tDouble;
    else if (t == typeof(Double?))
        ret = XmlRpcType.tDouble;
    else if (t == typeof(DateTime))
        ret = XmlRpcType.tDateTime;
    else if (t == typeof(DateTime?))
        ret = XmlRpcType.tDateTime;
    else if (t == typeof(byte[]))
        ret = XmlRpcType.tBase64;
    else if (t == typeof(XmlRpcStruct))
    {
        ret = XmlRpcType.tHashtable;
    }
    else if (t == typeof(Array))
        ret = XmlRpcType.tArray;
    else if (t.IsArray)
    {
#if (!COMPACT_FRAMEWORK)
        // An array is valid only when its element type is itself mappable
        // (or object, which maps to any XML-RPC type).
        Type elemType = t.GetElementType();
        if (elemType != typeof(Object)
            && GetXmlRpcType(elemType) == XmlRpcType.tInvalid)
        {
            ret = XmlRpcType.tInvalid;
        }
        else
        {
            if (t.GetArrayRank() == 1) // single dim array
                ret = XmlRpcType.tArray;
            else
                ret = XmlRpcType.tMultiDimArray;
        }
#else
        //!! check types of array elements if not Object[]
        Type elemType = null;
        string[] checkSingleDim = Regex.Split(t.FullName, "\\[\\]$");
        if (checkSingleDim.Length > 1) // single dim array
        {
            elemType = Type.GetType(checkSingleDim[0]);
            ret = XmlRpcType.tArray;
        }
        else
        {
            string[] checkMultiDim = Regex.Split(t.FullName, "\\[,[,]*\\]$");
            if (checkMultiDim.Length > 1)
            {
                elemType = Type.GetType(checkMultiDim[0]);
                ret = XmlRpcType.tMultiDimArray;
            }
            else
                ret = XmlRpcType.tInvalid;
        }
        if (elemType != null)
        {
            if (elemType != typeof(Object)
                && GetXmlRpcType(elemType) == XmlRpcType.tInvalid)
            {
                ret = XmlRpcType.tInvalid;
            }
        }
#endif
    }
    else if (t == typeof(void))
    {
        ret = XmlRpcType.tVoid;
    }
    else if ((t.IsValueType && !t.IsPrimitive && !t.IsEnum)
        || t.IsClass)
    {
        // if type is struct or class its only valid for XML-RPC mapping if all
        // its members have a valid mapping or are of type object which
        // maps to any XML-RPC type. Self-referencing members are rejected.
        MemberInfo[] mis = t.GetMembers();
        foreach (MemberInfo mi in mis)
        {
            if (mi.MemberType == MemberTypes.Field)
            {
                FieldInfo fi = (FieldInfo)mi;
                if ((fi.FieldType == t) || (fi.FieldType != typeof(Object)
                    && GetXmlRpcType(fi.FieldType) == XmlRpcType.tInvalid))
                {
                    return XmlRpcType.tInvalid;
                }
            }
            else if (mi.MemberType == MemberTypes.Property)
            {
                PropertyInfo pi = (PropertyInfo)mi;
                if ((pi.PropertyType == t) || (pi.PropertyType != typeof(Object)
                    && GetXmlRpcType(pi.PropertyType) == XmlRpcType.tInvalid))
                {
                    return XmlRpcType.tInvalid;
                }
            }
        }
        ret = XmlRpcType.tStruct;
    }
    else
        ret = XmlRpcType.tInvalid;
    return ret;
}
// Convenience overload: maps a CLR type straight to its XML-RPC type name.
static public string GetXmlRpcTypeString(Type t)
{
XmlRpcType rpcType = GetXmlRpcType(t);
return GetXmlRpcTypeString(rpcType);
}
// XML-RPC spec name for each wire type; null for tInvalid. Note both struct
// variants render as "struct" and both array variants as "array".
static public string GetXmlRpcTypeString(XmlRpcType t)
{
    switch (t)
    {
        case XmlRpcType.tInt32: return "integer";
        case XmlRpcType.tBoolean: return "boolean";
        case XmlRpcType.tString: return "string";
        case XmlRpcType.tDouble: return "double";
        case XmlRpcType.tDateTime: return "dateTime";
        case XmlRpcType.tBase64: return "base64";
        case XmlRpcType.tStruct: return "struct";
        case XmlRpcType.tHashtable: return "struct";
        case XmlRpcType.tArray: return "array";
        case XmlRpcType.tMultiDimArray: return "array";
        case XmlRpcType.tVoid: return "void";
        default: return null;
    }
}
XmlRpcMethodInfo[] methodInfos;
String doc;
string name;
}
}
| |
using System;
using System.Threading.Tasks;
using System.Web;
using System.Net;
using System.Text;
using System.IO;
using System.Threading;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.ComponentModel;
using SteamBot.SteamGroups;
using SteamKit2;
using SteamTrade;
using SteamKit2.Internal;
using SteamTrade.TradeOffer;
namespace SteamBot
{
public class Bot
{
public string BotControlClass;
// If the bot is logged in fully or not. This is only set
// when it is.
public bool IsLoggedIn = false;
// The bot's display name. Changing this does not mean that
// the bot's name will change.
public string DisplayName { get; private set; }
// The response to all chat messages sent to it.
public string ChatResponse;
// A list of SteamIDs that this bot recognizes as admins.
public ulong[] Admins;
public SteamFriends SteamFriends;
public SteamClient SteamClient;
public SteamTrading SteamTrade;
public SteamUser SteamUser;
public SteamGameCoordinator SteamGameCoordinator;
public SteamNotifications SteamNotifications;
// The current trade; if the bot is not in a trade, this is
// null.
public Trade CurrentTrade;
public bool IsDebugMode = false;
// The log for the bot. This logs with the bot's display name.
public Log log;
public delegate UserHandler UserHandlerCreator(Bot bot, SteamID id);
public UserHandlerCreator CreateHandler;
Dictionary<ulong, UserHandler> userHandlers = new Dictionary<ulong, UserHandler>();
private List<SteamID> friends;
public IEnumerable<SteamID> FriendsList
{
get
{
CreateFriendsListIfNecessary();
return friends;
}
}
// The maximum amount of time the bot will trade for.
public int MaximumTradeTime { get; private set; }
// The maximum amount of time the bot will wait in between
// trade actions.
public int MaximiumActionGap { get; private set; }
//The current game that the bot is playing, for posterity.
public int CurrentGame = 0;
// The Steam Web API key.
public string ApiKey { get; private set; }
public SteamWeb SteamWeb { get; private set; }
// The prefix put in the front of the bot's display name.
string DisplayNamePrefix;
// Log level to use for this bot
Log.LogLevel LogLevel;
// The number, in milliseconds, between polls for the trade.
int TradePollingInterval;
public string MyUserNonce;
public string MyUniqueId;
bool CookiesAreInvalid = true;
bool isprocess;
public bool IsRunning = false;
public string AuthCode { get; set; }
SteamUser.LogOnDetails logOnDetails;
TradeManager tradeManager;
private TradeOfferManager tradeOfferManager;
private Task<Inventory> myInventoryTask;
// The bot's own inventory. NOTE: this getter blocks the calling thread until
// the asynchronous inventory fetch task completes.
public Inventory MyInventory
{
get
{
myInventoryTask.Wait();
return myInventoryTask.Result;
}
}
private BackgroundWorker backgroundWorker;
// Wires up a bot from its configuration: credentials, trade limits, logging,
// the SteamKit2 client and its handlers, and a background worker that drives
// the Steam callback loop.
// NOTE(review): the 'debug' parameter is not used in this constructor —
// confirm whether callers rely on it elsewhere.
public Bot(Configuration.BotInfo config, string apiKey, UserHandlerCreator handlerCreator, bool debug = false, bool process = false)
{
logOnDetails = new SteamUser.LogOnDetails
{
Username = config.Username,
Password = config.Password
};
DisplayName = config.DisplayName;
ChatResponse = config.ChatResponse;
MaximumTradeTime = config.MaximumTradeTime;
MaximiumActionGap = config.MaximumActionGap;
DisplayNamePrefix = config.DisplayNamePrefix;
// Guard against overly aggressive polling; anything <= 100ms becomes 800ms.
TradePollingInterval = config.TradePollingInterval <= 100 ? 800 : config.TradePollingInterval;
Admins = config.Admins;
// Per-bot API key overrides the globally supplied one when present.
this.ApiKey = !String.IsNullOrEmpty(config.ApiKey) ? config.ApiKey : apiKey;
this.isprocess = process;
try
{
LogLevel = (Log.LogLevel)Enum.Parse(typeof(Log.LogLevel), config.LogLevel, true);
}
catch (ArgumentException)
{
Console.WriteLine("Invalid LogLevel provided in configuration. Defaulting to 'INFO'");
LogLevel = Log.LogLevel.Info;
}
log = new Log (config.LogFile, this.DisplayName, LogLevel);
CreateHandler = handlerCreator;
BotControlClass = config.BotControlClass;
SteamWeb = new SteamWeb();
// Hacking around https
ServicePointManager.ServerCertificateValidationCallback += SteamWeb.ValidateRemoteCertificate;
log.Debug ("Initializing Steam Bot...");
SteamClient = new SteamClient();
SteamClient.AddHandler(new SteamNotifications());
SteamTrade = SteamClient.GetHandler<SteamTrading>();
SteamUser = SteamClient.GetHandler<SteamUser>();
SteamFriends = SteamClient.GetHandler<SteamFriends>();
SteamGameCoordinator = SteamClient.GetHandler<SteamGameCoordinator>();
SteamNotifications = SteamClient.GetHandler<SteamNotifications>();
// The worker pumps Steam callbacks off the constructing thread.
backgroundWorker = new BackgroundWorker { WorkerSupportsCancellation = true };
backgroundWorker.DoWork += BackgroundWorkerOnDoWork;
backgroundWorker.RunWorkerCompleted += BackgroundWorkerOnRunWorkerCompleted;
backgroundWorker.RunWorkerAsync();
}
// Lazily populates the cached friends list from SteamFriends on first use;
// subsequent calls are no-ops.
private void CreateFriendsListIfNecessary()
{
    if (friends != null)
    {
        return;
    }

    friends = new List<SteamID>();
    for (int index = 0; index < SteamFriends.GetFriendCount(); index++)
    {
        friends.Add(SteamFriends.GetFriendByIndex(index));
    }
}
/// <summary>
/// Occurs when the bot needs the SteamGuard authentication code.
/// </summary>
/// <remarks>
/// Return the code in <see cref="SteamGuardRequiredEventArgs.SteamGuard"/>
/// </remarks>
public event EventHandler<SteamGuardRequiredEventArgs> OnSteamGuardRequired;
/// <summary>
/// Connects to Steam via SteamKit2 and ensures the callback-processing
/// background worker is running.
/// </summary>
/// <remarks>
/// This call returns promptly; connection and login continue asynchronously
/// on the background worker. (An older remark claimed it never returns —
/// the implementation plainly returns <c>true</c>.)
/// </remarks>
/// <returns>Always <c>true</c>.</returns>
public bool StartBot()
{
IsRunning = true;
log.Info("Connecting...");
if (!backgroundWorker.IsBusy)
// background worker is not running
backgroundWorker.RunWorkerAsync();
SteamClient.Connect();
log.Success("Done Loading Bot!");
return true; // connection completes asynchronously on the worker
}
/// <summary>
/// Disconnect from the Steam network and stop the callback
/// thread.
/// </summary>
public void StopBot()
{
IsRunning = false;
log.Debug("Trying to shut down bot thread.");
SteamClient.Disconnect();
// Ask the callback-pumping worker to finish its current iteration and exit.
backgroundWorker.CancelAsync();
}
/// <summary>
/// Creates a new trade with the given partner.
/// </summary>
/// <returns>
/// <c>true</c>, if trade was opened,
/// <c>false</c> if there is another trade that must be closed first
/// or the web session cookies are invalid.
/// </returns>
public bool OpenTrade (SteamID other)
{
// Refuse when a trade is already active or the web session is stale.
if (CurrentTrade != null || CheckCookies() == false)
return false;
SteamTrade.Trade(other);
return true;
}
/// <summary>
/// Closes the current active trade.
/// </summary>
public void CloseTrade()
{
if (CurrentTrade == null)
return;
// Detach handler callbacks before tearing the trade down.
UnsubscribeTrade (GetUserHandler (CurrentTrade.OtherSID), CurrentTrade);
tradeManager.StopTrade ();
CurrentTrade = null;
}
// Forwards a trade-timeout event to the partner's user handler.
void OnTradeTimeout(object sender, EventArgs args)
{
// ignore event params and just null out the trade.
GetUserHandler (CurrentTrade.OtherSID).OnTradeTimeout();
}
/// <summary>
/// Create a new trade offer with the specified partner
/// </summary>
/// <param name="other">SteamId of the partner</param>
/// <returns>A fresh, unsent trade offer for that partner.</returns>
public TradeOffer NewTradeOffer(SteamID other)
{
return tradeOfferManager.NewOffer(other);
}
/// <summary>
/// Try to get a specific trade offer using the offerid
/// </summary>
/// <param name="offerId">Identifier of the offer to look up.</param>
/// <param name="tradeOffer">Receives the offer when found.</param>
/// <returns><c>true</c> when the offer was found.</returns>
public bool TryGetTradeOffer(string offerId, out TradeOffer tradeOffer)
{
return tradeOfferManager.GetOffer(offerId, out tradeOffer);
}
// Dispatches a console/admin command to this bot's own user handler, guarding
// against failures so a bad command never kills the calling thread.
public void HandleBotCommand(string command)
{
    const string exceptionFormat = "Exception caught in BotCommand Thread: {0}";
    try
    {
        GetUserHandler(SteamClient.SteamID).OnBotCommand(command);
    }
    catch (ObjectDisposedException e)
    {
        // Writing to console because odds are the error was caused by a disposed log.
        Console.WriteLine(string.Format(exceptionFormat, e));
        if (!this.IsRunning)
        {
            Console.WriteLine("The Bot is no longer running and could not write to the log. Try Starting this bot first.");
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(string.Format(exceptionFormat, e));
    }
}
// Starts a trade session with 'other': fetches inventories, creates the
// trade, hooks the user handler, and spins up the polling thread. Returns
// false when a trade is already active or an inventory fetch fails.
bool HandleTradeSessionStart (SteamID other)
{
if (CurrentTrade != null)
return false;
try
{
tradeManager.InitializeTrade(SteamUser.SteamID, other);
CurrentTrade = tradeManager.CreateTrade (SteamUser.SteamID, other);
CurrentTrade.OnClose += CloseTrade;
SubscribeTrade(CurrentTrade, GetUserHandler(other));
tradeManager.StartTradeThread(CurrentTrade);
return true;
}
catch (SteamTrade.Exceptions.InventoryFetchException ie)
{
// we shouldn't get here because the inv checks are also
// done in the TradeProposedCallback handler.
// Tell the partner whose inventory could not be fetched.
string response = String.Empty;
if (ie.FailingSteamId.ConvertToUInt64() == other.ConvertToUInt64())
{
response = "Trade failed. Could not correctly fetch your backpack. Either the inventory is inaccessible or your backpack is private.";
}
else
{
response = "Trade failed. Could not correctly fetch my backpack.";
}
SteamFriends.SendChatMessage(other,
EChatEntryType.ChatMsg,
response);
log.Info ("Bot sent other: " + response);
CurrentTrade = null;
return false;
}
}
/// <summary>
/// Tells Steam which game the bot is playing; an id of 0 clears the status.
/// </summary>
/// <param name="id">App id of the game, or 0 for "not playing".</param>
public void SetGamePlaying(int id)
{
    var playingMsg = new SteamKit2.ClientMsgProtobuf<CMsgClientGamesPlayed>(EMsg.ClientGamesPlayed);
    // Only attach a game entry for real games; an empty list means "not playing".
    if (id != 0)
    {
        playingMsg.Body.games_played.Add(new CMsgClientGamesPlayed.GamePlayed
        {
            game_id = new GameID(id),
        });
    }
    SteamClient.Send(playingMsg);
    CurrentGame = id;
}
// Central dispatcher for every SteamKit2 callback received on the bot's
// callback thread. Each msg.Handle<T> call below only runs its lambda when
// the incoming message is of that callback type.
void HandleSteamMessage(ICallbackMsg msg)
{
    log.Debug(msg.ToString());
    #region Login
    // Connection established (or failed) at the TCP/CM level.
    msg.Handle<SteamClient.ConnectedCallback> (callback =>
    {
        log.Debug ("Connection Callback: " + callback.Result);
        if (callback.Result == EResult.OK)
        {
            UserLogOn();
        }
        else
        {
            log.Error ("Failed to connect to Steam Community, trying again...");
            SteamClient.Connect ();
        }
    });
    // Account credentials accepted/rejected; handles SteamGuard prompts.
    msg.Handle<SteamUser.LoggedOnCallback> (callback =>
    {
        log.Debug ("Logged On Callback: " + callback.Result);
        if (callback.Result == EResult.OK)
        {
            MyUserNonce = callback.WebAPIUserNonce;
        }
        else
        {
            log.Error ("Login Error: " + callback.Result);
        }
        if (callback.Result == EResult.AccountLogonDenied)
        {
            log.Interface ("This account is SteamGuard enabled. Enter the code via the `auth' command.");
            // try to get the steamguard auth code from the event callback
            var eva = new SteamGuardRequiredEventArgs();
            FireOnSteamGuardRequired(eva);
            if (!String.IsNullOrEmpty(eva.SteamGuard))
                logOnDetails.AuthCode = eva.SteamGuard;
            else
                logOnDetails.AuthCode = Console.ReadLine();
        }
        if (callback.Result == EResult.InvalidLoginAuthCode)
        {
            log.Interface("The given SteamGuard code was invalid. Try again using the `auth' command.");
            logOnDetails.AuthCode = Console.ReadLine();
        }
    });
    // Final stage of logon: establish the web session and fetch the item schema.
    msg.Handle<SteamUser.LoginKeyCallback> (callback =>
    {
        MyUniqueId = callback.UniqueID.ToString();
        UserWebLogOn();
        if (Trade.CurrentSchema == null)
        {
            log.Info ("Downloading Schema...");
            Trade.CurrentSchema = Schema.FetchSchema (ApiKey);
            log.Success ("Schema Downloaded!");
        }
        SteamFriends.SetPersonaName (DisplayNamePrefix+DisplayName);
        SteamFriends.SetPersonaState (EPersonaState.Online);
        log.Success ("Steam Bot Logged In Completely!");
        IsLoggedIn = true;
        GetUserHandler(SteamClient.SteamID).OnLoginCompleted();
    });
    // Steam issued a fresh web nonce (e.g. after our cookies expired).
    msg.Handle<SteamUser.WebAPIUserNonceCallback>(webCallback =>
    {
        log.Debug("Received new WebAPIUserNonce.");
        if (webCallback.Result == EResult.OK)
        {
            MyUserNonce = webCallback.Nonce;
            UserWebLogOn();
        }
        else
        {
            log.Error("WebAPIUserNonce Error: " + webCallback.Result);
        }
    });
    // SteamGuard sentry-file update request from the server.
    msg.Handle<SteamUser.UpdateMachineAuthCallback>(
        authCallback => OnUpdateMachineAuthCallback(authCallback)
    );
    #endregion
    #region Friends
    // Friends-list delta: handles group invites, friend adds and removals.
    msg.Handle<SteamFriends.FriendsListCallback>(callback =>
    {
        foreach (SteamFriends.FriendsListCallback.Friend friend in callback.FriendList)
        {
            switch (friend.SteamID.AccountType)
            {
                case EAccountType.Clan:
                    // Group (clan) invite: let the user handler decide.
                    if (friend.Relationship == EFriendRelationship.RequestRecipient)
                    {
                        if (GetUserHandler(friend.SteamID).OnGroupAdd())
                        {
                            AcceptGroupInvite(friend.SteamID);
                        }
                        else
                        {
                            DeclineGroupInvite(friend.SteamID);
                        }
                    }
                    break;
                default:
                    CreateFriendsListIfNecessary();
                    if (friend.Relationship == EFriendRelationship.None)
                    {
                        // Friendship ended: drop local state and the handler.
                        friends.Remove(friend.SteamID);
                        GetUserHandler(friend.SteamID).OnFriendRemove();
                        RemoveUserHandler(friend.SteamID);
                    }
                    else if (friend.Relationship == EFriendRelationship.RequestRecipient)
                    {
                        // Incoming friend request: accept or decline per the handler.
                        if (GetUserHandler(friend.SteamID).OnFriendAdd())
                        {
                            if (!friends.Contains(friend.SteamID))
                            {
                                friends.Add(friend.SteamID);
                            }
                            else
                            {
                                log.Error("Friend was added who was already in friends list: " + friend.SteamID);
                            }
                            SteamFriends.AddFriend(friend.SteamID);
                        }
                        else
                        {
                            SteamFriends.RemoveFriend(friend.SteamID);
                            RemoveUserHandler(friend.SteamID);
                        }
                    }
                    break;
            }
        }
    });
    // Private chat message from a friend.
    msg.Handle<SteamFriends.FriendMsgCallback> (callback =>
    {
        EChatEntryType type = callback.EntryType;
        if (callback.EntryType == EChatEntryType.ChatMsg)
        {
            log.Info (String.Format ("Chat Message from {0}: {1}",
                SteamFriends.GetFriendPersonaName (callback.Sender),
                callback.Message
                ));
            GetUserHandler(callback.Sender).OnMessageHandler(callback.Message, type);
        }
    });
    #endregion
    #region Group Chat
    // Message posted in a group chat room the bot is in.
    msg.Handle<SteamFriends.ChatMsgCallback>(callback =>
    {
        GetUserHandler(callback.ChatterID).OnChatRoomMessage(callback.ChatRoomID, callback.ChatterID, callback.Message);
    });
    #endregion
    #region Trading
    // A live trade window was opened with us.
    msg.Handle<SteamTrading.SessionStartCallback> (callback =>
    {
        bool started = HandleTradeSessionStart (callback.OtherClient);
        if (!started)
            log.Error ("Could not start the trade session.");
        else
            log.Debug ("SteamTrading.SessionStartCallback handled successfully. Trade Opened.");
    });
    // Someone proposed a trade: validate cookies and inventories before accepting.
    msg.Handle<SteamTrading.TradeProposedCallback> (callback =>
    {
        if (CheckCookies() == false)
        {
            SteamTrade.RespondToTrade(callback.TradeID, false);
            return;
        }
        try
        {
            tradeManager.InitializeTrade(SteamUser.SteamID, callback.OtherClient);
        }
        catch (WebException we)
        {
            SteamFriends.SendChatMessage(callback.OtherClient,
                     EChatEntryType.ChatMsg,
                     "Trade error: " + we.Message);
            SteamTrade.RespondToTrade(callback.TradeID, false);
            return;
        }
        catch (Exception)
        {
            SteamFriends.SendChatMessage(callback.OtherClient,
                     EChatEntryType.ChatMsg,
                     "Trade declined. Could not correctly fetch your backpack.");
            SteamTrade.RespondToTrade(callback.TradeID, false);
            return;
        }
        //if (tradeManager.OtherInventory.IsPrivate)
        //{
        //    SteamFriends.SendChatMessage(callback.OtherClient,
        //                                 EChatEntryType.ChatMsg,
        //                                 "Trade declined. Your backpack cannot be private.");
        //    SteamTrade.RespondToTrade (callback.TradeID, false);
        //    return;
        //}
        // Accept only when no trade is running and the handler agrees.
        if (CurrentTrade == null && GetUserHandler (callback.OtherClient).OnTradeRequest ())
            SteamTrade.RespondToTrade (callback.TradeID, true);
        else
            SteamTrade.RespondToTrade (callback.TradeID, false);
    });
    // Result of a trade request we responded to.
    msg.Handle<SteamTrading.TradeResultCallback> (callback =>
    {
        if (callback.Response == EEconTradeResponse.Accepted)
        {
            log.Debug ("Trade Status: " + callback.Response);
            log.Info ("Trade Accepted!");
            GetUserHandler(callback.OtherClient).OnTradeRequestReply(true, callback.Response.ToString());
        }
        else
        {
            log.Warn ("Trade failed: " + callback.Response);
            CloseTrade ();
            GetUserHandler(callback.OtherClient).OnTradeRequestReply(false, callback.Response.ToString());
        }
    });
    #endregion
    #region Disconnect
    msg.Handle<SteamUser.LoggedOffCallback> (callback =>
    {
        IsLoggedIn = false;
        log.Warn ("Logged Off: " + callback.Result);
    });
    // Connection dropped: immediately try to reconnect.
    msg.Handle<SteamClient.DisconnectedCallback> (callback =>
    {
        IsLoggedIn = false;
        CloseTrade ();
        log.Warn ("Disconnected from Steam Network!");
        SteamClient.Connect ();
    });
    #endregion
    #region Notifications
    msg.Handle<SteamBot.SteamNotifications.NotificationCallback>(callback =>
    {
        //currently only appears to be of trade offer
        if (callback.Notifications.Count != 0)
        {
            foreach (var notification in callback.Notifications)
            {
                log.Info(notification.UserNotificationType + " notification");
            }
        }
        // Get offers only if cookies are valid
        if (CheckCookies())
            tradeOfferManager.GetOffers();
    });
    msg.Handle<SteamBot.SteamNotifications.CommentNotificationCallback>(callback =>
    {
        //various types of comment notifications on profile/activity feed etc
        //log.Info("received CommentNotificationCallback");
        //log.Info("New Comments " + callback.CommentNotifications.CountNewComments);
        //log.Info("New Comments Owners " + callback.CommentNotifications.CountNewCommentsOwner);
        //log.Info("New Comments Subscriptions" + callback.CommentNotifications.CountNewCommentsSubscriptions);
    });
    #endregion
}
/// <summary>
/// Logs on to the Steam network, presenting the machine's sentry-file hash
/// (recorded when a SteamGuard code was entered) if one exists.
/// </summary>
void UserLogOn()
{
    // Make sure the sentry directory exists before probing for the file.
    Directory.CreateDirectory(System.IO.Path.Combine(System.Windows.Forms.Application.StartupPath, "sentryfiles"));
    var sentryFile = new FileInfo(System.IO.Path.Combine("sentryfiles", String.Format("{0}.sentryfile", logOnDetails.Username)));
    logOnDetails.SentryFileHash = (sentryFile.Exists && sentryFile.Length > 0)
        ? SHAHash(File.ReadAllBytes(sentryFile.FullName))
        : null;
    SteamUser.LogOn(logOnDetails);
}
/// <summary>
/// Authenticates the web session, retrying every two seconds until it
/// succeeds, then (re)creates the trade and trade-offer managers.
/// </summary>
void UserWebLogOn()
{
    for (;;)
    {
        if (!SteamWeb.Authenticate(MyUniqueId, SteamClient, MyUserNonce))
        {
            log.Warn("Authentication failed, retrying in 2s...");
            Thread.Sleep(2000);
            continue;
        }
        log.Success("User Authenticated!");
        tradeManager = new TradeManager(ApiKey, SteamWeb);
        tradeManager.SetTradeTimeLimits(MaximumTradeTime, MaximiumActionGap, TradePollingInterval);
        tradeManager.OnTimeout += OnTradeTimeout;
        tradeOfferManager = new TradeOfferManager(ApiKey, SteamWeb);
        SubscribeTradeOffer(tradeOfferManager);
        CookiesAreInvalid = false;
        // Success: pick up any trade offers that arrived while we were offline.
        tradeOfferManager.GetOffers();
        break;
    }
}
/// <summary>
/// Checks if sessionId and token cookies are still valid.
/// Sets cookie flag if they are invalid.
/// </summary>
/// <returns>true if cookies are valid; otherwise false</returns>
bool CheckCookies()
{
    // A previous check already flagged the cookies; re-authentication pending.
    if (CookiesAreInvalid)
        return false;
    try
    {
        if (SteamWeb.VerifyCookies())
            return true;
        // Session expired: flag it and request a fresh web nonce.
        log.Warn("Cookies are invalid. Need to re-authenticate.");
        CookiesAreInvalid = true;
        SteamUser.RequestWebAPIUserNonce();
        return false;
    }
    catch
    {
        // Verification itself failed; treat the cookies as still valid and carry on.
        log.Warn("Cookie check failed. http://steamcommunity.com is possibly down.");
        return true;
    }
}
/// <summary>
/// Returns the <see cref="UserHandler"/> for the given user, creating and
/// caching one on first use.
/// </summary>
/// <param name="sid">SteamID of the user.</param>
/// <returns>The cached or newly created handler.</returns>
UserHandler GetUserHandler(SteamID sid)
{
    // The original mixed an implicit SteamID lookup (ContainsKey(sid)) with an
    // explicit 64-bit key for the insert/read, and touched the dictionary up to
    // three times. Use one explicit key and a single TryGetValue instead.
    ulong key = sid.ConvertToUInt64();
    UserHandler handler;
    if (!userHandlers.TryGetValue(key, out handler))
    {
        handler = CreateHandler(this, sid);
        userHandlers[key] = handler;
    }
    return handler;
}
/// <summary>
/// Drops the cached <see cref="UserHandler"/> for the given user, if any.
/// </summary>
/// <param name="sid">SteamID of the user whose handler should be removed.</param>
void RemoveUserHandler(SteamID sid)
{
    // Use the same explicit 64-bit key that GetUserHandler stores under.
    // Dictionary.Remove is a no-op for missing keys, so the original
    // ContainsKey pre-check (a redundant second lookup) is unnecessary.
    userHandlers.Remove(sid.ConvertToUInt64());
}
/// <summary>
/// Computes the SHA-1 digest of <paramref name="input"/>.
/// </summary>
/// <param name="input">Bytes to hash.</param>
/// <returns>The 20-byte SHA-1 digest.</returns>
static byte [] SHAHash (byte[] input)
{
    // 'using' guarantees the hash object is released even if ComputeHash
    // throws; the original only called Clear() on the success path.
    using (var sha = new SHA1Managed())
    {
        return sha.ComputeHash(input);
    }
}
/// <summary>
/// Persists the SteamGuard sentry data sent by the server and acknowledges
/// it so future logons can present the same machine authentication.
/// </summary>
/// <param name="machineAuth">Sentry update request from Steam.</param>
void OnUpdateMachineAuthCallback(SteamUser.UpdateMachineAuthCallback machineAuth)
{
    byte[] sentryHash = SHAHash(machineAuth.Data);
    // Write the sentry file next to the executable, one file per account.
    Directory.CreateDirectory(System.IO.Path.Combine(System.Windows.Forms.Application.StartupPath, "sentryfiles"));
    File.WriteAllBytes(System.IO.Path.Combine("sentryfiles", String.Format("{0}.sentryfile", logOnDetails.Username)), machineAuth.Data);
    var response = new SteamUser.MachineAuthDetails
    {
        BytesWritten = machineAuth.BytesToWrite,
        FileName = machineAuth.FileName,
        FileSize = machineAuth.BytesToWrite,
        Offset = machineAuth.Offset,
        SentryFileHash = sentryHash, // SHA-1 of the sentry file we just wrote
        OneTimePassword = machineAuth.OneTimePassword, // echoed back; semantics unclear from here
        LastError = 0, // win32 GetLastError-style result
        Result = EResult.OK,
        JobID = machineAuth.JobID, // reply to the correct server job
    };
    // send off our response
    SteamUser.SendMachineAuthResponse(response);
}
/// <summary>
/// Gets the bot's inventory and stores it in MyInventory.
/// </summary>
/// <example> This sample shows how to find items in the bot's inventory from a user handler.
/// <code>
/// Bot.GetInventory(); // Get the inventory first
/// foreach (var item in Bot.MyInventory.Items)
/// {
///     if (item.Defindex == 5021)
///     {
///         // Bot has a key in its inventory
///     }
/// }
/// </code>
/// </example>
public void GetInventory()
{
    // Fetches asynchronously; callers observe the result via myInventoryTask.
    myInventoryTask = Task.Factory.StartNew(() => Inventory.FetchInventory(SteamUser.SteamID, ApiKey, SteamWeb));
}
/// <summary>
/// Routes a newly received trade offer to the partner's user handler.
/// Only offers in the active state are forwarded.
/// </summary>
/// <param name="offer">The incoming trade offer.</param>
public void TradeOfferRouter(TradeOffer offer)
{
    if (offer.OfferState != TradeOfferState.TradeOfferStateActive)
        return;
    GetUserHandler(offer.PartnerSteamId).OnNewTradeOffer(offer);
}
/// <summary>
/// Wires this bot's offer router into the given trade-offer manager.
/// </summary>
/// <param name="tradeOfferManager">Manager whose OnNewTradeOffer event to subscribe to.</param>
public void SubscribeTradeOffer(TradeOfferManager tradeOfferManager)
{
    tradeOfferManager.OnNewTradeOffer += TradeOfferRouter;
}
//todo: should unsubscribe eventually...
/// <summary>
/// Detaches this bot's offer router from the given trade-offer manager.
/// </summary>
/// <param name="tradeOfferManager">Manager whose OnNewTradeOffer event to unsubscribe from.</param>
public void UnsubscribeTradeOffer(TradeOfferManager tradeOfferManager)
{
    tradeOfferManager.OnNewTradeOffer -= TradeOfferRouter;
}
/// <summary>
/// Subscribes all listeners of this to the trade.
/// </summary>
/// <param name="trade">The trade session to attach to.</param>
/// <param name="handler">User handler that receives the trade events.</param>
public void SubscribeTrade (Trade trade, UserHandler handler)
{
    trade.OnSuccess += handler.OnTradeSuccess;
    trade.OnClose += handler.OnTradeClose;
    trade.OnError += handler.OnTradeError;
    trade.OnStatusError += handler.OnStatusError;
    //trade.OnTimeout += OnTradeTimeout;
    trade.OnAfterInit += handler.OnTradeInit;
    trade.OnUserAddItem += handler.OnTradeAddItem;
    trade.OnUserRemoveItem += handler.OnTradeRemoveItem;
    trade.OnMessage += handler.OnTradeMessageHandler;
    trade.OnUserSetReady += handler.OnTradeReadyHandler;
    trade.OnUserAccept += handler.OnTradeAcceptHandler;
}
/// <summary>
/// Unsubscribes all listeners of this from the current trade.
/// </summary>
/// <param name="handler">User handler whose subscriptions are removed.</param>
/// <param name="trade">The trade session to detach from.</param>
// NOTE: mirrors SubscribeTrade; every += there must have a matching -= here.
public void UnsubscribeTrade (UserHandler handler, Trade trade)
{
    trade.OnSuccess -= handler.OnTradeSuccess;
    trade.OnClose -= handler.OnTradeClose;
    trade.OnError -= handler.OnTradeError;
    trade.OnStatusError -= handler.OnStatusError;
    //Trade.OnTimeout -= OnTradeTimeout;
    trade.OnAfterInit -= handler.OnTradeInit;
    trade.OnUserAddItem -= handler.OnTradeAddItem;
    trade.OnUserRemoveItem -= handler.OnTradeRemoveItem;
    trade.OnMessage -= handler.OnTradeMessageHandler;
    trade.OnUserSetReady -= handler.OnTradeReadyHandler;
    trade.OnUserAccept -= handler.OnTradeAcceptHandler;
}
#region Background Worker Methods
/// <summary>
/// Runs when the callback-pump worker exits; logs any unhandled exception,
/// stops the bot on failure, and disposes the log either way.
/// </summary>
private void BackgroundWorkerOnRunWorkerCompleted(object sender, RunWorkerCompletedEventArgs runWorkerCompletedEventArgs)
{
    Exception workerError = runWorkerCompletedEventArgs.Error;
    if (workerError != null)
    {
        log.Error(string.Format("Unhandled exceptions in bot {0} callback thread: {1} {2}",
            DisplayName,
            Environment.NewLine,
            workerError));
        log.Info("This bot died. Stopping it..");
        StopBot();
    }
    log.Dispose();
}
/// <summary>
/// Callback pump: blocks on SteamKit2 callbacks and dispatches each one
/// until cancellation is requested, surviving transient errors.
/// </summary>
private void BackgroundWorkerOnDoWork(object sender, DoWorkEventArgs doWorkEventArgs)
{
    while (!backgroundWorker.CancellationPending)
    {
        try
        {
            ICallbackMsg msg = SteamClient.WaitForCallback(true);
            HandleSteamMessage(msg);
        }
        catch (WebException e)
        {
            string uri = (e.Response != null && e.Response.ResponseUri != null)
                ? e.Response.ResponseUri.ToString()
                : "unknown";
            log.Error("URI: " + uri + " >> " + e.ToString());
            // Steam is down, retry in 45 seconds.
            System.Threading.Thread.Sleep(45000);
        }
        catch (Exception e)
        {
            // Log and keep pumping; the loop itself is the restart.
            log.Error(e.ToString());
            log.Warn("Restarting bot...");
        }
    }
}
#endregion Background Worker Methods
// Raises OnSteamGuardRequired to obtain a SteamGuard code. When no external
// subscriber exists, busy-waits until another thread assigns this.AuthCode
// (e.g. via an `auth' console command) and copies it into the event args.
private void FireOnSteamGuardRequired(SteamGuardRequiredEventArgs e)
{
    // Set to null in case this is another attempt
    this.AuthCode = null;
    // Snapshot the delegate so a concurrent unsubscribe can't race the null check.
    EventHandler<SteamGuardRequiredEventArgs> handler = OnSteamGuardRequired;
    if (handler != null)
        handler(this, e);
    else
    {
        // No subscriber: poll until AuthCode is supplied from elsewhere.
        // NOTE(review): this blocks the calling thread indefinitely if no
        // code ever arrives — confirm that is intended.
        while (true)
        {
            if (this.AuthCode != null)
            {
                e.SteamGuard = this.AuthCode;
                break;
            }
            Thread.Sleep(5);
        }
    }
}
#region Group Methods
/// <summary>
/// Accepts the invite to a Steam Group
/// </summary>
/// <param name="group">SteamID of the group to accept the invite from.</param>
private void AcceptGroupInvite(SteamID group)
{
    var acknowledgeMsg = new ClientMsg<CMsgGroupInviteAction>((int)EMsg.ClientAcknowledgeClanInvite);
    acknowledgeMsg.Body.GroupID = group.ConvertToUInt64();
    acknowledgeMsg.Body.AcceptInvite = true; // true == accept the invite
    this.SteamClient.Send(acknowledgeMsg);
}
/// <summary>
/// Declines the invite to a Steam Group
/// </summary>
/// <param name="group">SteamID of the group to decline the invite from.</param>
private void DeclineGroupInvite(SteamID group)
{
    var acknowledgeMsg = new ClientMsg<CMsgGroupInviteAction>((int)EMsg.ClientAcknowledgeClanInvite);
    acknowledgeMsg.Body.GroupID = group.ConvertToUInt64();
    acknowledgeMsg.Body.AcceptInvite = false; // false == decline the invite
    this.SteamClient.Send(acknowledgeMsg);
}
/// <summary>
/// Invites a user to the specified Steam Group
/// </summary>
/// <param name="user">SteamID of the user to invite.</param>
/// <param name="groupId">SteamID of the group to invite the user to.</param>
public void InviteUserToGroup(SteamID user, SteamID groupId)
{
    var inviteMsg = new ClientMsg<CMsgInviteUserToGroup>((int)EMsg.ClientInviteUserToClan);
    inviteMsg.Body.GroupID = groupId.ConvertToUInt64();
    inviteMsg.Body.Invitee = user.ConvertToUInt64();
    inviteMsg.Body.UnknownInfo = true; // meaning unknown; original always sent true
    this.SteamClient.Send(inviteMsg);
}
#endregion
}
}
| |
using System;
using Eto.Forms;
using Eto.Drawing;
using Eto.GtkSharp.Drawing;
namespace Eto.GtkSharp.Forms.Controls
{
/// <summary>
/// GTK# backend for the Eto <see cref="Label"/> control. Wraps a
/// <see cref="Gtk.Label"/> inside a <see cref="Gtk.EventBox"/> (so the label
/// can receive events) and manages text wrapping, alignment, color and font.
/// </summary>
public class LabelHandler : GtkControl<LabelHandler.EtoLabel, Label, Label.ICallback>, Label.IHandler
{
    // Event-receiving container; Gtk.Label alone has no window for events.
    readonly Gtk.EventBox eventBox;
    TextAlignment horizontalAlign = TextAlignment.Left;
    VerticalAlignment verticalAlign = VerticalAlignment.Top;
    public override Gtk.Widget ContainerControl
    {
        get { return eventBox; }
    }
    public override Gtk.Widget EventControl
    {
        get { return eventBox; }
    }
    /// <summary>
    /// Gtk.Label subclass that re-wraps its Pango layout to the allocated
    /// width so the label requests a height matching the wrapped text.
    /// </summary>
    public class EtoLabel : Gtk.Label
    {
        // Last width the layout was wrapped to; -1 forces a re-wrap.
        int wrapWidth;
        // Invalidate the cached wrap width (called when text/font/wrap changes).
        public void ResetWidth()
        {
            wrapWidth = -1;
        }
#if GTK2
        protected override void OnSizeRequested(ref Gtk.Requisition requisition)
        {
            //base.OnSizeRequested (ref requisition);
            // Request exactly the pixel size of the current Pango layout.
            int width, height;
            Layout.GetPixelSize(out width, out height);
            requisition.Width = width;
            requisition.Height = height;
        }
#else
        protected override void OnGetPreferredWidth (out int minimum_width, out int natural_width)
        {
            base.OnGetPreferredWidth (out minimum_width, out natural_width);
            //minimum_width = natural_width; // = 500; //this.Layout.Width;
        }
        protected override void OnAdjustSizeRequest (Gtk.Orientation orientation, out int minimum_size, out int natural_size)
        {
            base.OnAdjustSizeRequest (orientation, out minimum_size, out natural_size);
            // Horizontally, never request less than the natural width.
            if (orientation == Gtk.Orientation.Horizontal)
                minimum_size = natural_size;
        }
#endif
        protected override void OnSizeAllocated(Gdk.Rectangle allocation)
        {
            base.OnSizeAllocated(allocation);
            // Re-wrap the text to the width we were actually given.
            SetWrapWidth(allocation.Width);
        }
        // Wrap the Pango layout to the given pixel width and update the
        // requested height accordingly. Skipped when not realized, in
        // single-line mode, or for a zero width.
        void SetWrapWidth(int width)
        {
            if (!IsRealized || SingleLineMode || width == 0)
                return;
            if (wrapWidth != width)
            {
                // Pango layout width is in Pango units, not pixels.
                Layout.Width = (int)(width * Pango.Scale.PangoScale);
                int pixWidth, pixHeight;
                Layout.GetPixelSize(out pixWidth, out pixHeight);
                HeightRequest = pixHeight;
                wrapWidth = width;
            }
        }
    }
    public LabelHandler()
    {
        eventBox = new Gtk.EventBox();
        //eventBox.VisibleWindow = false;
        // Default to word-wrapped, multi-line, top-left aligned text.
        Control = new EtoLabel
        {
            SingleLineMode = false,
            LineWrap = true,
            LineWrapMode = Pango.WrapMode.Word
        };
        Control.SetAlignment(0, 0);
        eventBox.Child = Control;
    }
    /// <summary>
    /// Maps Eto's WrapMode onto the label's LineWrap/LineWrapMode/SingleLineMode trio.
    /// </summary>
    public WrapMode Wrap
    {
        get
        {
            if (!Control.LineWrap)
                return WrapMode.None;
            if (Control.LineWrapMode == Pango.WrapMode.Word)
                return WrapMode.Word;
            return WrapMode.Character;
        }
        set
        {
            // Changing the wrap mode invalidates the cached wrap width.
            Control.ResetWidth();
            switch (value)
            {
                case WrapMode.None:
                    Control.Wrap = false;
                    Control.LineWrap = false;
                    Control.SingleLineMode = true;
                    break;
                case WrapMode.Word:
                    Control.Wrap = true;
                    Control.LineWrapMode = Pango.WrapMode.Word;
                    Control.LineWrap = true;
                    Control.SingleLineMode = false;
                    break;
                case WrapMode.Character:
                    Control.Wrap = true;
                    Control.LineWrapMode = Pango.WrapMode.Char;
                    Control.LineWrap = true;
                    Control.SingleLineMode = false;
                    break;
                default:
                    throw new NotSupportedException();
            }
        }
    }
    public override void AttachEvent(string id)
    {
        switch (id)
        {
            case TextControl.TextChangedEvent:
                // Text only changes through the Text setter; no native event needed.
                break;
            default:
                base.AttachEvent(id);
                break;
        }
    }
    public virtual Color TextColor
    {
        get { return Control.GetForeground(); }
        set { Control.SetForeground(value); }
    }
    /// <summary>
    /// Label text, converting between Eto and GTK mnemonic conventions.
    /// </summary>
    public override string Text
    {
        get { return Control.Text.ToEtoMnemonic(); }
        set
        {
            // New text may wrap differently, so force a re-wrap.
            Control.ResetWidth();
            Control.TextWithMnemonic = value.ToPlatformMnemonic();
        }
    }
    public TextAlignment TextAlignment
    {
        get { return horizontalAlign; }
        set
        {
            horizontalAlign = value;
            SetAlignment();
        }
    }
    // Push the stored horizontal/vertical alignment into the GTK label.
    void SetAlignment()
    {
        float xalignment;
        float yalignment;
        switch (horizontalAlign)
        {
            default:
                xalignment = 0F;
                break;
            case TextAlignment.Center:
                xalignment = 0.5F;
                break;
            case TextAlignment.Right:
                xalignment = 1F;
                break;
        }
        switch (verticalAlign)
        {
            case VerticalAlignment.Center:
                yalignment = 0.5F;
                break;
            default:
                yalignment = 0F;
                break;
            case VerticalAlignment.Bottom:
                yalignment = 1F;
                break;
        }
        Control.SetAlignment(xalignment, yalignment);
        // Justify controls multi-line text justification within the label.
        Control.Justify = horizontalAlign.ToGtk();
    }
    public VerticalAlignment VerticalAlignment
    {
        get { return verticalAlign; }
        set
        {
            verticalAlign = value;
            SetAlignment();
        }
    }
    public override Font Font
    {
        get { return base.Font; }
        set
        {
            // A font change affects wrapping, so invalidate the cached width.
            Control.ResetWidth();
            base.Font = value;
            Control.Attributes = value != null ? ((FontHandler)value.Handler).Attributes : null;
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using JCG = J2N.Collections.Generic;
namespace YAF.Lucene.Net.Codecs.PerField
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using FieldInfo = YAF.Lucene.Net.Index.FieldInfo;
using IOUtils = YAF.Lucene.Net.Util.IOUtils;
using RamUsageEstimator = YAF.Lucene.Net.Util.RamUsageEstimator;
using SegmentReadState = YAF.Lucene.Net.Index.SegmentReadState;
using SegmentWriteState = YAF.Lucene.Net.Index.SegmentWriteState;
using Terms = YAF.Lucene.Net.Index.Terms;
/// <summary>
/// Enables per field postings support.
/// <para/>
/// Note, when extending this class, the name (<see cref="PostingsFormat.Name"/>) is
/// written into the index. In order for the field to be read, the
/// name must resolve to your implementation via <see cref="PostingsFormat.ForName(string)"/>.
/// This method uses <see cref="IPostingsFormatFactory.GetPostingsFormat(string)"/> to resolve format names.
/// See <see cref="DefaultPostingsFormatFactory"/> for information about how to implement your own <see cref="PostingsFormat"/>.
/// <para/>
/// Files written by each posting format have an additional suffix containing the
/// format name. For example, in a per-field configuration instead of <c>_1.prx</c>
/// filenames would look like <c>_1_Lucene40_0.prx</c>.
/// <para/>
/// @lucene.experimental
/// </summary>
/// <seealso cref="IPostingsFormatFactory"/>
/// <seealso cref="DefaultPostingsFormatFactory"/>
[PostingsFormatName("PerField40")] // LUCENENET specific - using PostingsFormatName attribute to ensure the default name passed from subclasses is the same as this class name
public abstract class PerFieldPostingsFormat : PostingsFormat
{
    // LUCENENET specific - removed this static variable because our name is determined by the PostingsFormatNameAttribute
    ///// <summary>
    ///// Name of this <seealso cref="PostingsFormat"/>. </summary>
    //public static readonly string PER_FIELD_NAME = "PerField40";
    /// <summary>
    /// <see cref="FieldInfo"/> attribute name used to store the
    /// format name for each field.
    /// </summary>
    public static readonly string PER_FIELD_FORMAT_KEY = typeof(PerFieldPostingsFormat).Name + ".format";
    /// <summary>
    /// <see cref="FieldInfo"/> attribute name used to store the
    /// segment suffix name for each field.
    /// </summary>
    public static readonly string PER_FIELD_SUFFIX_KEY = typeof(PerFieldPostingsFormat).Name + ".suffix";
    /// <summary>
    /// Sole constructor. </summary>
    public PerFieldPostingsFormat()
        : base()
    {
    }
    public override sealed FieldsConsumer FieldsConsumer(SegmentWriteState state)
    {
        return new FieldsWriter(this, state);
    }
    /// <summary>
    /// Pairs a delegate <see cref="FieldsConsumer"/> with the numeric suffix
    /// assigned to its format within the current segment.
    /// </summary>
    internal class FieldsConsumerAndSuffix : IDisposable
    {
        internal FieldsConsumer Consumer { get; set; }
        internal int Suffix { get; set; }
        public void Dispose()
        {
            Consumer.Dispose();
        }
    }
    /// <summary>
    /// Write side: fans each field out to the <see cref="PostingsFormat"/>
    /// chosen by <see cref="GetPostingsFormatForField(string)"/>, creating one
    /// delegate consumer per distinct format (with a unique segment suffix).
    /// </summary>
    private class FieldsWriter : FieldsConsumer
    {
        private readonly PerFieldPostingsFormat outerInstance;
        // One delegate consumer per distinct postings format seen so far.
        internal readonly IDictionary<PostingsFormat, FieldsConsumerAndSuffix> formats = new Dictionary<PostingsFormat, FieldsConsumerAndSuffix>();
        // Highest suffix handed out per format name, to keep filenames unique.
        internal readonly IDictionary<string, int?> suffixes = new Dictionary<string, int?>();
        internal readonly SegmentWriteState segmentWriteState;
        public FieldsWriter(PerFieldPostingsFormat outerInstance, SegmentWriteState state)
        {
            this.outerInstance = outerInstance;
            segmentWriteState = state;
        }
        public override TermsConsumer AddField(FieldInfo field)
        {
            PostingsFormat format = outerInstance.GetPostingsFormatForField(field.Name);
            if (format == null)
            {
                throw new InvalidOperationException("invalid null PostingsFormat for field=\"" + field.Name + "\"");
            }
            string formatName = format.Name;
            // Record the chosen format on the field so the reader can resolve it.
            string previousValue = field.PutAttribute(PER_FIELD_FORMAT_KEY, formatName);
            Debug.Assert(previousValue == null);
            int? suffix;
            FieldsConsumerAndSuffix consumer;
            if (!formats.TryGetValue(format, out consumer) || consumer == null)
            {
                // First time we are seeing this format; create a new instance
                // bump the suffix
                if (!suffixes.TryGetValue(formatName, out suffix) || suffix == null)
                {
                    suffix = 0;
                }
                else
                {
                    suffix = suffix + 1;
                }
                suffixes[formatName] = suffix;
                string segmentSuffix = GetFullSegmentSuffix(field.Name,
                                                            segmentWriteState.SegmentSuffix,
                                                            GetSuffix(formatName, Convert.ToString(suffix, CultureInfo.InvariantCulture)));
                consumer = new FieldsConsumerAndSuffix();
                consumer.Consumer = format.FieldsConsumer(new SegmentWriteState(segmentWriteState, segmentSuffix));
                consumer.Suffix = suffix.Value; // LUCENENET NOTE: At this point suffix cannot be null
                formats[format] = consumer;
            }
            else
            {
                // we've already seen this format, so just grab its suffix
                Debug.Assert(suffixes.ContainsKey(formatName));
                suffix = consumer.Suffix;
            }
            // Record the suffix too, so the reader can rebuild the exact filename.
            previousValue = field.PutAttribute(PER_FIELD_SUFFIX_KEY, Convert.ToString(suffix, CultureInfo.InvariantCulture));
            Debug.Assert(previousValue == null);
            // TODO: we should only provide the "slice" of FIS
            // that this PF actually sees ... then stuff like
            // .hasProx could work correctly?
            // NOTE: .hasProx is already broken in the same way for the non-perfield case,
            // if there is a fieldinfo with prox that has no postings, you get a 0 byte file.
            return consumer.Consumer.AddField(field);
        }
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                // Close all subs
                IOUtils.Dispose(formats.Values);
            }
        }
    }
    // Builds the "<formatName>_<suffix>" filename component for a delegate format.
    internal static string GetSuffix(string formatName, string suffix)
    {
        return formatName + "_" + suffix;
    }
    internal static string GetFullSegmentSuffix(string fieldName, string outerSegmentSuffix, string segmentSuffix)
    {
        if (outerSegmentSuffix.Length == 0)
        {
            return segmentSuffix;
        }
        else
        {
            // TODO: support embedding; I think it should work but
            // we need a test to confirm
            // return outerSegmentSuffix + "_" + segmentSuffix;
            throw new InvalidOperationException("cannot embed PerFieldPostingsFormat inside itself (field \"" + fieldName + "\" returned PerFieldPostingsFormat)");
        }
    }
    /// <summary>
    /// Read side: resolves each field's stored format/suffix attributes back
    /// to a delegate <see cref="FieldsProducer"/>, sharing one producer per
    /// distinct segment suffix.
    /// </summary>
    private class FieldsReader : FieldsProducer
    {
        private readonly PerFieldPostingsFormat outerInstance;
        // LUCENENET specific: Use StringComparer.Ordinal to get the same ordering as Java
        internal readonly IDictionary<string, FieldsProducer> fields = new JCG.SortedDictionary<string, FieldsProducer>(StringComparer.Ordinal);
        // Keyed by segment suffix so fields sharing a format share one producer.
        internal readonly IDictionary<string, FieldsProducer> formats = new Dictionary<string, FieldsProducer>();
        public FieldsReader(PerFieldPostingsFormat outerInstance, SegmentReadState readState)
        {
            this.outerInstance = outerInstance;
            // Read _X.per and init each format:
            bool success = false;
            try
            {
                // Read field name -> format name
                foreach (FieldInfo fi in readState.FieldInfos)
                {
                    if (fi.IsIndexed)
                    {
                        string fieldName = fi.Name;
                        string formatName = fi.GetAttribute(PER_FIELD_FORMAT_KEY);
                        if (formatName != null)
                        {
                            // null formatName means the field is in fieldInfos, but has no postings!
                            string suffix = fi.GetAttribute(PER_FIELD_SUFFIX_KEY);
                            Debug.Assert(suffix != null);
                            PostingsFormat format = PostingsFormat.ForName(formatName);
                            string segmentSuffix = GetSuffix(formatName, suffix);
                            // LUCENENET: Eliminated extra lookup by using TryGetValue instead of ContainsKey
                            if (!formats.TryGetValue(segmentSuffix, out Codecs.FieldsProducer field))
                            {
                                formats[segmentSuffix] = field = format.FieldsProducer(new SegmentReadState(readState, segmentSuffix));
                            }
                            fields[fieldName] = field;
                        }
                    }
                }
                success = true;
            }
            finally
            {
                // On failure, release any producers opened before the error.
                if (!success)
                {
                    IOUtils.DisposeWhileHandlingException(formats.Values);
                }
            }
        }
        public override IEnumerator<string> GetEnumerator()
        {
            return fields.Keys.GetEnumerator(); // LUCENENET NOTE: enumerators are not writable in .NET
        }
        public override Terms GetTerms(string field)
        {
            FieldsProducer fieldsProducer;
            if (fields.TryGetValue(field, out fieldsProducer) && fieldsProducer != null)
            {
                return fieldsProducer.GetTerms(field);
            }
            return null;
        }
        public override int Count
        {
            get { return fields.Count; }
        }
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                IOUtils.Dispose(formats.Values);
            }
        }
        public override long RamBytesUsed()
        {
            // Sum each delegate producer's usage plus the key strings themselves.
            long sizeInBytes = 0;
            foreach (KeyValuePair<string, FieldsProducer> entry in formats)
            {
                sizeInBytes += entry.Key.Length * RamUsageEstimator.NUM_BYTES_CHAR;
                sizeInBytes += entry.Value.RamBytesUsed();
            }
            return sizeInBytes;
        }
        public override void CheckIntegrity()
        {
            foreach (FieldsProducer producer in formats.Values)
            {
                producer.CheckIntegrity();
            }
        }
    }
    public override sealed FieldsProducer FieldsProducer(SegmentReadState state)
    {
        return new FieldsReader(this, state);
    }
    /// <summary>
    /// Returns the postings format that should be used for writing
    /// new segments of <paramref name="field"/>.
    /// <para/>
    /// The field to format mapping is written to the index, so
    /// this method is only invoked when writing, not when reading.
    /// </summary>
    public abstract PostingsFormat GetPostingsFormatForField(string field);
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Connections;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Server.Kestrel.Core.Internal;
using Microsoft.AspNetCore.Server.Kestrel.Https;
using Microsoft.AspNetCore.Testing;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Xunit;
namespace Microsoft.AspNetCore.Server.Kestrel.Core.Tests
{
public class AddressBinderTests
{
[Theory]
[InlineData("http://10.10.10.10:5000/", "10.10.10.10", 5000)]
[InlineData("http://[::1]:5000", "::1", 5000)]
[InlineData("http://[::1]", "::1", 80)]
[InlineData("http://127.0.0.1", "127.0.0.1", 80)]
[InlineData("https://127.0.0.1", "127.0.0.1", 443)]
public void CorrectIPEndpointsAreCreated(string address, string expectedAddress, int expectedPort)
{
    // A concrete IP-based address must yield a matching IPEndPoint,
    // with the default port inferred from the scheme when omitted.
    var parsed = BindingAddress.Parse(address);
    var created = AddressBinder.TryCreateIPEndPoint(parsed, out var ipEndPoint);
    Assert.True(created);
    Assert.NotNull(ipEndPoint);
    Assert.Equal(IPAddress.Parse(expectedAddress), ipEndPoint.Address);
    Assert.Equal(expectedPort, ipEndPoint.Port);
}
[Theory]
[InlineData("http://*")]
[InlineData("http://*:5000")]
[InlineData("http://+:80")]
[InlineData("http://+")]
[InlineData("http://randomhost:6000")]
[InlineData("http://randomhost")]
[InlineData("https://randomhost")]
public void DoesNotCreateIPEndPointOnInvalidIPAddress(string address)
{
    // Wildcards and host names are not IP addresses, so no endpoint is created.
    var created = AddressBinder.TryCreateIPEndPoint(BindingAddress.Parse(address), out _);
    Assert.False(created);
}
[Theory]
[InlineData("*")]
[InlineData("randomhost")]
[InlineData("+")]
[InlineData("contoso.com")]
public void ParseAddressDefaultsToAnyIPOnInvalidIPAddress(string host)
{
    // Unrecognized hosts fall back to listening on any address (IPv6Any, port 80).
    var options = AddressBinder.ParseAddress($"http://{host}", out var isHttps);
    Assert.IsType<AnyIPListenOptions>(options);
    Assert.IsType<IPEndPoint>(options.EndPoint);
    Assert.Equal(IPAddress.IPv6Any, options.IPEndPoint.Address);
    Assert.Equal(80, options.IPEndPoint.Port);
    Assert.False(isHttps);
}
[Fact]
public void ParseAddressLocalhost()
{
    // "localhost" gets its own options type bound to the loopback address.
    var options = AddressBinder.ParseAddress("http://localhost", out var isHttps);
    Assert.IsType<LocalhostListenOptions>(options);
    Assert.IsType<IPEndPoint>(options.EndPoint);
    Assert.Equal(IPAddress.Loopback, options.IPEndPoint.Address);
    Assert.Equal(80, options.IPEndPoint.Port);
    Assert.False(isHttps);
}
[ConditionalFact]
[OSSkipCondition(OperatingSystems.Windows, SkipReason = "tmp/kestrel-test.sock is not valid for windows. Unix socket path must be absolute.")]
public void ParseAddressUnixPipe()
{
    // http://unix:<path> addresses produce a Unix domain socket endpoint.
    var options = AddressBinder.ParseAddress("http://unix:/tmp/kestrel-test.sock", out var isHttps);
    Assert.IsType<UnixDomainSocketEndPoint>(options.EndPoint);
    Assert.Equal("/tmp/kestrel-test.sock", options.SocketPath);
    Assert.False(isHttps);
}
[ConditionalFact]
[OSSkipCondition(OperatingSystems.Linux | OperatingSystems.MacOSX, SkipReason = "Windows has drive letters and volume separator (c:), testing this url on unix or osx provides completely different output.")]
[MinimumOSVersion(OperatingSystems.Windows, WindowsVersions.Win10_RS4)]
public void ParseAddressUnixPipeOnWindows()
{
    // On Windows the leading slash before the drive letter is stripped from the socket path.
    var parsed = AddressBinder.ParseAddress(@"http://unix:/c:/foo/bar/pipe.socket", out var https);

    Assert.IsType<UnixDomainSocketEndPoint>(parsed.EndPoint);
    Assert.Equal("c:/foo/bar/pipe.socket", parsed.SocketPath);
    Assert.False(https);
}
[Theory]
[InlineData("http://10.10.10.10:5000/", "10.10.10.10", 5000, false)]
[InlineData("http://[::1]:5000", "::1", 5000, false)]
[InlineData("http://[::1]", "::1", 80, false)]
[InlineData("http://127.0.0.1", "127.0.0.1", 80, false)]
[InlineData("https://127.0.0.1", "127.0.0.1", 443, true)]
public void ParseAddressIP(string address, string ip, int port, bool isHttps)
{
    // Literal IP addresses (with or without an explicit port) bind directly to an IPEndPoint.
    var parsed = AddressBinder.ParseAddress(address, out var https);

    var endPoint = Assert.IsType<IPEndPoint>(parsed.EndPoint);
    Assert.Equal(IPAddress.Parse(ip), endPoint.Address);
    Assert.Equal(port, endPoint.Port);
    Assert.Equal(isHttps, https);
}
[Fact]
public async Task WrapsAddressInUseExceptionAsIOException()
{
    // An AddressInUseException thrown by the bind callback should surface to callers as IOException.
    var serverAddresses = new ServerAddressesFeature();
    serverAddresses.InternalCollection.Add("http://localhost:5000");
    var serverOptions = new KestrelServerOptions();

    var context = TestContextFactory.CreateAddressBindContext(
        serverAddresses,
        serverOptions,
        NullLogger.Instance,
        endpoint => throw new AddressInUseException("already in use"));

    await Assert.ThrowsAsync<IOException>(() =>
        AddressBinder.BindAsync(serverOptions.ListenOptions, context, CancellationToken.None));
}
[Fact]
public void LogsWarningWhenHostingAddressesAreOverridden()
{
    // When explicit Listen* options are configured, hosting-supplied addresses are
    // discarded and a warning must be logged for the overridden address.
    const string overriddenAddress = "http://localhost:5000";
    var logger = new TestApplicationErrorLogger();
    var serverAddresses = new ServerAddressesFeature();
    serverAddresses.InternalCollection.Add(overriddenAddress);

    var serverOptions = new KestrelServerOptions();
    serverOptions.ListenAnyIP(8080);

    var context = TestContextFactory.CreateAddressBindContext(
        serverAddresses,
        serverOptions,
        logger,
        endpoint => Task.CompletedTask);

    var bindTask = AddressBinder.BindAsync(serverOptions.ListenOptions, context, CancellationToken.None);
    Assert.True(bindTask.IsCompletedSuccessfully);

    var log = Assert.Single(logger.Messages);
    Assert.Equal(LogLevel.Warning, log.LogLevel);
    Assert.Equal(CoreStrings.FormatOverridingWithKestrelOptions(overriddenAddress), log.Message);
}
[Fact]
public void LogsInformationWhenKestrelAddressesAreOverridden()
{
    // With PreferHostingUrls enabled, the hosting-supplied address takes precedence over
    // explicit Kestrel Listen* options, and an informational message is logged.
    const string overriddenAddress = "http://localhost:5000";
    var logger = new TestApplicationErrorLogger();
    var serverAddresses = new ServerAddressesFeature();
    serverAddresses.InternalCollection.Add(overriddenAddress);

    var serverOptions = new KestrelServerOptions();
    serverOptions.ListenAnyIP(8080);

    var context = TestContextFactory.CreateAddressBindContext(
        serverAddresses,
        serverOptions,
        logger,
        endpoint => Task.CompletedTask);
    context.ServerAddressesFeature.PreferHostingUrls = true;

    var bindTask = AddressBinder.BindAsync(serverOptions.ListenOptions, context, CancellationToken.None);
    Assert.True(bindTask.IsCompletedSuccessfully);

    var log = Assert.Single(logger.Messages);
    Assert.Equal(LogLevel.Information, log.LogLevel);
    Assert.Equal(CoreStrings.FormatOverridingWithPreferHostingUrls(nameof(context.ServerAddressesFeature.PreferHostingUrls), overriddenAddress), log.Message);
}
[Fact]
// NOTE(review): "Bindding" in the method name appears to be a typo for "Binding";
// left unchanged here to keep this a comment-only edit (renaming would alter the test name).
public async Task FlowsCancellationTokenToCreateBinddingCallback()
{
    var addresses = new ServerAddressesFeature();
    addresses.InternalCollection.Add("http://localhost:5000");
    var options = new KestrelServerOptions();
    // The create-binding callback must receive the token that was passed to BindAsync.
    var addressBindContext = TestContextFactory.CreateAddressBindContext(
        addresses,
        options,
        NullLogger.Instance,
        (endpoint, cancellationToken) =>
        {
            cancellationToken.ThrowIfCancellationRequested();
            return Task.CompletedTask;
        });
    // A pre-canceled token should surface as OperationCanceledException thrown by the callback.
    await Assert.ThrowsAsync<OperationCanceledException>(() =>
        AddressBinder.BindAsync(options.ListenOptions, addressBindContext, new CancellationToken(true)));
}
[Theory]
[InlineData("http://*:80")]
[InlineData("http://+:80")]
[InlineData("http://contoso.com:80")]
public async Task FallbackToIPv4WhenIPv6AnyBindFails(string address)
{
    // When binding to [::] (IPv6Any) fails — e.g. the machine has no IPv6 support —
    // AddressBinder should retry on 0.0.0.0 (IPv4 any) and log the fallback.
    var logger = new MockLogger();
    var addresses = new ServerAddressesFeature();
    addresses.InternalCollection.Add(address);
    var options = new KestrelServerOptions();
    var ipV6Attempt = false;
    var ipV4Attempt = false;
    var addressBindContext = TestContextFactory.CreateAddressBindContext(
        addresses,
        options,
        logger,
        endpoint =>
        {
            // IPAddress does not overload operator ==, so the previous `==` checks relied on
            // the IPAddress.IPv6Any/IPAddress.Any singleton instances flowing through unchanged.
            // Equals performs a value comparison and is robust to equal-but-distinct instances.
            if (IPAddress.IPv6Any.Equals(endpoint.IPEndPoint.Address))
            {
                ipV6Attempt = true;
                // Simulate the OS reporting that the IPv6 address family is unsupported.
                throw new InvalidOperationException("EAFNOSUPPORT");
            }

            if (IPAddress.Any.Equals(endpoint.IPEndPoint.Address))
            {
                ipV4Attempt = true;
            }

            return Task.CompletedTask;
        });

    await AddressBinder.BindAsync(options.ListenOptions, addressBindContext, CancellationToken.None);

    Assert.True(ipV4Attempt, "Should have attempted to bind to IPAddress.Any");
    Assert.True(ipV6Attempt, "Should have attempted to bind to IPAddress.IPv6Any");
    Assert.Contains(logger.Messages, f => f.Equals(CoreStrings.FormatFallbackToIPv4Any(80)));
}
[Fact]
public async Task DefaultAddressBinderWithoutDevCertButHttpsConfiguredBindsToHttpsPorts()
{
    var x509Certificate2 = TestResources.GetTestCertificate();
    var logger = new MockLogger();
    var addresses = new ServerAddressesFeature();
    var services = new ServiceCollection();
    services.AddLogging();
    var options = new KestrelServerOptions()
    {
        // This stops the dev cert from being loaded
        IsDevCertLoaded = true,
        ApplicationServices = services.BuildServiceProvider()
    };
    // Configure HTTPS only for the port-5001 endpoint; port 5000 stays plain HTTP.
    options.ConfigureEndpointDefaults(e =>
    {
        if (e.IPEndPoint.Port == 5001)
        {
            e.UseHttps(new HttpsConnectionAdapterOptions { ServerCertificate = x509Certificate2 });
        }
    });
    // Capture every endpoint the binder attempts to bind so we can inspect them afterwards.
    var endpoints = new List<ListenOptions>();
    var addressBindContext = TestContextFactory.CreateAddressBindContext(
        addresses,
        options,
        logger,
        listenOptions =>
        {
            endpoints.Add(listenOptions);
            return Task.CompletedTask;
        });
    await AddressBinder.BindAsync(options.ListenOptions, addressBindContext, CancellationToken.None);
    // Default binding should produce HTTP on 5000 and TLS on 5001.
    Assert.Contains(endpoints, e => e.IPEndPoint.Port == 5000 && !e.IsTls);
    Assert.Contains(endpoints, e => e.IPEndPoint.Port == 5001 && e.IsTls);
}
}
}
| |
using Lucene.Net.Support.IO;
using Lucene.Net.Util;
using System;
using System.IO;
using System.Collections.Generic;
namespace Lucene.Net.Store
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// <para>Implements <see cref="LockFactory"/> using native OS file
/// locks. For NFS based access to an index, it's
/// recommended that you try <see cref="SimpleFSLockFactory"/>
/// first and work around the one limitation that a lock file
/// could be left when the runtime exits abnormally.</para>
///
/// <para>The primary benefit of <see cref="NativeFSLockFactory"/> is
/// that locks (not the lock file itself) will be properly
/// removed (by the OS) if the runtime has an abnormal exit.</para>
///
/// <para>Note that, unlike <see cref="SimpleFSLockFactory"/>, the existence of
/// leftover lock files in the filesystem is fine because the OS
/// will free the locks held against these files even though the
/// files still remain. Lucene will never actively remove the lock
/// files, so although you see them, the index may not be locked.</para>
///
/// <para>Special care needs to be taken if you change the locking
/// implementation: First be certain that no writer is in fact
/// writing to the index otherwise you can easily corrupt
/// your index. Be sure to do the <see cref="LockFactory"/> change on all Lucene
/// instances and clean up all leftover lock files before starting
/// the new configuration for the first time. Different implementations
/// can not work together!</para>
///
/// <para>If you suspect that this or any other <see cref="LockFactory"/> is
/// not working properly in your environment, you can easily
/// test it by using <see cref="VerifyingLockFactory"/>,
/// <see cref="LockVerifyServer"/> and <see cref="LockStressTest"/>.</para>
/// </summary>
/// <seealso cref="LockFactory"/>
// LUCENENET specific - this class has been refactored significantly from its Java counterpart
// to take advantage of .NET FileShare locking in the Windows and Linux environments.
public class NativeFSLockFactory : FSLockFactory
{
    // The locking mechanism chosen for the current OS/framework combination.
    internal enum FSLockingStrategy
    {
        // FileStream.Lock() is available and the lock-violation HResult is known.
        FileStreamLockViolation,
        // Exclusive opens are detected via the file-share-violation HResult.
        FileSharingViolation,
        // Neither HResult could be determined; use the substandard fallback lock.
        Fallback
    }

    // LUCENENET: This controls the locking strategy used for the current operating system and framework
    internal static FSLockingStrategy LockingStrategy
    {
        get
        {
            if (IS_FILESTREAM_LOCKING_PLATFORM && HRESULT_FILE_LOCK_VIOLATION.HasValue)
                return FSLockingStrategy.FileStreamLockViolation;
            else if (HRESULT_FILE_SHARE_VIOLATION.HasValue)
                return FSLockingStrategy.FileSharingViolation;
            else
                // Fallback implementation for unknown platforms that don't rely on HResult
                return FSLockingStrategy.Fallback;
        }
    }

    // LUCENENET NOTE: Lookup the HResult value we are interested in for the current OS
    // by provoking the exception during initialization and caching its HResult value for later.
    // We optimize for Windows because those HResult values are known and documented, but for
    // other platforms, this is the only way we can reliably determine the HResult values
    // we are interested in.
    //
    // Reference: https://stackoverflow.com/q/46380483
    private static readonly bool IS_FILESTREAM_LOCKING_PLATFORM = LoadIsFileStreamLockingPlatform();

    // Documented Windows HResult values for lock and sharing violations.
    private const int WIN_HRESULT_FILE_LOCK_VIOLATION = unchecked((int)0x80070021);
    private const int WIN_HRESULT_FILE_SHARE_VIOLATION = unchecked((int)0x80070020);

    // null when the corresponding HResult could not be determined on this platform.
    internal static readonly int? HRESULT_FILE_LOCK_VIOLATION = LoadFileLockViolationHResult();
    internal static readonly int? HRESULT_FILE_SHARE_VIOLATION = LoadFileShareViolationHResult();

    private static bool LoadIsFileStreamLockingPlatform()
    {
#if FEATURE_FILESTREAM_LOCK
        return Constants.WINDOWS; // LUCENENET: See: https://github.com/dotnet/corefx/issues/5964
#else
        return false;
#endif
    }

    // Determines the HResult raised when writing to a byte range that another stream has
    // locked via FileStream.Lock. Returns the documented constant on Windows; otherwise
    // provokes the exception once and caches the observed value.
    private static int? LoadFileLockViolationHResult()
    {
        if (Constants.WINDOWS)
            return WIN_HRESULT_FILE_LOCK_VIOLATION;

        // Skip provoking the exception unless we know we will use the value
        if (IS_FILESTREAM_LOCKING_PLATFORM)
        {
            return FileSupport.GetFileIOExceptionHResult(provokeException: (fileName) =>
            {
                using (var lockStream = new FileStream(fileName, FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite))
                {
                    lockStream.Lock(0, 1); // Create an exclusive lock
                    using (var stream = new FileStream(fileName, FileMode.Open, FileAccess.Write, FileShare.ReadWrite))
                    {
                        // try to find out if the file is locked by writing a byte. Note that we need to flush the stream to find out.
                        stream.WriteByte(0);
                        stream.Flush(); // this *may* throw an IOException if the file is locked, but...
                                        // ... closing the stream is the real test
                    }
                }
            });
        }

        return null;
    }

    // Determines the HResult raised when an open request conflicts with an existing
    // stream's FileShare settings (sharing violation).
    private static int? LoadFileShareViolationHResult()
    {
        if (Constants.WINDOWS)
            return WIN_HRESULT_FILE_SHARE_VIOLATION;

        return FileSupport.GetFileIOExceptionHResult(provokeException: (fileName) =>
        {
            using (var lockStream = new FileStream(fileName, FileMode.OpenOrCreate, FileAccess.Write, FileShare.Read, 1, FileOptions.None))
            // Try to get an exclusive lock on the file - this should throw an IOException with the current platform's HResult value for FileShare violation
            using (var stream = new FileStream(fileName, FileMode.Open, FileAccess.Write, FileShare.None, 1, FileOptions.None))
            {
            }
        });
    }

    /// <summary>
    /// Create a <see cref="NativeFSLockFactory"/> instance, with <c>null</c> (unset)
    /// lock directory. When you pass this factory to a <see cref="FSDirectory"/>
    /// subclass, the lock directory is automatically set to the
    /// directory itself. Be sure to create one instance for each directory
    /// you create!
    /// </summary>
    public NativeFSLockFactory()
        : this((DirectoryInfo)null)
    {
    }

    /// <summary>
    /// Create a <see cref="NativeFSLockFactory"/> instance, storing lock
    /// files into the specified <paramref name="lockDirName"/>
    /// </summary>
    /// <param name="lockDirName"> where lock files are created. </param>
    public NativeFSLockFactory(string lockDirName)
        : this(new DirectoryInfo(lockDirName))
    {
    }

    /// <summary>
    /// Create a <see cref="NativeFSLockFactory"/> instance, storing lock
    /// files into the specified <paramref name="lockDir"/>
    /// </summary>
    /// <param name="lockDir"> where lock files are created. </param>
    public NativeFSLockFactory(DirectoryInfo lockDir)
    {
        SetLockDir(lockDir);
    }

    // LUCENENET: NativeFSLocks in Java are in fact singletons; this is how we mimic that to track instances and make sure
    // IW.Unlock and IW.IsLocked works correctly
    internal static readonly Dictionary<string, Lock> _locks = new Dictionary<string, Lock>();

    /// <summary>
    /// Given a lock name, return the full prefixed path of the actual lock file.
    /// </summary>
    /// <param name="lockName">the short lock name (the configured prefix, if any, is prepended)</param>
    /// <returns>the canonical absolute path of the lock file</returns>
    private string GetCanonicalPathOfLockFile(string lockName)
    {
        if (m_lockPrefix != null)
        {
            lockName = m_lockPrefix + "-" + lockName;
        }
        return new FileInfo(Path.Combine(m_lockDir.FullName, lockName)).GetCanonicalPath();
    }

    // Returns the single shared Lock instance per canonical path (singleton semantics).
    public override Lock MakeLock(string lockName)
    {
        var path = GetCanonicalPathOfLockFile(lockName);
        Lock l;
        lock (_locks)
            if (!_locks.TryGetValue(path, out l))
                _locks.Add(path, l = NewLock(path));
        return l;
    }

    // Internal for testing
    internal virtual Lock NewLock(string path)
    {
        switch (LockingStrategy)
        {
            case FSLockingStrategy.FileStreamLockViolation:
                return new NativeFSLock(m_lockDir, path);
            case FSLockingStrategy.FileSharingViolation:
                return new SharingNativeFSLock(m_lockDir, path);
            default:
                // Fallback implementation for unknown platforms that don't rely on HResult
                return new FallbackNativeFSLock(m_lockDir, path);
        }
    }

    public override void ClearLock(string lockName)
    {
        var path = GetCanonicalPathOfLockFile(lockName);
        // this is the reason why we can't use ConcurrentDictionary: we need the removal and disposal of the lock to be atomic
        // otherwise it may clash with MakeLock making a lock and ClearLock disposing of it in another thread.
        lock (_locks)
            if (_locks.TryGetValue(path, out Lock l))
            {
                _locks.Remove(path);
                l.Dispose();
            }
    }
}
// LUCENENET NOTE: We use this implementation as a fallback for platforms that we don't
// know the HResult numbers for lock and file sharing errors.
//
// Note that using NativeFSLock would be ideal for all platforms. However, there is a
// small chance that provoking lock/share exceptions will fail. In that rare case, we
// fallback to this substandard implementation.
//
// Reference: https://stackoverflow.com/q/46380483
internal class FallbackNativeFSLock : Lock
{
    private FileStream channel;             // non-null while this instance holds the lock
    private readonly string path;           // canonical path of the lock file
    private readonly DirectoryInfo lockDir; // directory that contains the lock file

    public FallbackNativeFSLock(DirectoryInfo lockDir, string path)
    {
        this.lockDir = lockDir;
        this.path = path;
    }

    /// <summary>
    /// Attempts to acquire the lock by creating the lock file and holding it open for write.
    /// Returns <c>false</c> if this instance already holds the lock or the file could not
    /// be opened; the cause, if any, is recorded in <c>FailureReason</c>.
    /// </summary>
    public override bool Obtain()
    {
        lock (this)
        {
            FailureReason = null;
            if (channel != null)
            {
                // Our instance is already locked:
                return false;
            }
            if (!System.IO.Directory.Exists(lockDir.FullName))
            {
                try
                {
                    System.IO.Directory.CreateDirectory(lockDir.FullName);
                }
                catch (Exception e)
                {
                    // Several processes might race to create the same directory; if one of
                    // them succeeded the directory exists and the failure is benign. Otherwise
                    // report it with the original cause attached. (This matches the behavior
                    // of SharingNativeFSLock and NativeFSLock, which previously differed from
                    // this class by tolerating the race and preserving the inner exception.)
                    if (!System.IO.Directory.Exists(lockDir.FullName))
                        throw new IOException("Cannot create directory: " + lockDir.FullName, e);
                }
            }
            else if (File.Exists(lockDir.FullName))
            {
                throw new IOException("Found regular file where directory expected: " + lockDir.FullName);
            }
            var success = false;
            try
            {
                // LUCENENET: Allow read access for the RAMDirectory to be able to copy the lock file.
                channel = new FileStream(path, FileMode.Create, FileAccess.Write, FileShare.Read);
                success = true;
            }
            catch (IOException e)
            {
                FailureReason = e;
            }
            // LUCENENET: UnauthorizedAccessException does not derive from IOException like in java
            catch (UnauthorizedAccessException e)
            {
                // At least on OS X, we will sometimes get an
                // intermittent "Permission Denied" Exception,
                // which seems to simply mean "you failed to get
                // the lock". But other IOExceptions could be
                // "permanent" (eg, locking is not supported via
                // the filesystem). So, we record the failure
                // reason here; the timeout obtain (usually the
                // one calling us) will use this as "root cause"
                // if it fails to get the lock.
                FailureReason = e;
            }
            finally
            {
                if (!success)
                {
                    IOUtils.DisposeWhileHandlingException(channel);
                    channel = null;
                }
            }
            return channel != null;
        }
    }

    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            lock (this)
            {
                // whether or not we have created a file, we need to remove
                // the lock instance from the dictionary that tracks them.
                try
                {
                    lock (NativeFSLockFactory._locks)
                        NativeFSLockFactory._locks.Remove(path);
                }
                finally
                {
                    if (channel != null)
                    {
                        IOUtils.DisposeWhileHandlingException(channel);
                        channel = null;
                        // Unlike the other strategies, the fallback must delete the lock
                        // file itself (the stream is not opened with DeleteOnClose here).
                        bool tmpBool;
                        if (File.Exists(path))
                        {
                            File.Delete(path);
                            tmpBool = true;
                        }
                        else if (System.IO.Directory.Exists(path))
                        {
                            System.IO.Directory.Delete(path);
                            tmpBool = true;
                        }
                        else
                        {
                            tmpBool = false;
                        }
                        if (!tmpBool)
                        {
                            throw new LockReleaseFailedException("failed to delete " + path);
                        }
                    }
                }
            }
        }
    }

    /// <summary>
    /// Checks whether any process holds this lock by attempting to obtain and
    /// (if successful) immediately release it.
    /// </summary>
    public override bool IsLocked()
    {
        lock (this)
        {
            // A direct isLocked test is not possible with native file locks:
            // First a shortcut, if a lock reference in this instance is available
            if (channel != null)
            {
                return true;
            }
            // Look if lock file is present; if not, there can definitely be no lock!
            bool tmpBool;
            if (System.IO.File.Exists(path))
                tmpBool = true;
            else
                tmpBool = System.IO.Directory.Exists(path);
            if (!tmpBool)
                return false;
            // Try to obtain and release (if was locked) the lock
            try
            {
                bool obtained = Obtain();
                if (obtained)
                {
                    Dispose();
                }
                return !obtained;
            }
            catch (IOException)
            {
                return false;
            }
        }
    }

    public override string ToString()
    {
        return $"{nameof(FallbackNativeFSLock)}@{path}";
    }
}
// Locks the entire file. macOS requires this approach.
internal class SharingNativeFSLock : Lock
{
    private FileStream channel;             // non-null while this instance holds the lock
    private readonly string path;           // canonical path of the lock file
    private readonly DirectoryInfo lockDir; // directory that contains the lock file

    public SharingNativeFSLock(DirectoryInfo lockDir, string path)
    {
        this.lockDir = lockDir;
        this.path = path;
    }

    /// <summary>
    /// Return true if the <see cref="IOException"/> is the result of a share violation
    /// </summary>
    private bool IsShareViolation(IOException e)
    {
        return e.HResult == NativeFSLockFactory.HRESULT_FILE_SHARE_VIOLATION;
    }

    // Opens (or creates) the lock file. OpenOrCreate is used when acquiring the lock
    // (held open with FileShare.Read + DeleteOnClose); Open with FileShare.None is used
    // by IsLocked() so that an existing holder triggers a share violation.
    private FileStream GetLockFileStream(FileMode mode)
    {
        if (!System.IO.Directory.Exists(lockDir.FullName))
        {
            try
            {
                System.IO.Directory.CreateDirectory(lockDir.FullName);
            }
            catch (Exception e)
            {
                // note that several processes might have been trying to create the same directory at the same time.
                // if one succeeded, the directory will exist and the exception can be ignored. In all other cases we should report it.
                if (!System.IO.Directory.Exists(lockDir.FullName))
                    throw new IOException("Cannot create directory: " + lockDir.FullName, e);
            }
        }
        else if (File.Exists(lockDir.FullName))
        {
            throw new IOException("Found regular file where directory expected: " + lockDir.FullName);
        }
        return new FileStream(
            path,
            mode,
            FileAccess.Write,
            // LUCENENET: Allow read access of OpenOrCreate for the RAMDirectory to be able to copy the lock file.
            // For the Open case, set to FileShare.None to force a file share exception in IsLocked().
            share: mode == FileMode.Open ? FileShare.None : FileShare.Read,
            bufferSize: 1,
            options: mode == FileMode.Open ? FileOptions.None : FileOptions.DeleteOnClose);
    }

    /// <summary>
    /// Attempts to acquire the lock by holding the lock file open. A share violation
    /// means another holder already has the lock (not recorded as a failure).
    /// </summary>
    public override bool Obtain()
    {
        lock (this)
        {
            FailureReason = null;
            if (channel != null)
            {
                // Our instance is already locked:
                return false;
            }
            try
            {
                channel = GetLockFileStream(FileMode.OpenOrCreate);
            }
            catch (IOException e) when (IsShareViolation(e))
            {
                // no failure reason to be recorded, since this is the expected error if a lock exists
            }
            catch (IOException e)
            {
                FailureReason = e;
            }
            // LUCENENET: UnauthorizedAccessException does not derive from IOException like in java
            catch (UnauthorizedAccessException e)
            {
                // At least on OS X, we will sometimes get an
                // intermittent "Permission Denied" Exception,
                // which seems to simply mean "you failed to get
                // the lock". But other IOExceptions could be
                // "permanent" (eg, locking is not supported via
                // the filesystem). So, we record the failure
                // reason here; the timeout obtain (usually the
                // one calling us) will use this as "root cause"
                // if it fails to get the lock.
                FailureReason = e;
            }
            return channel != null;
        }
    }

    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            lock (this)
            {
                // whether or not we have created a file, we need to remove
                // the lock instance from the dictionary that tracks them.
                try
                {
                    lock (NativeFSLockFactory._locks)
                        NativeFSLockFactory._locks.Remove(path);
                }
                finally
                {
                    // Closing the stream releases the lock; the file itself is removed by
                    // the OS because it was opened with FileOptions.DeleteOnClose.
                    if (channel != null)
                    {
                        try
                        {
                            IOUtils.DisposeWhileHandlingException(channel);
                        }
                        finally
                        {
                            channel = null;
                        }
                    }
                }
            }
        }
    }

    /// <summary>
    /// Checks whether any process holds the lock by attempting an exclusive open;
    /// a share violation means the lock is held.
    /// </summary>
    public override bool IsLocked()
    {
        lock (this)
        {
            // First a shortcut, if a lock reference in this instance is available
            if (channel != null)
            {
                return true;
            }
            try
            {
                using (var stream = GetLockFileStream(FileMode.Open))
                {
                }
                return false;
            }
            catch (IOException e) when (IsShareViolation(e))
            {
                return true;
            }
            catch (FileNotFoundException)
            {
                // if the file doesn't exist, there can be no lock active
                return false;
            }
        }
    }

    public override string ToString()
    {
        return $"{nameof(SharingNativeFSLock)}@{path}";
    }
}
// Uses FileStream locking of file pages.
internal class NativeFSLock : Lock
{
    private FileStream channel;             // non-null while this instance holds the lock
    private readonly string path;           // canonical path of the lock file
    private readonly DirectoryInfo lockDir; // directory that contains the lock file

    public NativeFSLock(DirectoryInfo lockDir, string path)
    {
        this.lockDir = lockDir;
        this.path = path;
    }

    /// <summary>
    /// Return true if the <see cref="IOException"/> is the result of a lock violation
    /// </summary>
    private bool IsLockViolation(IOException e)
    {
        return e.HResult == NativeFSLockFactory.HRESULT_FILE_LOCK_VIOLATION;
    }

    // Opens (or creates) the lock file with permissive sharing; mutual exclusion is
    // achieved via FileStream.Lock on the first byte, not via FileShare settings.
    private FileStream GetLockFileStream(FileMode mode)
    {
        if (!System.IO.Directory.Exists(lockDir.FullName))
        {
            try
            {
                System.IO.Directory.CreateDirectory(lockDir.FullName);
            }
            catch (Exception e)
            {
                // note that several processes might have been trying to create the same directory at the same time.
                // if one succeeded, the directory will exist and the exception can be ignored. In all other cases we should report it.
                if (!System.IO.Directory.Exists(lockDir.FullName))
                    throw new IOException("Cannot create directory: " + lockDir.FullName, e);
            }
        }
        else if (File.Exists(lockDir.FullName))
        {
            throw new IOException("Found regular file where directory expected: " + lockDir.FullName);
        }
        return new FileStream(path, mode, FileAccess.Write, FileShare.ReadWrite);
    }

    /// <summary>
    /// Attempts to acquire the lock by taking an exclusive byte-range lock on the
    /// first byte of the lock file.
    /// </summary>
    public override bool Obtain()
    {
        lock (this)
        {
            FailureReason = null;
            if (channel != null)
            {
                // Our instance is already locked:
                return false;
            }
            FileStream stream = null;
            try
            {
                stream = GetLockFileStream(FileMode.OpenOrCreate);
            }
            catch (IOException e)
            {
                FailureReason = e;
            }
            // LUCENENET: UnauthorizedAccessException does not derive from IOException like in java
            catch (UnauthorizedAccessException e)
            {
                // At least on OS X, we will sometimes get an
                // intermittent "Permission Denied" Exception,
                // which seems to simply mean "you failed to get
                // the lock". But other IOExceptions could be
                // "permanent" (eg, locking is not supported via
                // the filesystem). So, we record the failure
                // reason here; the timeout obtain (usually the
                // one calling us) will use this as "root cause"
                // if it fails to get the lock.
                FailureReason = e;
            }
            if (stream != null)
            {
                try
                {
                    stream.Lock(0, 1);
                    // only assign the channel if the lock succeeds
                    channel = stream;
                }
                catch (Exception e)
                {
                    FailureReason = e;
                    IOUtils.DisposeWhileHandlingException(stream);
                }
            }
            return channel != null;
        }
    }

    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            lock (this)
            {
                // whether or not we have created a file, we need to remove
                // the lock instance from the dictionary that tracks them.
                try
                {
                    lock (NativeFSLockFactory._locks)
                        NativeFSLockFactory._locks.Remove(path);
                }
                finally
                {
                    // Closing the stream releases the OS byte-range lock.
                    if (channel != null)
                    {
                        try
                        {
                            IOUtils.DisposeWhileHandlingException(channel);
                        }
                        finally
                        {
                            channel = null;
                        }
                        // try to delete the file if we created it, but it's not an error if we can't.
                        try
                        {
                            File.Delete(path);
                        }
                        catch
                        {
                        }
                    }
                }
            }
        }
    }

    /// <summary>
    /// Checks whether any process holds the lock by writing into the (possibly locked)
    /// byte range; a lock violation means the lock is held.
    /// </summary>
    public override bool IsLocked()
    {
        lock (this)
        {
            // First a shortcut, if a lock reference in this instance is available
            if (channel != null)
            {
                return true;
            }
            try
            {
                using (var stream = GetLockFileStream(FileMode.Open))
                {
                    // try to find out if the file is locked by writing a byte. Note that we need to flush the stream to find out.
                    stream.WriteByte(0);
                    stream.Flush(); // this *may* throw an IOException if the file is locked, but...
                                    // ... closing the stream is the real test
                }
                return false;
            }
            catch (IOException e) when (IsLockViolation(e))
            {
                return true;
            }
            catch (FileNotFoundException)
            {
                // if the file doesn't exist, there can be no lock active
                return false;
            }
        }
    }

    public override string ToString()
    {
        return $"{nameof(NativeFSLock)}@{path}";
    }
}
#if !FEATURE_FILESTREAM_LOCK
internal static class FileStreamExtensions
{
    // Dummy lock method to ensure we can compile even if the feature is unavailable.
    // No-op: on these target frameworks LockingStrategy never selects
    // FileStreamLockViolation, so this extension is never used to acquire a real lock.
    public static void Lock(this FileStream stream, long position, long length)
    {
    }
}
#endif
}
| |
#region Copyright
/*Copyright (C) 2015 Konstantin Udilovich
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Kodestruct.Analysis.BeamForces.Simple
{
/// <summary>
/// Simply supported beam under a triangular distributed load that increases linearly
/// from zero at the left support to its peak intensity <c>w</c> at the right end.
/// </summary>
public class DistributedUniformlyIncreasingToEnd : ISingleLoadCaseBeam, ISingleLoadCaseDeflectionBeam
{
    // Loading-case ID used for report entries created through BeamEntryFactory.
    const string CASE = "C1D_1";

    BeamSimple beam;          // beam geometry/material data and evaluation state
    double w;                 // peak load intensity (at the far end of the span)
    double L;                 // span length
    double W;                 // resultant (total) load, W = w * L / 2
    double E, I;              // modulus of elasticity and moment of inertia
    bool ResultantCalculated; // true once W has been computed

    ForceDataPoint _Mx;       // cached maximum-moment data point

    /// <summary>
    /// Maximum bending moment (lazily calculated and cached).
    /// </summary>
    public ForceDataPoint Mmax
    {
        get
        {
            if (_Mx == null)
            {
                _Mx = CalculateMmax();
            }
            return _Mx;
        }
    }

    public DistributedUniformlyIncreasingToEnd(BeamSimple beam, double w)
    {
        this.beam = beam;
        L = beam.Length;
        this.w = w;
        E = beam.ModulusOfElasticity;
        I = beam.MomentOfInertia;
        ResultantCalculated = false;
    }

    /// <summary>
    /// Bending moment at distance <paramref name="X"/> from the left support:
    /// M(x) = w*x/(6L) * (L^2 - x^2).
    /// </summary>
    public double Moment(double X)
    {
        beam.EvaluateX(X);
        if (ResultantCalculated == false)
        {
            CalculateResultantLoad();
        }
        // LUCENENET-style note removed: a dead "Mtest" verification variable previously
        // recomputed this expression in terms of W; the formula below is unchanged.
        double M = w * X / (6.0 * L) * (Math.Pow(L, 2.0) - Math.Pow(X, 2.0));
        BeamEntryFactory.CreateEntry("Mx", M, BeamTemplateType.Mx, 1,
            new Dictionary<string, double>()
            {
                {"L",L },
                {"X",X },
                {"w",w },
            }, CASE, beam);
        return M;
    }

    /// <summary>
    /// Maximum positive moment; for a downward (w &gt;= 0) load this is Mmax,
    /// otherwise zero (no positive moment exists).
    /// </summary>
    public ForceDataPoint MomentMax()
    {
        if (w >= 0)
        {
            //Mmax = 2 * w * Math.Pow(L,2) / (9.0 * Math.Sqrt(3.0));
            BeamEntryFactory.CreateEntry("Mx", Mmax.Value, BeamTemplateType.Mmax, 1,
                new Dictionary<string, double>()
                {
                    {"L",L },
                    {"X",Mmax.X },
                    {"w",w },
                }, CASE, beam, true);
            return Mmax;
        }
        else
        {
            BeamEntryFactory.CreateEntry("Mx", 0.0, BeamTemplateType.M_zero, 0,
                null, CASE, beam, true);
            return new ForceDataPoint(0.0, 0.0);
        }
    }

    /// <summary>
    /// Minimum (most negative) moment; nonzero only when the load acts upward (w &lt; 0).
    /// </summary>
    public ForceDataPoint MomentMin()
    {
        if (w >= 0)
        {
            BeamEntryFactory.CreateEntry("Mx", 0.0, BeamTemplateType.M_zero, 0,
                null, CASE, beam, true);
            return new ForceDataPoint(0.0, 0.0);
        }
        else
        {
            BeamEntryFactory.CreateEntry("Mx", Mmax.Value, BeamTemplateType.Mmax, 1,
                new Dictionary<string, double>()
                {
                    {"L",L },
                    {"X",Mmax.X },
                    {"w",w }
                }, CASE, beam, false, true);
            return Mmax;
        }
    }

    /// <summary>
    /// Shear at distance <paramref name="X"/> from the left support:
    /// V(x) = w*L/6 - w*x^2/(2L).
    /// </summary>
    public double Shear(double X)
    {
        beam.EvaluateX(X);
        if (ResultantCalculated == false)
        {
            CalculateResultantLoad();
        }
        // Removed: a local "W" that shadowed the resultant-load field, and a dead
        // "Vtest" verification variable. The shear expression below is unchanged.
        double V = w * L / 6.0 - (w * L * Math.Pow(X, 2.0)) / (2 * Math.Pow(L, 2.0));
        BeamEntryFactory.CreateEntry("Vx", V, BeamTemplateType.Vx, 1,
            new Dictionary<string, double>()
            {
                {"L",L },
                {"X",X },
                {"w",w },
            }, CASE, beam);
        return V;
    }

    /// <summary>
    /// Maximum shear, which occurs at the loaded end: Vmax = w*L/3 (the reaction at x = L).
    /// </summary>
    public ForceDataPoint ShearMax()
    {
        // Removed: a dead "VmaxTest" variable that referenced the resultant field W
        // without guaranteeing it had been calculated. Vmax = 2W/3 = w*L/3.
        double Vmax = w * L / 3.0;
        BeamEntryFactory.CreateEntry("Vx", Vmax, BeamTemplateType.Vmax, 1,
            new Dictionary<string, double>()
            {
                {"L",L },
                {"w",w }
            }, CASE, beam, true);
        return new ForceDataPoint(L, Vmax);
    }

    // Computes the resultant (total) triangular load W = w*L/2 once.
    private void CalculateResultantLoad()
    {
        W = L * w / 2.0;
        ResultantCalculated = true;
    }

    // Maximum moment Mmax = w*L^2 / (9*sqrt(3)) occurring at x = L/sqrt(3).
    private ForceDataPoint CalculateMmax()
    {
        double Mmax = w * Math.Pow(L, 2) / (9.0 * Math.Sqrt(3.0));
        double Xmax = L / Math.Sqrt(3.0);
        return new ForceDataPoint(Xmax, Mmax);
    }

    /// <summary>
    /// Maximum deflection: delta = 0.00652 * w*L^4 / (E*I)
    /// (standard beam-table coefficient for this load case).
    /// </summary>
    public double MaximumDeflection()
    {
        double E = beam.ModulusOfElasticity;
        double I = beam.MomentOfInertia;
        double delta = 0.00652 * ((w * Math.Pow(L, 4)) / (E * I));
        BeamEntryFactory.CreateEntry("delta", delta, BeamTemplateType.deltaMax, 0,
            new Dictionary<string, double>()
            {
                {"w",w },
                {"E",E},
                {"I",I },
                {"L",L}
            }, CASE, beam);
        return delta;
    }

    /// <summary>
    /// Deflection at distance <paramref name="X"/>:
    /// delta(x) = w*x/(360*E*I*L) * (3x^4 - 10L^2x^2 + 7L^4).
    /// </summary>
    public double Deflection(double X)
    {
        double E = beam.ModulusOfElasticity;
        double I = beam.MomentOfInertia;
        double delta = ((w * X) / (360 * E * I * L)) * (3 * Math.Pow(X, 4) - 10 * L * L * X * X + 7 * Math.Pow(L, 4));
        BeamEntryFactory.CreateEntry("delta", delta, BeamTemplateType.delta, 0,
            new Dictionary<string, double>()
            {
                {"X",X },
                {"w",w },
                {"E",E},
                {"I",I },
                {"L",L}
            }, CASE, beam);
        return delta;
    }
}
}
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gax = Google.Api.Gax;
using gaxgrpc = Google.Api.Gax.Grpc;
using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore;
using proto = Google.Protobuf;
using grpccore = Grpc.Core;
using grpcinter = Grpc.Core.Interceptors;
using sys = System;
using scg = System.Collections.Generic;
using sco = System.Collections.ObjectModel;
using st = System.Threading;
using stt = System.Threading.Tasks;
namespace Google.Ads.GoogleAds.V10.Services
{
    /// <summary>Settings for <see cref="RecommendationServiceClient"/> instances.</summary>
    public sealed partial class RecommendationServiceSettings : gaxgrpc::ServiceSettingsBase
    {
        /// <summary>Get a new instance of the default <see cref="RecommendationServiceSettings"/>.</summary>
        /// <returns>A new instance of the default <see cref="RecommendationServiceSettings"/>.</returns>
        public static RecommendationServiceSettings GetDefault() => new RecommendationServiceSettings();
        /// <summary>
        /// Constructs a new <see cref="RecommendationServiceSettings"/> object with default settings.
        /// </summary>
        public RecommendationServiceSettings()
        {
        }
        // Copy constructor used by Clone(): copies each per-RPC setting, then lets
        // hand-written partial classes participate via OnCopy.
        private RecommendationServiceSettings(RecommendationServiceSettings existing) : base(existing)
        {
            gax::GaxPreconditions.CheckNotNull(existing, nameof(existing));
            ApplyRecommendationSettings = existing.ApplyRecommendationSettings;
            DismissRecommendationSettings = existing.DismissRecommendationSettings;
            OnCopy(existing);
        }
        // Extension point for hand-written partial classes to copy additional settings.
        partial void OnCopy(RecommendationServiceSettings existing);
        /// <summary>
        /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
        /// <c>RecommendationServiceClient.ApplyRecommendation</c> and
        /// <c>RecommendationServiceClient.ApplyRecommendationAsync</c>.
        /// </summary>
        /// <remarks>
        /// <list type="bullet">
        /// <item><description>Initial retry delay: 5000 milliseconds.</description></item>
        /// <item><description>Retry delay multiplier: 1.3</description></item>
        /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
        /// <item><description>Maximum attempts: Unlimited</description></item>
        /// <item>
        /// <description>
        /// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>,
        /// <see cref="grpccore::StatusCode.DeadlineExceeded"/>.
        /// </description>
        /// </item>
        /// <item><description>Timeout: 3600 seconds.</description></item>
        /// </list>
        /// </remarks>
        public gaxgrpc::CallSettings ApplyRecommendationSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded)));
        /// <summary>
        /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
        /// <c>RecommendationServiceClient.DismissRecommendation</c> and
        /// <c>RecommendationServiceClient.DismissRecommendationAsync</c>.
        /// </summary>
        /// <remarks>
        /// <list type="bullet">
        /// <item><description>Initial retry delay: 5000 milliseconds.</description></item>
        /// <item><description>Retry delay multiplier: 1.3</description></item>
        /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
        /// <item><description>Maximum attempts: Unlimited</description></item>
        /// <item>
        /// <description>
        /// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>,
        /// <see cref="grpccore::StatusCode.DeadlineExceeded"/>.
        /// </description>
        /// </item>
        /// <item><description>Timeout: 3600 seconds.</description></item>
        /// </list>
        /// </remarks>
        public gaxgrpc::CallSettings DismissRecommendationSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded)));
        /// <summary>Creates a deep clone of this object, with all the same property values.</summary>
        /// <returns>A deep clone of this <see cref="RecommendationServiceSettings"/> object.</returns>
        public RecommendationServiceSettings Clone() => new RecommendationServiceSettings(this);
    }
    /// <summary>
    /// Builder class for <see cref="RecommendationServiceClient"/> to provide simple configuration of credentials,
    /// endpoint etc.
    /// </summary>
    internal sealed partial class RecommendationServiceClientBuilder : gaxgrpc::ClientBuilderBase<RecommendationServiceClient>
    {
        /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary>
        public RecommendationServiceSettings Settings { get; set; }
        /// <summary>Creates a new builder with default settings.</summary>
        public RecommendationServiceClientBuilder()
        {
            UseJwtAccessWithScopes = RecommendationServiceClient.UseJwtAccessWithScopes;
        }
        // Extension points allowing hand-written partials to intercept or replace the built client.
        partial void InterceptBuild(ref RecommendationServiceClient client);
        partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<RecommendationServiceClient> task);
        /// <summary>Builds the resulting client.</summary>
        public override RecommendationServiceClient Build()
        {
            RecommendationServiceClient client = null;
            InterceptBuild(ref client);
            return client ?? BuildImpl();
        }
        /// <summary>Builds the resulting client asynchronously.</summary>
        public override stt::Task<RecommendationServiceClient> BuildAsync(st::CancellationToken cancellationToken = default)
        {
            stt::Task<RecommendationServiceClient> task = null;
            InterceptBuildAsync(cancellationToken, ref task);
            return task ?? BuildAsyncImpl(cancellationToken);
        }
        // Synchronous build path: validate the builder state, create the call invoker, create the client.
        private RecommendationServiceClient BuildImpl()
        {
            Validate();
            grpccore::CallInvoker callInvoker = CreateCallInvoker();
            return RecommendationServiceClient.Create(callInvoker, Settings);
        }
        // Asynchronous build path: validate the builder state, create the call invoker, create the client.
        private async stt::Task<RecommendationServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken)
        {
            Validate();
            grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false);
            return RecommendationServiceClient.Create(callInvoker, Settings);
        }
        /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary>
        protected override string GetDefaultEndpoint() => RecommendationServiceClient.DefaultEndpoint;
        /// <summary>
        /// Returns the default scopes for this builder type, used if no scopes are otherwise specified.
        /// </summary>
        protected override scg::IReadOnlyList<string> GetDefaultScopes() => RecommendationServiceClient.DefaultScopes;
        /// <summary>Returns the channel pool to use when no other options are specified.</summary>
        protected override gaxgrpc::ChannelPool GetChannelPool() => RecommendationServiceClient.ChannelPool;
        /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/>to use if not otherwise specified.</summary>
        protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance;
    }
    /// <summary>RecommendationService client wrapper, for convenient use.</summary>
    /// <remarks>
    /// Service to manage recommendations.
    /// </remarks>
    public abstract partial class RecommendationServiceClient
    {
        /// <summary>
        /// The default endpoint for the RecommendationService service, which is a host of "googleads.googleapis.com"
        /// and a port of 443.
        /// </summary>
        public static string DefaultEndpoint { get; } = "googleads.googleapis.com:443";
        /// <summary>The default RecommendationService scopes.</summary>
        /// <remarks>
        /// The default RecommendationService scopes are:
        /// <list type="bullet"><item><description>https://www.googleapis.com/auth/adwords</description></item></list>
        /// </remarks>
        public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[]
        {
            "https://www.googleapis.com/auth/adwords",
        });
        // Channel pool shared by clients created via Create()/CreateAsync();
        // shut down through ShutdownDefaultChannelsAsync().
        internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes);
        // Defaults to true; hand-written partials may override via MaybeUseJwtAccessWithScopes.
        internal static bool UseJwtAccessWithScopes
        {
            get
            {
                bool useJwtAccessWithScopes = true;
                MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes);
                return useJwtAccessWithScopes;
            }
        }
        static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes);
        /// <summary>
        /// Asynchronously creates a <see cref="RecommendationServiceClient"/> using the default credentials, endpoint
        /// and settings. To specify custom credentials or other settings, use
        /// <see cref="RecommendationServiceClientBuilder"/>.
        /// </summary>
        /// <param name="cancellationToken">
        /// The <see cref="st::CancellationToken"/> to use while creating the client.
        /// </param>
        /// <returns>The task representing the created <see cref="RecommendationServiceClient"/>.</returns>
        public static stt::Task<RecommendationServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) =>
            new RecommendationServiceClientBuilder().BuildAsync(cancellationToken);
        /// <summary>
        /// Synchronously creates a <see cref="RecommendationServiceClient"/> using the default credentials, endpoint
        /// and settings. To specify custom credentials or other settings, use
        /// <see cref="RecommendationServiceClientBuilder"/>.
        /// </summary>
        /// <returns>The created <see cref="RecommendationServiceClient"/>.</returns>
        public static RecommendationServiceClient Create() => new RecommendationServiceClientBuilder().Build();
        /// <summary>
        /// Creates a <see cref="RecommendationServiceClient"/> which uses the specified call invoker for remote
        /// operations.
        /// </summary>
        /// <param name="callInvoker">
        /// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null.
        /// </param>
        /// <param name="settings">Optional <see cref="RecommendationServiceSettings"/>.</param>
        /// <returns>The created <see cref="RecommendationServiceClient"/>.</returns>
        internal static RecommendationServiceClient Create(grpccore::CallInvoker callInvoker, RecommendationServiceSettings settings = null)
        {
            gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker));
            // Wrap the invoker with the settings-supplied interceptor, if any, before building the gRPC client.
            grpcinter::Interceptor interceptor = settings?.Interceptor;
            if (interceptor != null)
            {
                callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor);
            }
            RecommendationService.RecommendationServiceClient grpcClient = new RecommendationService.RecommendationServiceClient(callInvoker);
            return new RecommendationServiceClientImpl(grpcClient, settings);
        }
        /// <summary>
        /// Shuts down any channels automatically created by <see cref="Create()"/> and
        /// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not
        /// affected.
        /// </summary>
        /// <remarks>
        /// After calling this method, further calls to <see cref="Create()"/> and
        /// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down
        /// by another call to this method.
        /// </remarks>
        /// <returns>A task representing the asynchronous shutdown operation.</returns>
        public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync();
        /// <summary>The underlying gRPC RecommendationService client</summary>
        public virtual RecommendationService.RecommendationServiceClient GrpcClient => throw new sys::NotImplementedException();
        /// <summary>
        /// Applies given recommendations with corresponding apply parameters.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [DatabaseError]()
        /// [FieldError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// [UrlFieldError]()
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The RPC response.</returns>
        public virtual ApplyRecommendationResponse ApplyRecommendation(ApplyRecommendationRequest request, gaxgrpc::CallSettings callSettings = null) =>
            throw new sys::NotImplementedException();
        /// <summary>
        /// Applies given recommendations with corresponding apply parameters.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [DatabaseError]()
        /// [FieldError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// [UrlFieldError]()
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<ApplyRecommendationResponse> ApplyRecommendationAsync(ApplyRecommendationRequest request, gaxgrpc::CallSettings callSettings = null) =>
            throw new sys::NotImplementedException();
        /// <summary>
        /// Applies given recommendations with corresponding apply parameters.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [DatabaseError]()
        /// [FieldError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// [UrlFieldError]()
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<ApplyRecommendationResponse> ApplyRecommendationAsync(ApplyRecommendationRequest request, st::CancellationToken cancellationToken) =>
            ApplyRecommendationAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
        /// <summary>
        /// Applies given recommendations with corresponding apply parameters.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [DatabaseError]()
        /// [FieldError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// [UrlFieldError]()
        /// </summary>
        /// <param name="customerId">
        /// Required. The ID of the customer with the recommendation.
        /// </param>
        /// <param name="operations">
        /// Required. The list of operations to apply recommendations.
        /// If partial_failure=false all recommendations should be of the same type
        /// There is a limit of 100 operations per request.
        /// </param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The RPC response.</returns>
        public virtual ApplyRecommendationResponse ApplyRecommendation(string customerId, scg::IEnumerable<ApplyRecommendationOperation> operations, gaxgrpc::CallSettings callSettings = null) =>
            ApplyRecommendation(new ApplyRecommendationRequest
            {
                CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)),
                Operations =
                {
                    gax::GaxPreconditions.CheckNotNull(operations, nameof(operations)),
                },
            }, callSettings);
        /// <summary>
        /// Applies given recommendations with corresponding apply parameters.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [DatabaseError]()
        /// [FieldError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// [UrlFieldError]()
        /// </summary>
        /// <param name="customerId">
        /// Required. The ID of the customer with the recommendation.
        /// </param>
        /// <param name="operations">
        /// Required. The list of operations to apply recommendations.
        /// If partial_failure=false all recommendations should be of the same type
        /// There is a limit of 100 operations per request.
        /// </param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<ApplyRecommendationResponse> ApplyRecommendationAsync(string customerId, scg::IEnumerable<ApplyRecommendationOperation> operations, gaxgrpc::CallSettings callSettings = null) =>
            ApplyRecommendationAsync(new ApplyRecommendationRequest
            {
                CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)),
                Operations =
                {
                    gax::GaxPreconditions.CheckNotNull(operations, nameof(operations)),
                },
            }, callSettings);
        /// <summary>
        /// Applies given recommendations with corresponding apply parameters.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [DatabaseError]()
        /// [FieldError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// [UrlFieldError]()
        /// </summary>
        /// <param name="customerId">
        /// Required. The ID of the customer with the recommendation.
        /// </param>
        /// <param name="operations">
        /// Required. The list of operations to apply recommendations.
        /// If partial_failure=false all recommendations should be of the same type
        /// There is a limit of 100 operations per request.
        /// </param>
        /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<ApplyRecommendationResponse> ApplyRecommendationAsync(string customerId, scg::IEnumerable<ApplyRecommendationOperation> operations, st::CancellationToken cancellationToken) =>
            ApplyRecommendationAsync(customerId, operations, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
        /// <summary>
        /// Dismisses given recommendations.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The RPC response.</returns>
        public virtual DismissRecommendationResponse DismissRecommendation(DismissRecommendationRequest request, gaxgrpc::CallSettings callSettings = null) =>
            throw new sys::NotImplementedException();
        /// <summary>
        /// Dismisses given recommendations.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<DismissRecommendationResponse> DismissRecommendationAsync(DismissRecommendationRequest request, gaxgrpc::CallSettings callSettings = null) =>
            throw new sys::NotImplementedException();
        /// <summary>
        /// Dismisses given recommendations.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<DismissRecommendationResponse> DismissRecommendationAsync(DismissRecommendationRequest request, st::CancellationToken cancellationToken) =>
            DismissRecommendationAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
        /// <summary>
        /// Dismisses given recommendations.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// </summary>
        /// <param name="customerId">
        /// Required. The ID of the customer with the recommendation.
        /// </param>
        /// <param name="operations">
        /// Required. The list of operations to dismiss recommendations.
        /// If partial_failure=false all recommendations should be of the same type
        /// There is a limit of 100 operations per request.
        /// </param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The RPC response.</returns>
        public virtual DismissRecommendationResponse DismissRecommendation(string customerId, scg::IEnumerable<DismissRecommendationRequest.Types.DismissRecommendationOperation> operations, gaxgrpc::CallSettings callSettings = null) =>
            DismissRecommendation(new DismissRecommendationRequest
            {
                CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)),
                Operations =
                {
                    gax::GaxPreconditions.CheckNotNull(operations, nameof(operations)),
                },
            }, callSettings);
        /// <summary>
        /// Dismisses given recommendations.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// </summary>
        /// <param name="customerId">
        /// Required. The ID of the customer with the recommendation.
        /// </param>
        /// <param name="operations">
        /// Required. The list of operations to dismiss recommendations.
        /// If partial_failure=false all recommendations should be of the same type
        /// There is a limit of 100 operations per request.
        /// </param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<DismissRecommendationResponse> DismissRecommendationAsync(string customerId, scg::IEnumerable<DismissRecommendationRequest.Types.DismissRecommendationOperation> operations, gaxgrpc::CallSettings callSettings = null) =>
            DismissRecommendationAsync(new DismissRecommendationRequest
            {
                CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)),
                Operations =
                {
                    gax::GaxPreconditions.CheckNotNull(operations, nameof(operations)),
                },
            }, callSettings);
        /// <summary>
        /// Dismisses given recommendations.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// </summary>
        /// <param name="customerId">
        /// Required. The ID of the customer with the recommendation.
        /// </param>
        /// <param name="operations">
        /// Required. The list of operations to dismiss recommendations.
        /// If partial_failure=false all recommendations should be of the same type
        /// There is a limit of 100 operations per request.
        /// </param>
        /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<DismissRecommendationResponse> DismissRecommendationAsync(string customerId, scg::IEnumerable<DismissRecommendationRequest.Types.DismissRecommendationOperation> operations, st::CancellationToken cancellationToken) =>
            DismissRecommendationAsync(customerId, operations, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
    }
    /// <summary>RecommendationService client wrapper implementation, for convenient use.</summary>
    /// <remarks>
    /// Service to manage recommendations.
    /// </remarks>
    public sealed partial class RecommendationServiceClientImpl : RecommendationServiceClient
    {
        private readonly gaxgrpc::ApiCall<ApplyRecommendationRequest, ApplyRecommendationResponse> _callApplyRecommendation;
        private readonly gaxgrpc::ApiCall<DismissRecommendationRequest, DismissRecommendationResponse> _callDismissRecommendation;
        /// <summary>
        /// Constructs a client wrapper for the RecommendationService service, with the specified gRPC client and
        /// settings.
        /// </summary>
        /// <param name="grpcClient">The underlying gRPC client.</param>
        /// <param name="settings">The base <see cref="RecommendationServiceSettings"/> used within this client.</param>
        public RecommendationServiceClientImpl(RecommendationService.RecommendationServiceClient grpcClient, RecommendationServiceSettings settings)
        {
            GrpcClient = grpcClient;
            RecommendationServiceSettings effectiveSettings = settings ?? RecommendationServiceSettings.GetDefault();
            gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings);
            // For each RPC: build the ApiCall (routing the customer_id request parameter),
            // then run the generic and per-RPC partial modification hooks.
            _callApplyRecommendation = clientHelper.BuildApiCall<ApplyRecommendationRequest, ApplyRecommendationResponse>(grpcClient.ApplyRecommendationAsync, grpcClient.ApplyRecommendation, effectiveSettings.ApplyRecommendationSettings).WithGoogleRequestParam("customer_id", request => request.CustomerId);
            Modify_ApiCall(ref _callApplyRecommendation);
            Modify_ApplyRecommendationApiCall(ref _callApplyRecommendation);
            _callDismissRecommendation = clientHelper.BuildApiCall<DismissRecommendationRequest, DismissRecommendationResponse>(grpcClient.DismissRecommendationAsync, grpcClient.DismissRecommendation, effectiveSettings.DismissRecommendationSettings).WithGoogleRequestParam("customer_id", request => request.CustomerId);
            Modify_ApiCall(ref _callDismissRecommendation);
            Modify_DismissRecommendationApiCall(ref _callDismissRecommendation);
            OnConstruction(grpcClient, effectiveSettings, clientHelper);
        }
        // Extension points for hand-written partial classes to adjust calls and construction.
        partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;
        partial void Modify_ApplyRecommendationApiCall(ref gaxgrpc::ApiCall<ApplyRecommendationRequest, ApplyRecommendationResponse> call);
        partial void Modify_DismissRecommendationApiCall(ref gaxgrpc::ApiCall<DismissRecommendationRequest, DismissRecommendationResponse> call);
        partial void OnConstruction(RecommendationService.RecommendationServiceClient grpcClient, RecommendationServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);
        /// <summary>The underlying gRPC RecommendationService client</summary>
        public override RecommendationService.RecommendationServiceClient GrpcClient { get; }
        // Per-request extension points, invoked before each RPC is issued.
        partial void Modify_ApplyRecommendationRequest(ref ApplyRecommendationRequest request, ref gaxgrpc::CallSettings settings);
        partial void Modify_DismissRecommendationRequest(ref DismissRecommendationRequest request, ref gaxgrpc::CallSettings settings);
        /// <summary>
        /// Applies given recommendations with corresponding apply parameters.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [DatabaseError]()
        /// [FieldError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// [UrlFieldError]()
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The RPC response.</returns>
        public override ApplyRecommendationResponse ApplyRecommendation(ApplyRecommendationRequest request, gaxgrpc::CallSettings callSettings = null)
        {
            Modify_ApplyRecommendationRequest(ref request, ref callSettings);
            return _callApplyRecommendation.Sync(request, callSettings);
        }
        /// <summary>
        /// Applies given recommendations with corresponding apply parameters.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [DatabaseError]()
        /// [FieldError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// [UrlFieldError]()
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public override stt::Task<ApplyRecommendationResponse> ApplyRecommendationAsync(ApplyRecommendationRequest request, gaxgrpc::CallSettings callSettings = null)
        {
            Modify_ApplyRecommendationRequest(ref request, ref callSettings);
            return _callApplyRecommendation.Async(request, callSettings);
        }
        /// <summary>
        /// Dismisses given recommendations.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The RPC response.</returns>
        public override DismissRecommendationResponse DismissRecommendation(DismissRecommendationRequest request, gaxgrpc::CallSettings callSettings = null)
        {
            Modify_DismissRecommendationRequest(ref request, ref callSettings);
            return _callDismissRecommendation.Sync(request, callSettings);
        }
        /// <summary>
        /// Dismisses given recommendations.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [HeaderError]()
        /// [InternalError]()
        /// [QuotaError]()
        /// [RecommendationError]()
        /// [RequestError]()
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public override stt::Task<DismissRecommendationResponse> DismissRecommendationAsync(DismissRecommendationRequest request, gaxgrpc::CallSettings callSettings = null)
        {
            Modify_DismissRecommendationRequest(ref request, ref callSettings);
            return _callDismissRecommendation.Async(request, callSettings);
        }
    }
}
| |
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Data.Entity.Core.Objects;
using System.Data.Entity.Infrastructure;
using System.Threading.Tasks;
using System.Transactions;
using Abp.Dependency;
using Abp.Domain.Uow;
using Abp.EntityFramework.Utils;
using Abp.Reflection;
using Castle.Core.Internal;
using System.Collections.Immutable;
using EntityFramework.DynamicFilters;
namespace Abp.EntityFramework.Uow
{
/// <summary>
/// Implements Unit of work for Entity Framework.
/// </summary>
public class EfUnitOfWork : UnitOfWorkBase, ITransientDependency
{
        // Active DbContext instances for this unit of work, keyed by DbContext type full name.
        protected IDictionary<string, DbContext> ActiveDbContexts { get; private set; }
        // Used to release resolved DbContext instances back to the container on dispose.
        protected IIocResolver IocResolver { get; private set; }
        // Ambient transaction scope; null when the unit of work is not transactional.
        protected TransactionScope CurrentTransaction;
        // Resolves DbContext instances on demand (see GetOrCreateDbContext).
        private readonly IDbContextResolver _dbContextResolver;
        /// <summary>
        /// Creates a new <see cref="EfUnitOfWork"/>.
        /// </summary>
        /// <param name="iocResolver">IoC resolver, used to release DbContext instances on dispose.</param>
        /// <param name="dbContextResolver">Resolves DbContext instances.</param>
        /// <param name="filterExecuter">Passed to the base unit of work for filter execution.</param>
        /// <param name="defaultOptions">Default unit of work options, passed to the base class.</param>
        /// <param name="dbContextTypeMatcher">
        /// NOTE(review): accepted but not stored or used in this constructor — confirm whether intentional.
        /// </param>
        public EfUnitOfWork(
            IIocResolver iocResolver,
            IDbContextResolver dbContextResolver,
            IEfUnitOfWorkFilterExecuter filterExecuter,
            IUnitOfWorkDefaultOptions defaultOptions,
            IDbContextTypeMatcher dbContextTypeMatcher)
            : base(
                defaultOptions,
                filterExecuter)
        {
            IocResolver = iocResolver;
            _dbContextResolver = dbContextResolver;
            ActiveDbContexts = new Dictionary<string, DbContext>();
        }
protected override void BeginUow()
{
if (Options.IsTransactional == true)
{
var transactionOptions = new TransactionOptions
{
IsolationLevel = Options.IsolationLevel.GetValueOrDefault(IsolationLevel.ReadUncommitted),
};
if (Options.Timeout.HasValue)
{
transactionOptions.Timeout = Options.Timeout.Value;
}
CurrentTransaction = new TransactionScope(
Options.Scope.GetValueOrDefault(TransactionScopeOption.Required),
transactionOptions,
Options.AsyncFlowOption.GetValueOrDefault(TransactionScopeAsyncFlowOption.Enabled)
);
}
}
public override void SaveChanges()
{
ActiveDbContexts.Values.ForEach(SaveChangesInDbContext);
}
public override async Task SaveChangesAsync()
{
foreach (var dbContext in ActiveDbContexts.Values)
{
await SaveChangesInDbContextAsync(dbContext);
}
}
public IReadOnlyList<DbContext> GetAllActiveDbContexts()
{
return ActiveDbContexts.Values.ToImmutableList();
}
protected override void CompleteUow()
{
SaveChanges();
if (CurrentTransaction != null)
{
CurrentTransaction.Complete();
}
DisposeUow();
}
protected override async Task CompleteUowAsync()
{
await SaveChangesAsync();
if (CurrentTransaction != null)
{
CurrentTransaction.Complete();
}
DisposeUow();
}
protected override void ApplyDisableFilter(string filterName)
{
foreach (var activeDbContext in ActiveDbContexts.Values)
{
activeDbContext.DisableFilter(filterName);
}
}
protected override void ApplyEnableFilter(string filterName)
{
foreach (var activeDbContext in ActiveDbContexts.Values)
{
activeDbContext.EnableFilter(filterName);
}
}
/// <summary>
/// Pushes a filter parameter value to every active DbContext. A <c>Func&lt;object&gt;</c>
/// value is passed through the delegate overload so it is re-evaluated on use instead
/// of being captured as a constant.
/// </summary>
protected override void ApplyFilterParameterValue(string filterName, string parameterName, object value)
{
    foreach (var dbContext in ActiveDbContexts.Values)
    {
        if (TypeHelper.IsFunc<object>(value))
        {
            dbContext.SetFilterScopedParameterValue(filterName, parameterName, (Func<object>)value);
            continue;
        }

        dbContext.SetFilterScopedParameterValue(filterName, parameterName, value);
    }
}
/// <summary>
/// Gets the DbContext of type <typeparamref name="TDbContext"/> for this unit of work,
/// resolving and configuring it on first use and caching it for later calls.
/// </summary>
public virtual TDbContext GetOrCreateDbContext<TDbContext>()
where TDbContext : DbContext
{
// One context instance per concrete type, cached for the lifetime of the unit of work.
var dbContextKey = typeof (TDbContext).FullName;
DbContext dbContext;
if (!ActiveDbContexts.TryGetValue(dbContextKey, out dbContext))
{
dbContext = _dbContextResolver.Resolve<TDbContext>();
// Normalize DateTime.Kind values on every entity EF materializes for this context.
((IObjectContextAdapter)dbContext).ObjectContext.ObjectMaterialized += ObjectContext_ObjectMaterialized;
// Replay the unit of work's current filter state onto the new context so it
// matches contexts that were created earlier.
foreach (var filter in Filters)
{
if (filter.IsEnabled)
{
dbContext.EnableFilter(filter.FilterName);
}
else
{
dbContext.DisableFilter(filter.FilterName);
}
foreach (var filterParameter in filter.FilterParameters)
{
// Same convention as ApplyFilterParameterValue: Func<object> values go through
// the delegate overload, everything else is applied as a constant.
if (TypeHelper.IsFunc<object>(filterParameter.Value))
{
dbContext.SetFilterScopedParameterValue(filter.FilterName, filterParameter.Key, (Func<object>)filterParameter.Value);
}
else
{
dbContext.SetFilterScopedParameterValue(filter.FilterName, filterParameter.Key, filterParameter.Value);
}
}
}
ActiveDbContexts[dbContextKey] = dbContext;
}
return (TDbContext)dbContext;
}
/// <summary>
/// Tears the unit of work down: releases every tracked DbContext, then disposes
/// and clears the ambient transaction if one exists.
/// </summary>
protected override void DisposeUow()
{
    foreach (var dbContext in ActiveDbContexts.Values)
    {
        Release(dbContext);
    }
    ActiveDbContexts.Clear();

    if (CurrentTransaction == null)
    {
        return;
    }

    CurrentTransaction.Dispose();
    CurrentTransaction = null;
}
/// <summary>
/// Saves pending changes of a single DbContext. Override to customize how one context is flushed.
/// </summary>
protected virtual void SaveChangesInDbContext(DbContext dbContext)
{
dbContext.SaveChanges();
}
/// <summary>
/// Asynchronously saves pending changes of a single DbContext. Override to customize flushing.
/// </summary>
protected virtual async Task SaveChangesInDbContextAsync(DbContext dbContext)
{
await dbContext.SaveChangesAsync();
}
/// <summary>
/// Disposes a DbContext and then hands it back to the IoC container for release.
/// </summary>
protected virtual void Release(DbContext dbContext)
{
dbContext.Dispose();
IocResolver.Release(dbContext);
}
/// <summary>
/// EF materialization hook: resolves the real entity type (EF may materialize a proxy
/// subclass) and normalizes DateTime.Kind on the entity's date properties.
/// </summary>
private static void ObjectContext_ObjectMaterialized(object sender, ObjectMaterializedEventArgs e)
{
    var materializedType = ObjectContext.GetObjectType(e.Entity.GetType());
    DateTimePropertyInfoHelper.NormalizeDatePropertyKinds(e.Entity, materializedType);
}
}
}
| |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using Microsoft.Azure.Commands.Common.Authentication;
using Microsoft.Azure.Commands.Common.Authentication.Abstractions;
using Microsoft.Azure.Commands.Sql.Common;
using Microsoft.Azure.Management.Sql;
using Microsoft.Azure.Management.Sql.Models;
using Microsoft.Azure.Management.Storage;
#if !NETSTANDARD
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Blob;
using System;
#endif
using System.Collections.Generic;
using System.Linq;
namespace Microsoft.Azure.Commands.Sql.VulnerabilityAssessment.Services
{
/// <summary>
/// Handles all REST communication with the SQL Vulnerability Assessment endpoints
/// (settings, rule baselines and scans), plus the blob-container setup a scan export needs.
/// </summary>
public class VulnerabilityAssessmentEndpointsCommunicator
{
/// <summary>
/// The Sql client to be used by this end points communicator.
/// NOTE(review): cached in a static and only reset when the subscription changes —
/// assumes cmdlets do not run concurrently; confirm.
/// </summary>
private static SqlManagementClient SqlClient { get; set; }
/// <summary>
/// The Storage client to be used by this end points communicator
/// </summary>
private static StorageManagementClient StorageClient { get; set; }
/// <summary>
/// Gets or sets the Azure subscription that the cached clients were created for
/// </summary>
private static IAzureSubscription Subscription { get; set; }
/// <summary>
/// Gets or sets the Azure profile context used to create the management clients
/// </summary>
public IAzureContext Context { get; set; }
// Stores the context and drops the cached SQL client when the subscription differs
// from the one the cache was built for, forcing lazy re-creation.
public VulnerabilityAssessmentEndpointsCommunicator(IAzureContext context)
{
Context = context;
if (context.Subscription != Subscription)
{
Subscription = context.Subscription;
SqlClient = null;
}
}
/// <summary>
/// Gets the database Vulnerability Assessment Settings for the given database in the given database server in the given resource group
/// </summary>
public DatabaseVulnerabilityAssessment GetDatabaseVulnerabilityAssessmentSettings(string resourceGroupName, string serverName, string databaseName)
{
return GetCurrentSqlClient().DatabaseVulnerabilityAssessments.Get(resourceGroupName, serverName, databaseName);
}
/// <summary>
/// Removes the database Vulnerability Assessment Settings for the given database in the given database server in the given resource group
/// </summary>
public void ClearDatabaseVulnerabilityAssessmentSettings(string resourceGroupName, string serverName, string databaseName)
{
GetCurrentSqlClient().DatabaseVulnerabilityAssessments.Delete(resourceGroupName, serverName, databaseName);
}
/// <summary>
/// Calls the set Vulnerability Assessment APIs for the database Vulnerability Assessment Settings for the given database in the given database server in the given resource group
/// </summary>
public DatabaseVulnerabilityAssessment SetDatabaseVulnerabilityAssessmentSettings(string resourceGroupName, string serverName, string databaseName, DatabaseVulnerabilityAssessment parameters)
{
return GetCurrentSqlClient().DatabaseVulnerabilityAssessments.CreateOrUpdate(resourceGroupName, serverName, databaseName, parameters);
}
/// <summary>
/// Gets the database Vulnerability Assessment rule baseline for the given rule in the given database in the given database server in the given resource group.
/// When <paramref name="ruleAppliesToMaster"/> is true the baseline is read from the master-database baseline set, otherwise from the default set.
/// </summary>
public DatabaseVulnerabilityAssessmentRuleBaseline GetDatabaseVulnerabilityAssessmentRuleBaseline(string resourceGroupName, string serverName,
string databaseName, string ruleId, bool ruleAppliesToMaster)
{
return GetCurrentSqlClient().DatabaseVulnerabilityAssessmentRuleBaselines.Get(resourceGroupName, serverName, databaseName, ruleId,
ruleAppliesToMaster ? VulnerabilityAssessmentPolicyBaselineName.Master : VulnerabilityAssessmentPolicyBaselineName.Default);
}
/// <summary>
/// Removes the database Vulnerability Assessment rule baseline for the given rule in the given database in the given database server in the given resource group
/// </summary>
public void ClearDatabaseVulnerabilityAssessmentRuleBaseline(string resourceGroupName, string serverName, string databaseName, string ruleId,
bool ruleAppliesToMaster)
{
GetCurrentSqlClient().DatabaseVulnerabilityAssessmentRuleBaselines.Delete(resourceGroupName, serverName, databaseName, ruleId,
ruleAppliesToMaster ? VulnerabilityAssessmentPolicyBaselineName.Master : VulnerabilityAssessmentPolicyBaselineName.Default);
}
/// <summary>
/// Calls the set Vulnerability Assessment APIs for the database Vulnerability Assessment rule baseline for the given rule in the given database in the given database server in the given resource group
/// </summary>
public void SetDatabaseVulnerabilityAssessmentRuleBaseline(string resourceGroupName, string serverName, string databaseName, string ruleId,
bool ruleAppliesToMaster, DatabaseVulnerabilityAssessmentRuleBaseline parameters)
{
GetCurrentSqlClient().DatabaseVulnerabilityAssessmentRuleBaselines.CreateOrUpdate(resourceGroupName, serverName, databaseName, ruleId,
ruleAppliesToMaster ? VulnerabilityAssessmentPolicyBaselineName.Master : VulnerabilityAssessmentPolicyBaselineName.Default, parameters);
}
/// <summary>
/// Lists the Vulnerability Assessment scan records for the given database
/// </summary>
public List<VulnerabilityAssessmentScanRecord> ListDatabaseVulnerabilityAssessmentScanRecords(string resourceGroupName, string serverName,
string databaseName)
{
return new List<VulnerabilityAssessmentScanRecord>(GetCurrentSqlClient().DatabaseVulnerabilityAssessmentScans.ListByDatabase(resourceGroupName,
serverName, databaseName));
}
/// <summary>
/// Gets a single Vulnerability Assessment scan record by scan id
/// </summary>
public VulnerabilityAssessmentScanRecord GetDatabaseVulnerabilityAssessmentScanRecord(string resourceGroupName, string serverName,
string databaseName, string scanId)
{
return GetCurrentSqlClient().DatabaseVulnerabilityAssessmentScans.Get(resourceGroupName, serverName, databaseName, scanId);
}
/// <summary>
/// Exports a Vulnerability Assessment scan result
/// </summary>
public DatabaseVulnerabilityAssessmentScansExport ConvertDatabaseVulnerabilityAssessmentScan(string resourceGroupName, string serverName,
string databaseName, string scanId)
{
return GetCurrentSqlClient().DatabaseVulnerabilityAssessmentScans.Export(resourceGroupName, serverName, databaseName, scanId);
}
/// <summary>
/// Triggers a Vulnerability Assessment scan
/// </summary>
public void TriggerDatabaseVulnerabilityAssessmentScan(string resourceGroupName, string serverName,
string databaseName, string scanId)
{
GetCurrentSqlClient().DatabaseVulnerabilityAssessmentScans.InitiateScan(resourceGroupName, serverName, databaseName, scanId);
}
// Pair of values handed back by CreateBlobStorageContainer: the key/SAS used to
// authorize writes plus the full container URL.
public struct StorageContainerInfo
{
public string StorageAccountSasKey;
public string StorageContainerPath;
}
/// <summary>
/// Creates a blob storage container (full-framework build only) and returns the
/// storage key and container path used for Vulnerability Assessment scan storage.
/// </summary>
public StorageContainerInfo CreateBlobStorageContainer(string resourceGroupName, string storageAccountName, string containerName)
{
StorageManagementClient storageClient = GetCurrentStorageClient();
var storageAccountObject = storageClient.StorageAccounts.GetProperties(resourceGroupName, storageAccountName);
var keysObject = storageClient.StorageAccounts.ListKeys(resourceGroupName, storageAccountName);
#if NETSTANDARD
// .NET Standard build: no container creation — only the endpoint and first account key are read.
var storageAccountBlobPrimaryEndpoints = storageAccountObject.PrimaryEndpoints.Blob;
var key = keysObject.Keys.FirstOrDefault().Value;
#else
var storageAccountBlobPrimaryEndpoints = storageAccountObject.StorageAccount.PrimaryEndpoints.Blob;
var key = keysObject.StorageAccountKeys.Key1;
// Create container
CloudStorageAccount storageAccountClient = new CloudStorageAccount(
new WindowsAzure.Storage.Auth.StorageCredentials(
storageAccountName,
key),
useHttps: true);
CloudBlobClient blobClient = storageAccountClient.CreateCloudBlobClient();
CloudBlobContainer containerReference = blobClient.GetContainerReference(containerName);
// NOTE(review): CreateIfNotExistsAsync is not awaited/waited — container creation may
// still be in flight when this method returns; confirm this is intended.
containerReference.CreateIfNotExistsAsync();
// Create the SAS key for the Vulnerability Assessment engine
// In this case no start time is specified, so the shared access signature becomes valid immediately.
// In Addition setting SharedAccessExpiryTime to DateTimeOffset.MaxValue is the equivalent of declaring an unlimited expiration date.
SharedAccessBlobPolicy sharedAccessPolicy = new SharedAccessBlobPolicy
{
SharedAccessExpiryTime = DateTimeOffset.MaxValue,
Permissions = SharedAccessBlobPermissions.Write | SharedAccessBlobPermissions.Read | SharedAccessBlobPermissions.List
};
// Generate the SAS Token
var sasToken = containerReference.GetSharedAccessSignature(sharedAccessPolicy);
#endif
// NOTE(review): on the non-NETSTANDARD path 'sasToken' is computed above but never used;
// StorageAccountSasKey below returns the account key instead — verify which value the
// Vulnerability Assessment engine actually expects.
return new StorageContainerInfo
{
StorageAccountSasKey = key,
StorageContainerPath = string.Format("{0}{1}", storageAccountBlobPrimaryEndpoints, containerName)
};
}
/// <summary>
/// Retrieve the SQL Management client for the currently selected subscription, adding the session and request
/// id tracing headers for the current cmdlet invocation. The instance is created lazily and cached statically.
/// </summary>
/// <returns>The SQL Management client for the currently selected subscription.</returns>
private SqlManagementClient GetCurrentSqlClient() => SqlClient ?? (SqlClient = AzureSession.Instance.ClientFactory.CreateArmClient<SqlManagementClient>(Context, AzureEnvironment.Endpoint.ResourceManager));
/// <summary>
/// Lazy creation of a single (statically cached) instance of a storage client
/// </summary>
private StorageManagementClient GetCurrentStorageClient() => StorageClient ?? (StorageClient = AzureEndpointsCommunicator.GetStorageV2Client(Context));
}
}
| |
using Bridge.Contract;
using Bridge.Contract.Constants;
using ICSharpCode.NRefactory.CSharp;
using ICSharpCode.NRefactory.Semantics;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using ICSharpCode.NRefactory.TypeSystem;
using Object.Net.Utilities;
namespace Bridge.Translator
{
public partial class ConstructorBlock : AbstractMethodBlock, IConstructorBlock
{
/// <summary>
/// Creates a constructor-emission block for the given type.
/// </summary>
/// <param name="emitter">Output emitter shared with the rest of the translator.</param>
/// <param name="typeInfo">Type whose constructors will be emitted.</param>
/// <param name="staticBlock">True to emit the static ($ctors) part, false for instance constructors.</param>
public ConstructorBlock(IEmitter emitter, ITypeInfo typeInfo, bool staticBlock)
: base(emitter, typeInfo.TypeDeclaration)
{
this.Emitter = emitter;
this.TypeInfo = typeInfo;
this.StaticBlock = staticBlock;
}
// Metadata for the type whose constructors are being emitted.
public ITypeInfo TypeInfo
{
get;
set;
}
// True: emit the static constructor block; false: emit instance constructors (see DoEmit).
public bool StaticBlock
{
get;
set;
}
// NOTE(review): not read or written in this class's visible code — presumably set by
// callers when the type contains the program entry point; confirm at call sites.
public bool HasEntryPoint
{
get; set;
}
/// <summary>
/// Dispatches to the emitter matching the block kind chosen in the constructor:
/// static ($ctors/init) or instance constructors.
/// </summary>
protected override void DoEmit()
{
    if (!this.StaticBlock)
    {
        this.EmitCtorForInstantiableClass();
        return;
    }

    this.EmitCtorForStaticClass();
}
/// <summary>
/// Collects the code snippets to inject into the emitted constructor:
/// plugin-provided injectors first, then event/auto-startup hookups.
/// </summary>
protected virtual IEnumerable<string> GetInjectors()
{
    var eventHookups = this.GetEventsAndAutoStartupMethods();
    var pluginInjectors = this.Emitter.Plugins.GetConstructorInjectors(this);
    return pluginInjectors.Concat(eventHookups);
}
/// <summary>
/// Emits the static side of a type: an "init" function holding static field
/// initializers/injectors, followed by the user's static constructor body,
/// all wrapped in a single $ctors object when either part is present.
/// </summary>
protected virtual void EmitCtorForStaticClass()
{
var injectors = this.GetInjectors();
IEnumerable<string> fieldsInjectors = null;
// Emit static fields; any initializers that could not be emitted inline come back as injectors.
var fieldBlock = new FieldBlock(this.Emitter, this.TypeInfo, true, true);
fieldBlock.Emit();
fieldsInjectors = fieldBlock.Injectors;
if (fieldBlock.WasEmitted)
{
this.Emitter.Comma = true;
}
// Tracks whether the $ctors header has been opened, so it is opened at most once
// and closed at the end.
bool ctorHeader = false;
if (this.TypeInfo.StaticConfig.HasConfigMembers || injectors.Any() || fieldsInjectors.Any())
{
this.EnsureComma();
if (this.TypeInfo.StaticConfig.HasConfigMembers)
{
var configBlock = new FieldBlock(this.Emitter, this.TypeInfo, true, false);
configBlock.ClearTempVariables = false;
configBlock.Emit();
if (configBlock.Injectors.Count > 0)
{
injectors = configBlock.Injectors.Concat(injectors);
}
if (configBlock.WasEmitted)
{
this.Emitter.Comma = true;
}
}
// Field injectors run before config/plugin injectors.
if (fieldsInjectors.Any())
{
injectors = fieldsInjectors.Concat(injectors);
}
if (injectors.Count() > 0)
{
this.EnsureComma();
ctorHeader = true;
this.Write(JS.Fields.CTORS);
this.WriteColon();
this.BeginBlock();
// init: function () { ...injectors... }
this.Write(JS.Funcs.INIT);
this.WriteColon();
this.WriteFunction();
this.WriteOpenParentheses();
this.WriteCloseParentheses();
this.WriteSpace();
this.BeginBlock();
if (this.Emitter.TempVariables != null)
{
this.SimpleEmitTempVars();
this.Emitter.TempVariables = new Dictionary<string, bool>();
}
foreach (var fn in injectors)
{
this.Write(WriteIndentToString(fn, this.Level - 1));
this.WriteNewLine();
}
this.EndBlock();
this.Emitter.Comma = true;
}
}
// Emit the user-written static constructor body, if any.
var ctor = this.TypeInfo.StaticCtor;
if (ctor != null && ctor.Body.HasChildren)
{
this.EnsureComma();
if (!ctorHeader)
{
ctorHeader = true;
this.Write(JS.Fields.CTORS);
this.WriteColon();
this.BeginBlock();
}
this.ResetLocals();
var prevNamesMap = this.BuildLocalsNamesMap();
this.Write(JS.Funcs.CONSTRUCTOR);
this.WriteColon();
this.WriteFunction();
this.WriteOpenCloseParentheses(true);
this.BeginBlock();
var beginPosition = this.Emitter.Output.Length;
// Temporarily switch emission rules to those of the static constructor member.
var oldRules = this.Emitter.Rules;
var rr = this.Emitter.Resolver.ResolveNode(ctor, this.Emitter) as MemberResolveResult;
if (rr != null)
{
this.Emitter.Rules = Rules.Get(this.Emitter, rr.Member);
}
ctor.Body.AcceptChildren(this.Emitter);
if (!this.Emitter.IsAsync)
{
var indent = this.Emitter.TempVariables.Count > 0;
this.EmitTempVars(beginPosition, true);
if (indent)
{
this.Indent();
}
}
this.Emitter.Rules = oldRules;
this.EndBlock();
this.ClearLocalsNamesMap(prevNamesMap);
this.Emitter.Comma = true;
}
// Close the $ctors object if anything was written into it.
if (ctorHeader)
{
this.WriteNewLine();
this.EndBlock();
}
}
// True once the instance $ctors header has been opened (shared between EmitInitMembers
// and EmitCtorForInstantiableClass so the header is opened/closed exactly once).
private bool ctorHeader = false;
/// <summary>
/// Emits the instance "init" function (field initializers, config members and other
/// injectors) and returns the constructor-wrapper snippets that must be applied
/// around the emitted constructors later.
/// </summary>
protected virtual IEnumerable<string> EmitInitMembers()
{
var injectors = this.GetInjectors();
// Injectors prefixed with the constructor-wrapper marker are split off and returned
// to the caller instead of being written into init.
var constructorWrapperString = CS.Wrappers.CONSTRUCTORWRAPPER + ":";
IEnumerable<string> ctorWrappers = injectors.Where(i => i.StartsWith(constructorWrapperString)).Select(i => i.Substring(constructorWrapperString.Length));
injectors = injectors.Where(i => !i.StartsWith(constructorWrapperString));
IEnumerable<string> fieldsInjectors = null;
var fieldBlock = new FieldBlock(this.Emitter, this.TypeInfo, false, true);
fieldBlock.Emit();
fieldsInjectors = fieldBlock.Injectors;
if (fieldBlock.WasEmitted)
{
this.Emitter.Comma = true;
}
// Nothing to put into init — return the wrappers only.
if (!this.TypeInfo.InstanceConfig.HasConfigMembers && !injectors.Any() && !fieldsInjectors.Any())
{
return ctorWrappers;
}
if (this.TypeInfo.InstanceConfig.HasConfigMembers)
{
var configBlock = new FieldBlock(this.Emitter, this.TypeInfo, false, false);
configBlock.ClearTempVariables = false;
configBlock.Emit();
if (configBlock.Injectors.Count > 0)
{
injectors = configBlock.Injectors.Concat(injectors);
}
if (configBlock.WasEmitted)
{
this.Emitter.Comma = true;
}
}
// Field injectors run before config/plugin injectors.
if (fieldsInjectors.Any())
{
injectors = fieldsInjectors.Concat(injectors);
}
if (injectors.Count() > 0)
{
this.EnsureComma();
this.ctorHeader = true;
this.Write(JS.Fields.CTORS);
this.WriteColon();
this.BeginBlock();
// init: function () { ...injectors... }
this.Write(JS.Funcs.INIT);
this.WriteColon();
this.WriteFunction();
this.WriteOpenParentheses();
this.WriteCloseParentheses();
this.WriteSpace();
this.BeginBlock();
if (this.Emitter.TempVariables != null)
{
this.SimpleEmitTempVars();
this.Emitter.TempVariables = new Dictionary<string, bool>();
}
foreach (var fn in injectors)
{
this.Write(WriteIndentToString(fn, this.Level - 1));
this.WriteNewLine();
}
this.EndBlock();
this.Emitter.Comma = true;
}
return ctorWrappers;
}
/// <summary>
/// Emits the instance constructors of a type: first the init members (unless the type
/// is an object literal), then every declared constructor — synthesizing a default one
/// for value types, forced-default object literals, or when constructor wrappers exist.
/// Object literals get special treatment: the body builds and returns a plain object.
/// </summary>
protected virtual void EmitCtorForInstantiableClass()
{
var baseType = this.Emitter.GetBaseTypeDefinition();
var typeDef = this.Emitter.GetTypeDefinition();
var isObjectLiteral = this.Emitter.Validator.IsObjectLiteral(typeDef);
var isPlainMode = this.Emitter.Validator.GetObjectCreateMode(typeDef) == 0;
var ctorWrappers = isObjectLiteral ? new string[0] : this.EmitInitMembers().ToArray();
// Nothing instantiable and no wrappers (or a plain-mode object literal): just close
// the $ctors header if EmitInitMembers opened it.
if (!this.TypeInfo.HasRealInstantiable(this.Emitter) && ctorWrappers.Length == 0 || isObjectLiteral && isPlainMode)
{
if (this.ctorHeader)
{
this.WriteNewLine();
this.EndBlock();
}
return;
}
// Synthesize an empty public default constructor when one is required but absent.
bool forceDefCtor = isObjectLiteral && this.Emitter.Validator.GetObjectCreateMode(typeDef) == 1 && this.TypeInfo.Ctors.Count == 0;
if (typeDef.IsValueType || forceDefCtor || (this.TypeInfo.Ctors.Count == 0 && ctorWrappers.Length > 0))
{
this.TypeInfo.Ctors.Add(new ConstructorDeclaration
{
Modifiers = Modifiers.Public,
Body = new BlockStatement()
});
}
if (!this.ctorHeader && this.TypeInfo.Ctors.Count > 0)
{
this.EnsureComma();
this.ctorHeader = true;
this.Write(JS.Fields.CTORS);
this.WriteColon();
this.BeginBlock();
}
this.Emitter.InConstructor = true;
foreach (var ctor in this.TypeInfo.Ctors)
{
// Rules are switched per constructor and restored at the end of the iteration.
var oldRules = this.Emitter.Rules;
if (ctor.Body.HasChildren)
{
var rr = this.Emitter.Resolver.ResolveNode(ctor, this.Emitter) as MemberResolveResult;
if (rr != null)
{
this.Emitter.Rules = Rules.Get(this.Emitter, rr.Member);
}
}
this.EnsureComma();
this.ResetLocals();
var prevMap = this.BuildLocalsMap();
var prevNamesMap = this.BuildLocalsNamesMap();
this.AddLocals(ctor.Parameters, ctor.Body);
// Overloaded constructors (more than one, with parameters) get a mangled name.
var ctorName = JS.Funcs.CONSTRUCTOR;
if (this.TypeInfo.Ctors.Count > 1 && ctor.Parameters.Count > 0)
{
var overloads = OverloadsCollection.Create(this.Emitter, ctor);
ctorName = overloads.GetOverloadName();
}
XmlToJsDoc.EmitComment(this, ctor);
this.Write(ctorName);
this.WriteColon();
this.WriteFunction();
// Capture the emitted parameter list so WrapBody can reuse it for wrapper functions.
int pos = this.Emitter.Output.Length;
this.EmitMethodParameters(ctor.Parameters, null, ctor);
var ctorParams = this.Emitter.Output.ToString().Substring(pos);
this.WriteSpace();
this.BeginBlock();
var len = this.Emitter.Output.Length;
var requireNewLine = false;
var noThisInvocation = ctor.Initializer == null || ctor.Initializer.IsNull || ctor.Initializer.ConstructorInitializerType == ConstructorInitializerType.Base;
// When wrappers apply, the body is emitted into a temporary writer and later
// re-assembled around the wrappers by WrapBody.
IWriterInfo oldWriter = null;
if (ctorWrappers.Length > 0 && noThisInvocation)
{
oldWriter = this.SaveWriter();
this.NewWriter();
}
this.ConvertParamsToReferences(ctor.Parameters);
if (len != this.Emitter.Output.Length)
{
requireNewLine = true;
}
if (isObjectLiteral)
{
if (requireNewLine)
{
this.WriteNewLine();
}
// Object literal: build "$this" from the base literal (or an empty object).
this.Write("var " + JS.Vars.D_THIS + " = ");
var isBaseObjectLiteral = baseType != null && this.Emitter.Validator.IsObjectLiteral(baseType);
if (isBaseObjectLiteral && baseType != null && (!this.Emitter.Validator.IsExternalType(baseType) || this.Emitter.Validator.IsBridgeClass(baseType)) ||
(ctor.Initializer != null && ctor.Initializer.ConstructorInitializerType == ConstructorInitializerType.This))
{
this.EmitBaseConstructor(ctor, ctorName, true);
}
else if (isBaseObjectLiteral && baseType != null && ctor.Initializer != null &&
ctor.Initializer.ConstructorInitializerType == ConstructorInitializerType.Base)
{
this.EmitExternalBaseCtor(ctor, ref requireNewLine);
}
else
{
this.Write("{ };");
}
this.WriteNewLine();
string name = this.Emitter.Validator.GetCustomTypeName(typeDef, this.Emitter, false);
if (name.IsEmpty())
{
name = BridgeTypes.ToJsName(this.TypeInfo.Type, this.Emitter);
}
this.Write(JS.Vars.D_THIS + "." + JS.Funcs.GET_TYPE + " = function () { return " + name + "; };");
this.WriteNewLine();
// The rest of the constructor runs inside an IIFE invoked with $this.
this.Write("(function ()");
this.BeginBlock();
requireNewLine = false;
}
var beginPosition = this.Emitter.Output.Length;
if (noThisInvocation)
{
if (requireNewLine)
{
this.WriteNewLine();
}
if (isObjectLiteral)
{
// Object literal fields plus defineProperty calls for non-auto properties.
var fieldBlock = new FieldBlock(this.Emitter, this.TypeInfo, false, false, true);
fieldBlock.Emit();
var properties = this.TypeInfo.InstanceProperties;
var names = new List<string>(properties.Keys);
foreach (var name in names)
{
var props = properties[name];
foreach (var prop in props)
{
var p = prop as PropertyDeclaration;
if (p != null)
{
if (p.Getter.Body.IsNull && p.Setter.Body.IsNull)
{
continue;
}
this.Write(JS.Types.Object.DEFINEPROPERTY);
this.WriteOpenParentheses();
this.Write("this, ");
this.WriteScript(OverloadsCollection.Create(this.Emitter, p).GetOverloadName());
this.WriteComma();
this.Emitter.Comma = false;
this.BeginBlock();
var memberResult = this.Emitter.Resolver.ResolveNode(p, this.Emitter) as MemberResolveResult;
var block = new VisitorPropertyBlock(this.Emitter, p);
block.EmitPropertyMethod(p, p.Getter, ((IProperty)memberResult.Member).Getter, false, true);
block.EmitPropertyMethod(p, p.Setter, ((IProperty)memberResult.Member).Setter, true, true);
this.EnsureComma(true);
this.Write(JS.Fields.ENUMERABLE + ": true");
this.WriteNewLine();
this.EndBlock();
this.WriteCloseParentheses();
this.Write(";");
this.WriteNewLine();
}
}
}
}
else
{
// Regular class: call the emitted init function before the body runs.
this.Write("this." + JS.Funcs.INITIALIZE + "();");
requireNewLine = true;
}
}
if (!isObjectLiteral)
{
// Chain to the base (or this) constructor for regular classes.
if (baseType != null && (!this.Emitter.Validator.IsExternalType(baseType) || this.Emitter.Validator.IsBridgeClass(baseType)) ||
(ctor.Initializer != null && ctor.Initializer.ConstructorInitializerType == ConstructorInitializerType.This))
{
if (requireNewLine)
{
this.WriteNewLine();
requireNewLine = false;
}
this.EmitBaseConstructor(ctor, ctorName, false);
}
else if (baseType != null && (ctor.Initializer == null || ctor.Initializer.IsNull || ctor.Initializer.ConstructorInitializerType == ConstructorInitializerType.Base))
{
this.EmitExternalBaseCtor(ctor, ref requireNewLine);
}
}
// An inline [Script] attribute replaces the C# body entirely.
var script = this.Emitter.GetScript(ctor);
var hasAdditionalIndent = false;
if (script == null)
{
if (ctor.Body.HasChildren)
{
if (requireNewLine)
{
this.WriteNewLine();
}
ctor.Body.AcceptChildren(this.Emitter);
if (!this.Emitter.IsAsync)
{
hasAdditionalIndent = this.Emitter.TempVariables.Count > 0;
this.EmitTempVars(beginPosition, true);
}
}
else if (requireNewLine)
{
this.WriteNewLine();
}
}
else
{
if (requireNewLine)
{
this.WriteNewLine();
}
this.WriteLines(script);
}
// Re-assemble the buffered body inside the wrapper functions.
if (oldWriter != null)
{
this.WrapBody(oldWriter, ctorWrappers, ctorParams);
}
if (isObjectLiteral)
{
if (requireNewLine)
{
this.WriteNewLine();
}
// Close and invoke the IIFE, then return the constructed literal.
this.EndBlock();
this.Write(")." + JS.Funcs.CALL + "(" + JS.Vars.D_THIS + ");");
this.WriteNewLine();
this.Write("return " + JS.Vars.D_THIS + ";");
this.WriteNewLine();
}
if (hasAdditionalIndent)
{
this.Indent();
}
this.EndBlock();
this.Emitter.Comma = true;
this.ClearLocalsMap(prevMap);
this.ClearLocalsNamesMap(prevNamesMap);
this.Emitter.Rules = oldRules;
}
this.Emitter.InConstructor = false;
if (this.ctorHeader)
{
this.WriteNewLine();
this.EndBlock();
}
}
/// <summary>
/// Emits a call to an external (non-Bridge) base constructor: either via an inline
/// template attached to the resolved member, or as "BaseName.call(this, args)".
/// Well-known roots (Object, ValueType, Enum), [NonScriptable] bases and interfaces
/// are skipped when there is no explicit initializer.
/// </summary>
private void EmitExternalBaseCtor(ConstructorDeclaration ctor, ref bool requireNewLine)
{
IMember member = null;
var hasInitializer = ctor.Initializer != null && !ctor.Initializer.IsNull;
var baseType = this.Emitter.GetBaseTypeDefinition();
if (hasInitializer)
{
member = ((InvocationResolveResult)this.Emitter.Resolver.ResolveNode(ctor.Initializer, this.Emitter)).Member;
}
if (member != null)
{
// Inline template path: Bridge.apply(this, <expanded template>).
var inlineCode = this.Emitter.GetInline(member);
if (!string.IsNullOrEmpty(inlineCode))
{
if (requireNewLine)
{
this.WriteNewLine();
requireNewLine = false;
}
this.Write(JS.Types.Bridge.APPLY);
this.WriteOpenParentheses();
this.Write("this, ");
var argsInfo = new ArgumentsInfo(this.Emitter, ctor.Initializer);
new InlineArgumentsBlock(this.Emitter, argsInfo, inlineCode).Emit();
this.WriteCloseParentheses();
this.WriteSemiColon();
this.WriteNewLine();
return;
}
}
if (hasInitializer || (baseType.FullName != "System.Object" && baseType.FullName != "System.ValueType" && baseType.FullName != "System.Enum" && !baseType.CustomAttributes.Any(a => a.AttributeType.FullName == "Bridge.NonScriptableAttribute") && !baseType.IsInterface))
{
if (requireNewLine)
{
this.WriteNewLine();
requireNewLine = false;
}
string name = null;
if (this.TypeInfo.GetBaseTypes(this.Emitter).Any())
{
name = BridgeTypes.ToJsName(this.TypeInfo.GetBaseClass(this.Emitter), this.Emitter);
}
else
{
name = BridgeTypes.ToJsName(baseType, this.Emitter);
}
// BaseName.call(this[, args]);
this.Write(name);
this.WriteCall();
int openPos = this.Emitter.Output.Length;
this.WriteOpenParentheses();
this.Write("this");
if (hasInitializer && ctor.Initializer.Arguments.Count > 0)
{
this.Write(", ");
var argsInfo = new ArgumentsInfo(this.Emitter, ctor.Initializer);
var argsExpressions = argsInfo.ArgumentsExpressions;
var paramsArg = argsInfo.ParamsExpression;
new ExpressionListBlock(this.Emitter, argsExpressions, paramsArg, ctor.Initializer, openPos).Emit();
}
this.WriteCloseParentheses();
this.WriteSemiColon();
this.WriteNewLine();
}
}
/// <summary>
/// Re-assembles a buffered constructor body inside the constructor-wrapper snippets:
/// each wrapper is split on the BODY placeholder; the prefixes are emitted outermost-first
/// as nested "function (params) {" shells, the innermost shell receives the buffered body,
/// and the suffixes are emitted in reverse to close the nesting.
/// </summary>
protected virtual void WrapBody(IWriterInfo oldWriter, string[] ctorWrappers, string ctorParams)
{
// Grab the body emitted into the temporary writer, then switch back to the real one.
var body = this.Emitter.Output.ToString();
this.RestoreWriter(oldWriter);
List<string> endParts = new List<string>();
StringBuilder sb = new StringBuilder();
for (int i = 0; i < ctorWrappers.Length; i++)
{
var isLast = i == (ctorWrappers.Length - 1);
var ctorWrapper = ctorWrappers[i];
// parts[0] = text before the body placeholder, parts[1] = text after it.
var parts = ctorWrapper.Split(new[] { CS.Wrappers.Params.BODY }, StringSplitOptions.RemoveEmptyEntries);
endParts.Add(parts[1]);
sb.Append(parts[0]);
sb.Append("function ");
sb.Append(ctorParams);
sb.Append(" {");
if (!isLast)
{
sb.Append(Bridge.Translator.Emitter.NEW_LINE);
}
Indent();
for (var j = 0; j < this.Emitter.Level; j++)
{
sb.Append(Bridge.Translator.Emitter.INDENT);
}
if (isLast)
{
sb.Append(this.WriteIndentToString(body));
}
}
// Close the nested wrappers innermost-first.
endParts.Reverse();
var newLine = false;
foreach (var endPart in endParts)
{
Outdent();
if (newLine)
{
sb.Append(Bridge.Translator.Emitter.NEW_LINE);
for (var j = 0; j < this.Emitter.Level; j++)
{
sb.Append(Bridge.Translator.Emitter.INDENT);
}
}
// NOTE(review): assumes INDENT is 4 characters and sb holds at least 4 —
// Substring(sb.Length - 4) would throw on shorter output; confirm unreachable.
else if (sb.ToString().Substring(sb.Length - 4) == Bridge.Translator.Emitter.INDENT)
{
sb.Length -= 4;
}
newLine = true;
sb.Append("}");
sb.Append(endPart);
}
this.Write(sb.ToString());
this.WriteNewLine();
}
/// <summary>
/// Emits a chained constructor call. A missing/null initializer is treated as ": base()".
/// For base calls the target name is resolved from overloads or a compatible default
/// base constructor; for this-calls the current type's overload name is used.
/// Non-object-literal emission appends ".call(this, ...)"; when the base is an object
/// literal the result is additionally wrapped in Bridge.copyProperties(this, ...).
/// </summary>
protected virtual void EmitBaseConstructor(ConstructorDeclaration ctor, string ctorName, bool isObjectLiteral)
{
var initializer = ctor.Initializer != null && !ctor.Initializer.IsNull ? ctor.Initializer : new ConstructorInitializer()
{
ConstructorInitializerType = ConstructorInitializerType.Base
};
// True when ".call(this, ...)" form is used and "this" must be the first argument.
bool appendScope = false;
bool isBaseObjectLiteral = false;
if (initializer.ConstructorInitializerType == ConstructorInitializerType.Base)
{
var baseType = this.Emitter.GetBaseTypeDefinition();
//var baseName = JS.Funcs.CONSTRUCTOR;
string baseName = null;
isBaseObjectLiteral = this.Emitter.Validator.IsObjectLiteral(baseType);
// Explicit ": base(...)": resolve the overload name of the targeted base constructor.
if (ctor.Initializer != null && !ctor.Initializer.IsNull)
{
var member = ((InvocationResolveResult)this.Emitter.Resolver.ResolveNode(ctor.Initializer, this.Emitter)).Member;
var overloads = OverloadsCollection.Create(this.Emitter, member);
if (overloads.HasOverloads)
{
baseName = overloads.GetOverloadName();
}
}
string name = null;
if (this.TypeInfo.GetBaseTypes(this.Emitter).Any())
{
name = BridgeTypes.ToJsName(this.TypeInfo.GetBaseClass(this.Emitter), this.Emitter);
}
else
{
name = BridgeTypes.ToJsName(baseType, this.Emitter);
}
// Object-literal base: copy the base literal's members onto "this".
if (!isObjectLiteral && isBaseObjectLiteral)
{
this.Write(JS.Types.Bridge.COPY_PROPERTIES);
this.WriteOpenParentheses();
this.Write("this, ");
}
this.Write(name, ".");
// Implicit base call: find a constructor callable with no arguments
// (parameterless, all-optional, or single params parameter).
if (baseName == null)
{
var baseIType = this.Emitter.BridgeTypes.Get(baseType).Type;
var baseCtor = baseIType.GetConstructors().SingleOrDefault(c => c.Parameters.Count == 0);
if (baseCtor == null)
{
baseCtor = baseIType.GetConstructors().SingleOrDefault(c => c.Parameters.All(p => p.IsOptional));
}
if (baseCtor == null)
{
baseCtor = baseIType.GetConstructors().SingleOrDefault(c => c.Parameters.Count == 1 && c.Parameters.First().IsParams);
}
if (baseCtor != null)
{
baseName = OverloadsCollection.Create(this.Emitter, baseCtor).GetOverloadName();
}
else
{
baseName = JS.Funcs.CONSTRUCTOR;
}
}
this.Write(baseName);
if (!isObjectLiteral)
{
this.WriteCall();
appendScope = true;
}
}
else
{
// ": this(...)" — chain to another constructor of the current type.
// this.WriteThis();
string name = BridgeTypes.ToJsName(this.TypeInfo.Type, this.Emitter);
this.Write(name);
this.WriteDot();
var baseName = JS.Funcs.CONSTRUCTOR;
var member = ((InvocationResolveResult)this.Emitter.Resolver.ResolveNode(ctor.Initializer, this.Emitter)).Member;
var overloads = OverloadsCollection.Create(this.Emitter, member);
if (overloads.HasOverloads)
{
baseName = overloads.GetOverloadName();
}
this.Write(baseName);
if (!isObjectLiteral)
{
this.WriteCall();
appendScope = true;
}
}
int openPos = this.Emitter.Output.Length;
this.WriteOpenParentheses();
if (appendScope)
{
this.WriteThis();
if (initializer.Arguments.Count > 0)
{
this.WriteComma();
}
}
if (initializer.Arguments.Count > 0)
{
var argsInfo = new ArgumentsInfo(this.Emitter, ctor.Initializer);
var argsExpressions = argsInfo.ArgumentsExpressions;
var paramsArg = argsInfo.ParamsExpression;
new ExpressionListBlock(this.Emitter, argsExpressions, paramsArg, ctor.Initializer, openPos).Emit();
}
// Close the extra copyProperties(...) parenthesis opened above.
if (!isObjectLiteral && isBaseObjectLiteral)
{
this.WriteCloseParentheses();
}
this.WriteCloseParentheses();
this.WriteSemiColon();
if (!isObjectLiteral)
{
this.WriteNewLine();
}
}
/// <summary>
/// True if the emitted type declares generic type parameters.
/// </summary>
protected virtual bool IsGenericType()
{
return this.TypeInfo.Type.TypeParameterCount > 0;
}
// True if the method declares its own type parameters.
// NOTE(review): private and not referenced anywhere in this class's visible code — possibly dead.
private bool IsGenericMethod(MethodDeclaration methodDeclaration)
{
return methodDeclaration.TypeParameters.Any();
}
}
}
| |
//
// Copyright (c) 2003-2006 Jaroslaw Kowalski <jaak@jkowalski.net>
// Copyright (c) 2006-2014 Piotr Fusik <piotr@fusik.info>
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
using Sooda.QL.TypedWrappers;
using System;
using System.Collections;
using System.IO;
namespace Sooda.QL
{
    /// <summary>
    /// Renders a SOQL expression tree back to text by visiting each node and
    /// writing the corresponding SOQL syntax to <see cref="Output"/>.
    /// Optionally substitutes concrete values for positional parameters.
    /// </summary>
    public class SoqlPrettyPrinter : ISoqlVisitor
    {
        // When true, sub-queries are laid out over multiple indented lines;
        // when false everything is written on a single line.
        public bool IndentOutput = true;
        // Values substituted for parameter literals by position. When null,
        // the raw "{n}" placeholder is printed instead (see the
        // SoqlParameterLiteralExpression visitor below).
        public IList ParameterValues;
        /// <summary>
        /// Creates a printer that writes to <paramref name="output"/> and prints
        /// parameter placeholders verbatim (no value substitution).
        /// </summary>
        public SoqlPrettyPrinter(TextWriter output)
        {
            Output = output;
            ParameterValues = null;
        }
        /// <summary>
        /// Creates a printer that writes to <paramref name="output"/> and
        /// substitutes <paramref name="parameterValues"/> for parameter literals.
        /// </summary>
        public SoqlPrettyPrinter(TextWriter output, IList parameterValues)
        {
            Output = output;
            ParameterValues = parameterValues;
        }
        /// <summary>Typed wrappers add no syntax; print the wrapped expression.</summary>
        public virtual void Visit(SoqlTypedWrapperExpression v)
        {
            v.InnerExpression.Accept(this);
        }
        /// <summary>Boolean wrappers add no syntax; print the wrapped expression.</summary>
        public virtual void Visit(SoqlBooleanWrapperExpression v)
        {
            v.InnerExpression.Accept(this);
        }
        /// <summary>Writes the operator symbol for a binary arithmetic/concat operator.</summary>
        protected virtual void Write(SoqlBinaryOperator op)
        {
            switch (op)
            {
                case SoqlBinaryOperator.Add:
                    Output.Write('+');
                    break;
                case SoqlBinaryOperator.Sub:
                    Output.Write('-');
                    break;
                case SoqlBinaryOperator.Div:
                    Output.Write('/');
                    break;
                case SoqlBinaryOperator.Mul:
                    Output.Write('*');
                    break;
                case SoqlBinaryOperator.Mod:
                    Output.Write('%');
                    break;
                case SoqlBinaryOperator.Concat:
                    Output.Write("||");
                    break;
            }
        }
        /// <summary>Prints "(left op right)"; always parenthesized to preserve precedence.</summary>
        public virtual void Visit(SoqlBinaryExpression v)
        {
            Output.Write('(');
            v.par1.Accept(this);
            Output.Write(' ');
            Write(v.op);
            Output.Write(' ');
            v.par2.Accept(this);
            Output.Write(')');
        }
        /// <summary>Prints "(left and right)".</summary>
        public virtual void Visit(SoqlBooleanAndExpression v)
        {
            Output.Write('(');
            v.Left.Accept(this);
            Output.Write(" and ");
            v.Right.Accept(this);
            Output.Write(')');
        }
        /// <summary>
        /// Prints "left in (e1,e2,...)". An empty right-hand list degenerates to
        /// the always-false literal "0=1". When the right-hand side is exactly one
        /// sub-query, the sub-query's own parentheses suffice and none are added here.
        /// </summary>
        public virtual void Visit(SoqlBooleanInExpression v)
        {
            if (v.Right.Count == 0)
            {
                // "x in ()" is not valid SQL; emit a constant-false predicate instead.
                Output.Write("0=1");
                return;
            }
            v.Left.Accept(this);
            Output.Write(" in ");
            if (v.Right.Count != 1 || !(v.Right[0] is SoqlQueryExpression))
                Output.Write('(');
            for (int i = 0; i < v.Right.Count; ++i)
            {
                if (i > 0)
                    Output.Write(',');
                v.Right[i].Accept(this);
            }
            if (v.Right.Count != 1 || !(v.Right[0] is SoqlQueryExpression))
                Output.Write(')');
        }
        /// <summary>Prints "expr is null" or "expr is not null".</summary>
        public virtual void Visit(SoqlBooleanIsNullExpression v)
        {
            v.Expr.Accept(this);
            Output.Write(" is ");
            if (v.NotNull)
                Output.Write("not ");
            Output.Write("null");
        }
        /// <summary>Prints the boolean literal value as-is.</summary>
        public virtual void Visit(SoqlBooleanLiteralExpression v)
        {
            Output.Write(v.Value);
        }
        /// <summary>Prints "(not (expr))".</summary>
        public virtual void Visit(SoqlBooleanNegationExpression v)
        {
            Output.Write("(not (");
            v.par.Accept(this);
            Output.Write("))");
        }
        /// <summary>Prints "(-(expr))" for arithmetic negation.</summary>
        public virtual void Visit(SoqlUnaryNegationExpression v)
        {
            Output.Write("(-(");
            v.par.Accept(this);
            Output.Write("))");
        }
        /// <summary>
        /// Prints "(left OR right)".
        /// NOTE(review): emits upper-case " OR " while the and-visitor emits
        /// lower-case " and " — confirm the casing inconsistency is intentional.
        /// </summary>
        public virtual void Visit(SoqlBooleanOrExpression v)
        {
            Output.Write('(');
            v.par1.Accept(this);
            Output.Write(" OR ");
            v.par2.Accept(this);
            Output.Write(')');
        }
        /// <summary>Writes a relational operator surrounded by single spaces.</summary>
        protected void OutputRelationalOperator(SoqlRelationalOperator op)
        {
            Output.Write(' ');
            switch (op)
            {
                case SoqlRelationalOperator.Greater:
                    Output.Write('>');
                    break;
                case SoqlRelationalOperator.Less:
                    Output.Write('<');
                    break;
                case SoqlRelationalOperator.LessOrEqual:
                    Output.Write("<=");
                    break;
                case SoqlRelationalOperator.GreaterOrEqual:
                    Output.Write(">=");
                    break;
                case SoqlRelationalOperator.Equal:
                    Output.Write('=');
                    break;
                case SoqlRelationalOperator.NotEqual:
                    Output.Write("<>");
                    break;
                case SoqlRelationalOperator.Like:
                    Output.Write("like");
                    break;
                default:
                    // Unlike the binary-operator switch above, an unknown value here is an error.
                    throw new NotImplementedException(op.ToString());
            }
            Output.Write(' ');
        }
        /// <summary>Prints "(left op right)" for a relational comparison.</summary>
        public virtual void Visit(SoqlBooleanRelationalExpression v)
        {
            Output.Write('(');
            v.par1.Accept(this);
            OutputRelationalOperator(v.op);
            v.par2.Accept(this);
            Output.Write(')');
        }
        /// <summary>Prints "exists (sub-query)".</summary>
        public virtual void Visit(SoqlExistsExpression v)
        {
            Output.Write("exists ");
            v.Query.Accept(this);
            if (IndentOutput)
                Output.WriteLine();
        }
        /// <summary>Prints "name(arg1, arg2, ...)"; "*" as the sole argument is passed through for count(*).</summary>
        public virtual void Visit(SoqlFunctionCallExpression v)
        {
            Output.Write(v.FunctionName);
            Output.Write('(');
            if (v.Parameters.Count == 1 && v.Parameters[0] is SoqlAsteriskExpression)
            {
                // special case for count(*) - temporary hack
                Output.Write('*');
            }
            else
            {
                for (int i = 0; i < v.Parameters.Count; ++i)
                {
                    if (i != 0)
                        Output.Write(", ");
                    v.Parameters[i].Accept(this);
                }
            }
            Output.Write(')');
        }
        /// <summary>
        /// Prints a literal: strings are single-quoted with embedded quotes doubled,
        /// DateTime values are quoted in "yyyyMMdd HH:mm:ss" form, null becomes "null",
        /// and anything else is written with its default ToString formatting.
        /// </summary>
        public virtual void Visit(SoqlLiteralExpression v)
        {
            if (v.LiteralValue is String)
            {
                Output.Write('\'');
                // double single quotes for SQL escaping
                Output.Write(((string)v.LiteralValue).Replace("'", "''"));
                Output.Write('\'');
            }
            else if (v.LiteralValue is DateTime)
            {
                Output.Write('\'');
                Output.Write(((DateTime)v.LiteralValue).ToString("yyyyMMdd HH:mm:ss"));
                Output.Write('\'');
            }
            else if (v.LiteralValue == null)
            {
                Output.Write("null");
            }
            else
            {
                // NOTE(review): uses the current culture's ToString for numbers — confirm
                // this matches the target SQL dialect's expectations.
                Output.Write(v.LiteralValue);
            }
        }
        /// <summary>Prints the "null" keyword.</summary>
        public virtual void Visit(SoqlNullLiteral v)
        {
            Output.Write("null");
        }
        /// <summary>
        /// Prints a positional parameter: when <see cref="ParameterValues"/> is set,
        /// the value is inlined using the same escaping rules as literals; otherwise
        /// the "{position[:modifiers]}" placeholder is written verbatim.
        /// </summary>
        public virtual void Visit(SoqlParameterLiteralExpression v)
        {
            if (ParameterValues != null)
            {
                object parameterValue = ParameterValues[v.ParameterPosition];
                if (parameterValue is String)
                {
                    Output.Write('\'');
                    Output.Write(((string)parameterValue).Replace("'", "''"));
                    Output.Write('\'');
                }
                else if (parameterValue is DateTime)
                {
                    Output.Write('\'');
                    Output.Write(((DateTime)parameterValue).ToString("yyyyMMdd HH:mm:ss"));
                    Output.Write('\'');
                }
                else
                {
                    Output.Write(parameterValue);
                }
            }
            else
            {
                Output.Write('{');
                Output.Write(v.ParameterPosition);
                if (v.Modifiers != null)
                {
                    Output.Write(':');
                    Output.Write(v.Modifiers.ToString());
                }
                Output.Write('}');
            }
        }
        /// <summary>Prints a dotted path: "left.PropertyName" (or just the property when there is no left part).</summary>
        public virtual void Visit(SoqlPathExpression v)
        {
            if (v.Left != null)
            {
                v.Left.Accept(this);
                Output.Write('.');
            }
            Output.Write(v.PropertyName);
        }
        /// <summary>Prints "left.*" (or just "*" when there is no left part).</summary>
        public virtual void Visit(SoqlAsteriskExpression v)
        {
            if (v.Left != null)
            {
                v.Left.Accept(this);
                Output.Write('.');
            }
            Output.Write('*');
        }
        /// <summary>Prints "path.Collection.Count".</summary>
        public virtual void Visit(SoqlCountExpression v)
        {
            if (v.Path != null)
            {
                v.Path.Accept(this);
                Output.Write('.');
            }
            Output.Write(v.CollectionName);
            Output.Write(".Count");
        }
        /// <summary>Prints "path.SoodaClass" (the pseudo-property exposing the object's class name).</summary>
        public virtual void Visit(SoqlSoodaClassExpression v)
        {
            if (v.Path != null)
            {
                v.Path.Accept(this);
                Output.Write('.');
            }
            Output.Write("SoodaClass");
        }
        /// <summary>Prints "path.Collection.Contains(expr)"; sub-query arguments get their own lines when indenting.</summary>
        public virtual void Visit(SoqlContainsExpression v)
        {
            if (v.Path != null)
            {
                v.Path.Accept(this);
                Output.Write('.');
            }
            Output.Write(v.CollectionName);
            Output.Write('.');
            Output.Write("Contains(");
            if (v.Expr is SoqlQueryExpression && IndentOutput)
                Output.WriteLine();
            v.Expr.Accept(this);
            if (v.Expr is SoqlQueryExpression && IndentOutput)
                Output.WriteLine();
            Output.Write(')');
        }
        /// <summary>
        /// Prints a full parenthesized query:
        /// select [distinct] cols from tables [where ...] [group by ...] [having ...] [order by ...].
        /// The select clause is optional (an empty SelectExpressions list prints only the from part).
        /// Indentation is bumped for the query body and restored in a finally block.
        /// </summary>
        public virtual void Visit(SoqlQueryExpression v)
        {
            Output.Write('(');
            if (IndentOutput)
                Output.WriteLine();
            IndentLevel++;
            try
            {
                if (v.SelectExpressions.Count > 0)
                {
                    WriteIndentString();
                    Output.Write("select ");
                    if (v.Distinct)
                        Output.Write("distinct ");
                    for (int i = 0; i < v.SelectExpressions.Count; ++i)
                    {
                        if (i > 0)
                        {
                            if (IndentOutput)
                            {
                                // continuation lines are aligned under the first select column
                                Output.WriteLine(',');
                                WriteIndentString();
                                Output.Write("       ");
                            }
                            else
                            {
                                Output.Write(',');
                            }
                        }
                        // nested queries in the select list need their own parentheses
                        if (v.SelectExpressions[i] is SoqlQueryExpression)
                            Output.Write('(');
                        v.SelectExpressions[i].Accept(this);
                        if (v.SelectExpressions[i] is SoqlQueryExpression)
                            Output.Write(')');
                        if (v.SelectAliases[i].Length > 0)
                        {
                            Output.Write(" as ");
                            Output.Write(v.SelectAliases[i]);
                        }
                    }
                    if (IndentOutput)
                    {
                        Output.WriteLine();
                        WriteIndentString();
                        Output.Write("from ");
                    }
                    else
                    {
                        Output.Write(" from ");
                    }
                }
                else
                {
                    // no select clause: start directly with the from list
                    WriteIndentString();
                }
                for (int i = 0; i < v.From.Count; ++i)
                {
                    if (i > 0)
                    {
                        if (IndentOutput)
                        {
                            Output.WriteLine(',');
                            WriteIndentString();
                            Output.Write("       ");
                        }
                        else
                        {
                            Output.Write(',');
                        }
                    }
                    Output.Write(v.From[i]);
                    if (v.FromAliases[i].Length > 0)
                    {
                        Output.Write(" as ");
                        Output.Write(v.FromAliases[i]);
                    }
                }
                if (v.WhereClause != null)
                {
                    if (IndentOutput)
                    {
                        Output.WriteLine();
                        WriteIndentString();
                        Output.Write("where ");
                    }
                    else
                    {
                        Output.Write(" where ");
                    }
                    v.WhereClause.Accept(this);
                }
                if (v.GroupByExpressions != null && v.GroupByExpressions.Count > 0)
                {
                    if (IndentOutput)
                    {
                        Output.WriteLine();
                        WriteIndentString();
                    }
                    else
                    {
                        Output.Write(' ');
                    }
                    Output.Write("group by ");
                    for (int i = 0; i < v.GroupByExpressions.Count; ++i)
                    {
                        if (i > 0)
                            Output.Write(", ");
                        v.GroupByExpressions[i].Accept(this);
                    }
                }
                if (v.Having != null)
                {
                    if (IndentOutput)
                    {
                        Output.WriteLine();
                        WriteIndentString();
                    }
                    else
                    {
                        Output.Write(' ');
                    }
                    Output.Write("having ");
                    v.Having.Accept(this);
                }
                if (v.OrderByExpressions != null && v.OrderByExpressions.Count > 0)
                {
                    if (IndentOutput)
                    {
                        Output.WriteLine();
                        WriteIndentString();
                    }
                    else
                    {
                        Output.Write(' ');
                    }
                    Output.Write("order by ");
                    for (int i = 0; i < v.OrderByExpressions.Count; ++i)
                    {
                        if (i > 0)
                            Output.Write(", ");
                        v.OrderByExpressions[i].Accept(this);
                        // each order-by item is followed by its direction (asc/desc)
                        Output.Write(' ');
                        Output.Write(v.OrderByOrder[i]);
                    }
                }
            }
            finally
            {
                IndentLevel--;
            }
            Output.Write(')');
        }
        /// <summary>Prints "RAWQUERY(text)" with the raw text passed through unescaped.</summary>
        public virtual void Visit(SoqlRawExpression v)
        {
            Output.Write("RAWQUERY(");
            Output.Write(v.Text);
            Output.Write(')');
        }
        /// <summary>Prints "case when cond then a [else b] end".</summary>
        public virtual void Visit(SoqlConditionalExpression v)
        {
            Output.Write("case when ");
            v.condition.Accept(this);
            Output.Write(" then ");
            v.ifTrue.Accept(this);
            if (v.ifFalse != null)
            {
                Output.Write(" else ");
                v.ifFalse.Accept(this);
            }
            Output.Write(" end");
        }
        // Characters with special meaning in SQL LIKE patterns; their presence
        // in the needle triggers the escape-clause path below.
        static readonly char[] LikeMetacharacters = { '%', '_', '[' };
        /// <summary>
        /// Prints a substring test as "(haystack like '%needle%')" (the leading/trailing
        /// '%' depend on the match position). Quotes in the needle are doubled; when the
        /// needle contains LIKE metacharacters they are escaped with '~' and an
        /// "escape '~'" clause is appended.
        /// </summary>
        public virtual void Visit(SoqlStringContainsExpression v)
        {
            Output.Write('(');
            v.haystack.Accept(this);
            Output.Write(" like '");
            if (v.position != SoqlStringContainsPosition.Start)
                Output.Write('%');
            string s = v.needle.Replace("'", "''");
            string suffix;
            if (s.IndexOfAny(LikeMetacharacters) >= 0)
            {
                // '~' itself must be escaped first so the later replacements stay unambiguous
                s = s.Replace("~", "~~").Replace("%", "~%").Replace("_", "~_").Replace("[", "~[");
                suffix = "' escape '~')";
            }
            else
            {
                suffix = "')";
            }
            Output.Write(s);
            if (v.position != SoqlStringContainsPosition.End)
                Output.Write('%');
            Output.Write(suffix);
        }
        /// <summary>Prints "cast(source as type)". Explicit interface implementation (not virtual).</summary>
        void Sooda.QL.ISoqlVisitor.Visit(SoqlCastExpression v)
        {
            Output.Write("cast(");
            v.source.Accept(this);
            Output.Write(" as ");
            Output.Write(v.type);
            Output.Write(')');
        }
        // Destination writer for all emitted text; set once in the constructor.
        public TextWriter Output;
        // Starts at -1 so the first (outermost) query body prints at level 0.
        public int IndentLevel = -1;
        // Number of spaces added per indent level.
        public int IndentStep = 4;
        /// <summary>Writes the leading spaces for the current indent level (no-op when not indenting).</summary>
        public void WriteIndentString()
        {
            if (IndentOutput)
            {
                for (int i = 0; i < IndentLevel * IndentStep; ++i)
                    Output.Write(' ');
            }
        }
        /// <summary>Prints a complete query expression to <see cref="Output"/>.</summary>
        public void PrintQuery(SoqlQueryExpression expr)
        {
            expr.Accept(this);
        }
        /// <summary>Prints an arbitrary expression to <see cref="Output"/>.</summary>
        public void PrintExpression(SoqlExpression expr)
        {
            expr.Accept(this);
        }
    }
}
| |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Concurrent;
using System.Threading;
namespace QuantConnect.Util
{
/// <summary>
/// Used to control the rate of some occurrence per unit of time.
/// </summary>
/// <see href="http://www.jackleitch.net/2010/10/better-rate-limiting-with-dot-net/"/>
/// <remarks>
/// <para>
/// To control the rate of an action using a <see cref="RateGate"/>,
/// code should simply call <see cref="WaitToProceed()"/> prior to
/// performing the action. <see cref="WaitToProceed()"/> will block
/// the current thread until the action is allowed based on the rate
/// limit.
/// </para>
/// <para>
/// This class is thread safe. A single <see cref="RateGate"/> instance
/// may be used to control the rate of an occurrence across multiple
/// threads.
/// </para>
/// </remarks>
    public class RateGate : IDisposable
    {
        // Semaphore used to count and limit the number of occurrences per
        // unit time.
        private readonly SemaphoreSlim _semaphore;
        // Times (in millisecond ticks) at which the semaphore should be exited.
        // Stored as raw Environment.TickCount values; comparisons use unchecked
        // subtraction so the int wraparound (~24.9 days) is handled correctly.
        private readonly ConcurrentQueue<int> _exitTimes;
        // Timer used to trigger exiting the semaphore.
        private readonly Timer _exitTimer;
        // Whether this instance is disposed.
        private bool _isDisposed;
        /// <summary>
        /// Number of occurrences allowed per unit of time.
        /// </summary>
        public int Occurrences
        {
            get; private set;
        }
        /// <summary>
        /// The length of the time unit, in milliseconds.
        /// </summary>
        public int TimeUnitMilliseconds
        {
            get; private set;
        }
        /// <summary>
        /// Flag indicating we are currently being rate limited
        /// </summary>
        /// <remarks>
        /// NOTE(review): this probes with <see cref="WaitToProceed(int)"/> using a zero
        /// timeout, so a successful (non-limited) read consumes one slot — confirm
        /// callers expect the getter to have that side effect.
        /// </remarks>
        public bool IsRateLimited
        {
            get { return !WaitToProceed(0); }
        }
        /// <summary>
        /// Initializes a <see cref="RateGate"/> with a rate of <paramref name="occurrences"/>
        /// per <paramref name="timeUnit"/>.
        /// </summary>
        /// <param name="occurrences">Number of occurrences allowed per unit of time.</param>
        /// <param name="timeUnit">Length of the time unit.</param>
        /// <exception cref="ArgumentOutOfRangeException">
        /// If <paramref name="occurrences"/> or <paramref name="timeUnit"/> is negative.
        /// </exception>
        public RateGate(int occurrences, TimeSpan timeUnit)
        {
            // Check the arguments.
            if (occurrences <= 0)
                throw new ArgumentOutOfRangeException(nameof(occurrences), "Number of occurrences must be a positive integer");
            // timeUnit != timeUnit.Duration() detects negative spans (Duration() is the absolute value).
            if (timeUnit != timeUnit.Duration())
                throw new ArgumentOutOfRangeException(nameof(timeUnit), "Time unit must be a positive span of time");
            if (timeUnit >= TimeSpan.FromMilliseconds(UInt32.MaxValue))
                throw new ArgumentOutOfRangeException(nameof(timeUnit), "Time unit must be less than 2^32 milliseconds");
            Occurrences = occurrences;
            TimeUnitMilliseconds = (int)timeUnit.TotalMilliseconds;
            // Create the semaphore, with the number of occurrences as the maximum count.
            _semaphore = new SemaphoreSlim(Occurrences, Occurrences);
            // Create a queue to hold the semaphore exit times.
            _exitTimes = new ConcurrentQueue<int>();
            // Create a timer to exit the semaphore. Use the time unit as the original
            // interval length because that's the earliest we will need to exit the semaphore.
            _exitTimer = new Timer(ExitTimerCallback, null, TimeUnitMilliseconds, -1);
        }
        // Callback for the exit timer that exits the semaphore based on exit times
        // in the queue and then sets the timer for the next exit time.
        // Credit to Jim: http://www.jackleitch.net/2010/10/better-rate-limiting-with-dot-net/#comment-3620
        // for providing the code below, fixing issue #3499 - https://github.com/QuantConnect/Lean/issues/3499
        private void ExitTimerCallback(object state)
        {
            try
            {
                // While there are exit times that are past due still in the queue,
                // exit the semaphore and dequeue the exit time.
                var exitTime = 0;
                var exitTimeValid = _exitTimes.TryPeek(out exitTime);
                while (exitTimeValid)
                {
                    // unchecked subtraction keeps the comparison valid across TickCount wraparound
                    if (unchecked(exitTime - Environment.TickCount) > 0)
                    {
                        break;
                    }
                    _semaphore.Release();
                    _exitTimes.TryDequeue(out exitTime);
                    exitTimeValid = _exitTimes.TryPeek(out exitTime);
                }
                // we are already holding the next item from the queue, do not peek again
                // although this exit time may have already passed by this stmt.
                var timeUntilNextCheck = exitTimeValid
                    ? Math.Min(TimeUnitMilliseconds, Math.Max(0, exitTime - Environment.TickCount))
                    : TimeUnitMilliseconds;
                _exitTimer.Change(timeUntilNextCheck, -1);
            }
            catch (Exception)
            {
                // can throw if called when disposing; deliberately swallowed —
                // there is no meaningful recovery inside a timer callback
            }
        }
        /// <summary>
        /// Blocks the current thread until allowed to proceed or until the
        /// specified timeout elapses.
        /// </summary>
        /// <param name="millisecondsTimeout">Number of milliseconds to wait, or -1 to wait indefinitely.</param>
        /// <returns>true if the thread is allowed to proceed, or false if timed out</returns>
        public bool WaitToProceed(int millisecondsTimeout)
        {
            // Check the arguments.
            if (millisecondsTimeout < -1)
                throw new ArgumentOutOfRangeException(nameof(millisecondsTimeout));
            CheckDisposed();
            // Block until we can enter the semaphore or until the timeout expires.
            var entered = _semaphore.Wait(millisecondsTimeout);
            // If we entered the semaphore, compute the corresponding exit time
            // and add it to the queue. unchecked: TickCount may wrap; the callback
            // compares with unchecked subtraction so this remains correct.
            if (entered)
            {
                var timeToExit = unchecked(Environment.TickCount + TimeUnitMilliseconds);
                _exitTimes.Enqueue(timeToExit);
            }
            return entered;
        }
        /// <summary>
        /// Blocks the current thread until allowed to proceed or until the
        /// specified timeout elapses.
        /// </summary>
        /// <param name="timeout">Maximum time to wait; negative values other than -1 ms are rejected by the int overload.</param>
        /// <returns>true if the thread is allowed to proceed, or false if timed out</returns>
        public bool WaitToProceed(TimeSpan timeout)
        {
            // NOTE(review): TotalMilliseconds larger than int.MaxValue overflows this
            // cast — acceptable for realistic timeouts, but worth confirming.
            return WaitToProceed((int)timeout.TotalMilliseconds);
        }
        /// <summary>
        /// Blocks the current thread indefinitely until allowed to proceed.
        /// </summary>
        public void WaitToProceed()
        {
            WaitToProceed(Timeout.Infinite);
        }
        // Throws an ObjectDisposedException if this object is disposed.
        private void CheckDisposed()
        {
            if (_isDisposed)
                throw new ObjectDisposedException("RateGate is already disposed");
        }
        /// <summary>
        /// Releases unmanaged resources held by an instance of this class.
        /// </summary>
        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }
        /// <summary>
        /// Releases unmanaged resources held by an instance of this class.
        /// </summary>
        /// <param name="isDisposing">Whether this object is being disposed.</param>
        protected virtual void Dispose(bool isDisposing)
        {
            if (!_isDisposed)
            {
                if (isDisposing)
                {
                    // The semaphore and timer both implement IDisposable and
                    // therefore must be disposed.
                    _semaphore.Dispose();
                    _exitTimer.Dispose();
                    _isDisposed = true;
                }
            }
        }
    }
}
| |
using System.Collections.Generic;
using System.IO;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using NetworkedPlanet.Brightstar;
using NetworkedPlanet.Brightstar.Storage;
using NetworkedPlanet.Rdf;
using VDS.RDF;
using VDS.RDF.Parsing;
using VDS.RDF.Query;
using System.Linq;
namespace NetworkedPlanet.Brightstar.Tests.Sparql11TestSuite {
[TestClass]
public partial class PropertyPath : SparqlTest {
public PropertyPath() : base()
{
}
[TestInitialize]
public void SetUp()
{
CreateStore();
}
[TestCleanup]
public void TearDown()
{
DeleteStore();
}
#region Test Methods
[TestMethod]
public void Pp01SimplePath() {
ImportData(@"property-path/pp01.ttl");
var result = ExecuteQuery(@"property-path/pp01.rq");
CheckResult(result, @"property-path/pp01.srx", false);
}
[TestMethod]
public void Pp02StarPath() {
ImportData(@"property-path/pp01.ttl");
var result = ExecuteQuery(@"property-path/pp02.rq");
CheckResult(result, @"property-path/pp02.srx", false);
}
[TestMethod]
public void Pp03SimplePathWithLoop() {
ImportData(@"property-path/pp03.ttl");
var result = ExecuteQuery(@"property-path/pp03.rq");
CheckResult(result, @"property-path/pp03.srx", false);
}
[TestMethod]
public void Pp04VariableLengthPathWithLoop() {
ImportData(@"property-path/pp03.ttl");
var result = ExecuteQuery(@"property-path/pp04.rq");
CheckResult(result, @"property-path/pp04.srx", false);
}
[TestMethod]
public void Pp05ZeroLengthPath() {
ImportData(@"property-path/pp05.ttl");
var result = ExecuteQuery(@"property-path/pp05.rq");
CheckResult(result, @"property-path/pp05.srx", false);
}
[TestMethod]
public void Pp08ReversePath() {
ImportData(@"property-path/pp08.ttl");
var result = ExecuteQuery(@"property-path/pp08.rq");
CheckResult(result, @"property-path/pp08.srx", false);
}
[TestMethod]
public void Pp09ReverseSequencePath() {
ImportData(@"property-path/pp09.ttl");
var result = ExecuteQuery(@"property-path/pp09.rq");
CheckResult(result, @"property-path/pp09.srx", false);
}
[TestMethod]
public void Pp10PathWithNegation() {
ImportData(@"property-path/pp10.ttl");
var result = ExecuteQuery(@"property-path/pp10.rq");
CheckResult(result, @"property-path/pp10.srx", false);
}
[TestMethod]
public void Pp11SimplePathAndTwoPathsToSameTargetNode() {
ImportData(@"property-path/pp11.ttl");
var result = ExecuteQuery(@"property-path/pp11.rq");
CheckResult(result, @"property-path/pp11.srx", false);
}
[TestMethod]
public void Pp12VariableLengthPathAndTwoPathsToSameTargetNode() {
ImportData(@"property-path/pp11.ttl");
var result = ExecuteQuery(@"property-path/pp12.rq");
CheckResult(result, @"property-path/pp12.srx", false);
}
[TestMethod]
public void Pp13ZeroLengthPathsWithLiterals() {
ImportData(@"property-path/pp13.ttl");
var result = ExecuteQuery(@"property-path/pp13.rq");
CheckResult(result, @"property-path/pp13.srx", false);
}
[TestMethod]
public void Pp14StarPathOverFoafKnows() {
ImportData(@"property-path/pp14.ttl");
var result = ExecuteQuery(@"property-path/pp14.rq");
CheckResult(result, @"property-path/pp14.srx", false);
}
[TestMethod]
public void Pp15ZeroLengthPathsOnAnEmptyGraph() {
ImportData(@"property-path/empty.ttl");
var result = ExecuteQuery(@"property-path/pp15.rq");
CheckResult(result, @"property-path/pp15.srx", false);
}
[TestMethod]
public void Pp16DuplicatePathsAndCyclesThroughFoafKnows_Asterix_() {
ImportData(@"property-path/pp16.ttl");
var result = ExecuteQuery(@"property-path/pp14.rq");
CheckResult(result, @"property-path/pp16.srx", false);
}
[TestMethod]
public void Pp20DiamondP2() {
ImportData(@"property-path/data-diamond.ttl");
var result = ExecuteQuery(@"property-path/path-2-1.rq");
CheckResult(result, @"property-path/diamond-1.srx", false);
}
[TestMethod]
public void Pp21DiamondP_Plus_() {
ImportData(@"property-path/data-diamond.ttl");
var result = ExecuteQuery(@"property-path/path-2-2.rq");
CheckResult(result, @"property-path/diamond-2.srx", false);
}
[TestMethod]
public void Pp22DiamondWithTailP3() {
ImportData(@"property-path/data-diamond-tail.ttl");
var result = ExecuteQuery(@"property-path/path-2-3.rq");
CheckResult(result, @"property-path/diamond-tail-1.srx", false);
}
[TestMethod]
public void Pp23DiamondWithTailP_Plus_() {
ImportData(@"property-path/data-diamond-tail.ttl");
var result = ExecuteQuery(@"property-path/path-2-2.rq");
CheckResult(result, @"property-path/diamond-tail-2.srx", false);
}
[TestMethod]
public void Pp24DiamondWithLoopP2() {
ImportData(@"property-path/data-diamond-loop.ttl");
var result = ExecuteQuery(@"property-path/path-2-1.rq");
CheckResult(result, @"property-path/diamond-loop-1.srx", false);
}
[TestMethod]
public void Pp25DiamondWithLoopP_Plus_() {
ImportData(@"property-path/data-diamond-loop.ttl");
var result = ExecuteQuery(@"property-path/path-2-2.rq");
CheckResult(result, @"property-path/diamond-loop-2.srx", false);
}
[TestMethod]
public void Pp26DiamondWithLoopP24() {
ImportData(@"property-path/data-diamond-loop.ttl");
var result = ExecuteQuery(@"property-path/path-3-1.rq");
CheckResult(result, @"property-path/diamond-loop-3.srx", false);
}
[TestMethod]
public void Pp27DiamondWithLoopP3() {
ImportData(@"property-path/data-diamond-loop.ttl");
var result = ExecuteQuery(@"property-path/path-3-2.rq");
CheckResult(result, @"property-path/diamond-loop-4.srx", false);
}
[TestMethod]
public void Pp28DiamondWithLoopPP_QuestionMark_() {
ImportData(@"property-path/data-diamond-loop.ttl");
var result = ExecuteQuery(@"property-path/path-3-3.rq");
CheckResult(result, @"property-path/diamond-loop-5.srx", false);
}
[TestMethod]
public void Pp29DiamondWithLoopP2() {
ImportData(@"property-path/data-diamond-loop.ttl");
var result = ExecuteQuery(@"property-path/path-3-4.rq");
CheckResult(result, @"property-path/diamond-loop-6.srx", false);
}
[TestMethod]
public void Pp30OperatorPrecedence1() {
ImportData(@"property-path/path-p1.ttl");
var result = ExecuteQuery(@"property-path/path-p1.rq");
CheckResult(result, @"property-path/path-p1.srx", false);
}
[TestMethod]
public void Pp31OperatorPrecedence2() {
ImportData(@"property-path/path-p1.ttl");
var result = ExecuteQuery(@"property-path/path-p2.rq");
CheckResult(result, @"property-path/path-p2.srx", false);
}
[TestMethod]
public void Pp32OperatorPrecedence3() {
ImportData(@"property-path/path-p3.ttl");
var result = ExecuteQuery(@"property-path/path-p3.rq");
CheckResult(result, @"property-path/path-p3.srx", false);
}
[TestMethod]
public void Pp33OperatorPrecedence4() {
ImportData(@"property-path/path-p3.ttl");
var result = ExecuteQuery(@"property-path/path-p4.rq");
CheckResult(result, @"property-path/path-p4.srx", false);
}
#endregion
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.