context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
#region License
//=============================================================================
// Vici Core - Productivity Library for .NET 3.5
//
// Copyright (c) 2008-2012 Philippe Leybaert
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//=============================================================================
#endregion
using System;
using System.Collections.Generic;
using System.Reflection;
using BEH = Vici.Core.Parser.BinaryExpressionHelper;
namespace Vici.Core.Parser
{
public class BinaryArithmicExpression : BinaryExpression
{
    /// <summary>
    /// One concrete implementation of a binary operator for a specific
    /// (operand1, operand2) type pair, together with its result type.
    /// </summary>
    private class OperatorMethod
    {
        public delegate object Action(string op, object v1, object v2, StringComparison stringComparison, Expression expr);

        public readonly Type Type1;
        public readonly Type Type2;
        public readonly Type ReturnType;
        public readonly Action Function;

        /// <summary>Both operands and the result all share the same type.</summary>
        public OperatorMethod(Type type, Action function)
        {
            Type1 = type;
            Type2 = type;
            ReturnType = type;
            Function = function;
        }

        public OperatorMethod(Type returnType, Type type1, Type type2, Action function)
        {
            ReturnType = returnType;
            Type1 = type1;
            Type2 = type2;
            Function = function;
        }
    }

    private readonly string _operator;

    public BinaryArithmicExpression(TokenPosition position, string op, Expression left, Expression right) : base(position, left, right)
    {
        _operator = op;
    }

    static BinaryArithmicExpression()
    {
        // Numeric implementations shared by the arithmetic operators (+ - * / %).
        // Order matters: FindOperatorMethod() returns the first match.
        OperatorMethod[] arithmeticMethods =
        {
            new OperatorMethod(typeof(int), BinaryExpressionHelper.CalcInt32),
            new OperatorMethod(typeof(uint), BinaryExpressionHelper.CalcUInt32),
            new OperatorMethod(typeof(long), BinaryExpressionHelper.CalcInt64),
            new OperatorMethod(typeof(ulong), BinaryExpressionHelper.CalcUInt64),
            new OperatorMethod(typeof(float), BinaryExpressionHelper.CalcFloat),
            new OperatorMethod(typeof(double), BinaryExpressionHelper.CalcDouble),
            new OperatorMethod(typeof(decimal), BinaryExpressionHelper.CalcDecimal)
        };

        // "+" additionally supports string concatenation (string+string,
        // string+object, object+string), tried after the numeric forms.
        List<OperatorMethod> additionMethods = new List<OperatorMethod>(arithmeticMethods);

        additionMethods.Add(new OperatorMethod(typeof(string), BinaryExpressionHelper.CalcString));
        additionMethods.Add(new OperatorMethod(typeof(string), typeof(string), typeof(object), BinaryExpressionHelper.CalcStringObject));
        additionMethods.Add(new OperatorMethod(typeof(string), typeof(object), typeof(string), BinaryExpressionHelper.CalcObjectString));

        // Shift operators: the right-hand operand is always int, as in C#.
        OperatorMethod[] shiftMethods =
        {
            new OperatorMethod(typeof(int), BinaryExpressionHelper.CalcInt32),
            new OperatorMethod(typeof(uint), typeof(uint), typeof(int), BinaryExpressionHelper.CalcUInt32_Int32),
            new OperatorMethod(typeof(long), typeof(long), typeof(int), BinaryExpressionHelper.CalcInt64_Int32),
            new OperatorMethod(typeof(ulong), typeof(ulong), typeof(int), BinaryExpressionHelper.CalcUInt64_Int32)
        };

        // (In)equality: numeric, string and bool comparisons, all returning bool.
        OperatorMethod[] equalityMethods =
        {
            new OperatorMethod(typeof(bool), typeof(int), typeof(int), BinaryExpressionHelper.CalcInt32),
            new OperatorMethod(typeof(bool), typeof(uint), typeof(uint), BinaryExpressionHelper.CalcUInt32),
            new OperatorMethod(typeof(bool), typeof(long), typeof(long), BinaryExpressionHelper.CalcInt64),
            new OperatorMethod(typeof(bool), typeof(ulong), typeof(ulong), BinaryExpressionHelper.CalcUInt64),
            new OperatorMethod(typeof(bool), typeof(float), typeof(float), BinaryExpressionHelper.CalcFloat),
            new OperatorMethod(typeof(bool), typeof(double), typeof(double), BinaryExpressionHelper.CalcDouble),
            new OperatorMethod(typeof(bool), typeof(decimal), typeof(decimal), BinaryExpressionHelper.CalcDecimal),
            new OperatorMethod(typeof(bool), typeof(string), typeof(string), BinaryExpressionHelper.CalcString),
            new OperatorMethod(typeof(bool), BinaryExpressionHelper.CalcBool)
        };

        // Relational operators: numeric comparisons returning bool.
        OperatorMethod[] relationalMethods =
        {
            new OperatorMethod(typeof(bool), typeof(int), typeof(int), BinaryExpressionHelper.CalcInt32),
            new OperatorMethod(typeof(bool), typeof(uint), typeof(uint), BinaryExpressionHelper.CalcUInt32),
            new OperatorMethod(typeof(bool), typeof(long), typeof(long), BinaryExpressionHelper.CalcInt64),
            new OperatorMethod(typeof(bool), typeof(ulong), typeof(ulong), BinaryExpressionHelper.CalcUInt64),
            new OperatorMethod(typeof(bool), typeof(float), typeof(float), BinaryExpressionHelper.CalcFloat),
            new OperatorMethod(typeof(bool), typeof(double), typeof(double), BinaryExpressionHelper.CalcDouble),
            new OperatorMethod(typeof(bool), typeof(decimal), typeof(decimal), BinaryExpressionHelper.CalcDecimal)
        };

        // Bitwise operators on integers, logical on bool.
        OperatorMethod[] bitwiseMethods =
        {
            new OperatorMethod(typeof(int), BinaryExpressionHelper.CalcInt32),
            new OperatorMethod(typeof(uint), BinaryExpressionHelper.CalcUInt32),
            new OperatorMethod(typeof(long), BinaryExpressionHelper.CalcInt64),
            new OperatorMethod(typeof(ulong), BinaryExpressionHelper.CalcUInt64),
            new OperatorMethod(typeof(bool), BinaryExpressionHelper.CalcBool)
        };

        _operatorMethods["+"] = additionMethods.ToArray();
        _operatorMethods["-"] = arithmeticMethods;
        _operatorMethods["*"] = arithmeticMethods;
        _operatorMethods["/"] = arithmeticMethods;
        _operatorMethods["%"] = arithmeticMethods;
        _operatorMethods["<<"] = shiftMethods;
        _operatorMethods[">>"] = shiftMethods;
        _operatorMethods["=="] = equalityMethods;
        _operatorMethods["!="] = equalityMethods;
        _operatorMethods["<"] = relationalMethods;
        _operatorMethods[">"] = relationalMethods;
        _operatorMethods["<="] = relationalMethods;
        _operatorMethods[">="] = relationalMethods;
        _operatorMethods["&"] = bitwiseMethods;
        _operatorMethods["|"] = bitwiseMethods;
        _operatorMethods["^"] = bitwiseMethods;

        // CLR names of user-defined operator overloads, used as a fallback
        // when no built-in implementation matches the operand types.
        _operatorOverloadNames["+"] = "op_Addition";
        _operatorOverloadNames["-"] = "op_Subtraction";
        _operatorOverloadNames["<"] = "op_LessThan";
        _operatorOverloadNames["<="] = "op_LessThanOrEqual";
        _operatorOverloadNames[">"] = "op_GreaterThan";
        _operatorOverloadNames[">="] = "op_GreaterThanOrEqual";
        _operatorOverloadNames["=="] = "op_Equality";
        _operatorOverloadNames["!="] = "op_Inequality";
    }

    static readonly Dictionary<string,OperatorMethod[]> _operatorMethods = new Dictionary<string, OperatorMethod[]>();
    static readonly Dictionary<string,string> _operatorOverloadNames = new Dictionary<string, string>();

    /// <summary>The operator token this expression applies (e.g. "+", "==").</summary>
    public string Operator
    {
        get { return _operator; }
    }

    /// <summary>
    /// Evaluates both operands, resolves an implementation for the operator
    /// (applying C#-style binary numeric promotion and user-defined operator
    /// overloads as fallbacks) and returns the result. Nullable operands
    /// yield a nullable result, which is null when either operand is null.
    /// </summary>
    /// <exception cref="IllegalOperandsException">
    /// No implementation exists for the operand types.
    /// </exception>
    public override ValueExpression Evaluate(IParserContext context)
    {
        ValueExpression[] values = new[] { Left.Evaluate(context), Right.Evaluate(context) };

        Type type1 = values[0].Type;
        Type type2 = values[1].Type;

        bool nullable1 = type1.Inspector().IsNullable;
        bool nullable2 = type2.Inspector().IsNullable;

        type1 = type1.Inspector().RealType;
        type2 = type2.Inspector().RealType;

        bool isNullable = (nullable1 || nullable2);

        OperatorMethod operatorMethod = FindOperatorMethod(type1, type2);

        if (operatorMethod == null)
        {
            // No exact match: promote both operands following the C# binary
            // numeric promotion rules, then retry.
            Type promotionType = null;

            if (type1 == typeof(decimal) || type2 == typeof(decimal))
                promotionType = typeof(decimal);
            else if (type1 == typeof(double) || type2 == typeof(double))
                promotionType = typeof(double);
            else if (type1 == typeof(float) || type2 == typeof(float))
                promotionType = typeof(float);
            else if (type1 == typeof(ulong) || type2 == typeof(ulong))
                promotionType = typeof(ulong);
            else if (type1 == typeof(long) || type2 == typeof(long))
                promotionType = typeof(long);
            else if ((type1 == typeof(uint) || type2 == typeof(uint)) && (type1 == typeof(sbyte) || type2 == typeof(sbyte) || type1 == typeof(short) || type2 == typeof(short) || type1 == typeof(int) || type2 == typeof(int)))
                promotionType = typeof(long); // uint mixed with a signed type widens to long (C# rules).
                                              // NOTE: parentheses added — the original condition let && bind
                                              // tighter than ||, promoting e.g. uint+byte to long instead of uint.
            else if (type1 == typeof(uint) || type2 == typeof(uint))
                promotionType = typeof(uint);
            else if (type1.Inspector().IsPrimitive && type2.Inspector().IsPrimitive && type1 != typeof(bool) && type2 != typeof(bool))
                promotionType = typeof(int); // remaining small integer types all widen to int

            if (promotionType != null)
            {
                type1 = promotionType;
                type2 = promotionType;
            }

            operatorMethod = FindOperatorMethod(type1, type2);
        }

        if (operatorMethod == null)
        {
            // Fall back to a user-defined operator overload on either operand
            // type. TryGetValue instead of the indexer: operators without an
            // overload name ("*", "<<", "&", ...) previously threw
            // KeyNotFoundException here instead of IllegalOperandsException.
            string overloadName;

            if (_operatorOverloadNames.TryGetValue(_operator, out overloadName))
            {
                MethodInfo customOperatorMethod = type1.Inspector().GetMethod(overloadName, new[] { type1, type2 });

                if (customOperatorMethod == null)
                    customOperatorMethod = type2.Inspector().GetMethod(overloadName, new[] { type1, type2 });

                if (customOperatorMethod != null)
                {
                    return new ValueExpression(TokenPosition, customOperatorMethod.Invoke(null, new[] { values[0].Value, values[1].Value }), customOperatorMethod.ReturnType);
                }
            }

            // (In)equality always has a generic object-based fallback.
            if (_operator == "==" || _operator == "!=")
                return new ValueExpression(TokenPosition, BinaryExpressionHelper.CalcObject(_operator, values[0].Value, values[1].Value, context.StringComparison, this), typeof(bool));

            throw new IllegalOperandsException("Operator " + _operator + " is not supported on " + values[0] + " and " + values[1], this);
        }

        Type returnType = operatorMethod.ReturnType;

        if (isNullable)
        {
            returnType = typeof(Nullable<>).MakeGenericType(returnType);

            //TODO: check specs for bool? values
            if (values[0].Value == null || values[1].Value == null)
                return new ValueExpression(TokenPosition, null, returnType);
        }

        object value1 = Convert.ChangeType(values[0].Value, operatorMethod.Type1, null);
        object value2 = Convert.ChangeType(values[1].Value, operatorMethod.Type2, null);

        return new ValueExpression(TokenPosition, operatorMethod.Function(_operator, value1, value2, context.StringComparison, this), returnType);
    }

    /// <summary>
    /// Returns the first registered implementation of the current operator
    /// whose operand types match (exactly or by assignability), or null.
    /// </summary>
    private OperatorMethod FindOperatorMethod(Type type1, Type type2)
    {
        OperatorMethod[] operatorMethods;

        // Guard against unknown operators instead of letting the indexer throw.
        if (!_operatorMethods.TryGetValue(_operator, out operatorMethods))
            return null;

        foreach (OperatorMethod operatorMethod in operatorMethods)
        {
            bool sameType1 = type1 == operatorMethod.Type1;
            bool sameType2 = type2 == operatorMethod.Type2;

            bool canConvert1 = operatorMethod.Type1.Inspector().IsAssignableFrom(type1);
            bool canConvert2 = operatorMethod.Type2.Inspector().IsAssignableFrom(type2);

            if ((sameType1 || canConvert1) && (sameType2 || canConvert2))
            {
                return operatorMethod;
            }
        }

        return null;
    }

#if DEBUG
    public override string ToString()
    {
        return "(" + Left + " " + _operator + " " + Right + ")";
    }
#endif
}
}
| |
/*
* PasswordDeriveBytes.cs - Implementation of the
* "System.Security.Cryptography.PasswordDeriveBytes" class.
*
* Copyright (C) 2002 Southern Storm Software, Pty Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
namespace System.Security.Cryptography
{
#if CONFIG_CRYPTO
using System;
using System.Text;
// Note: the implementation of this class is based on PKCS #5 version 2.0.
// Note: the implementation of this class is based on PKCS #5 version 2.0.
public class PasswordDeriveBytes : DeriveBytes
{
    // Internal state.
    private String strPassword;       // password text, hashed as UTF-8
    internal byte[] rgbSalt;          // optional salt, hashed after the password
    private String strHashName;       // hash algorithm name; defaults to "MD5"
    internal int iterations;          // PKCS #5 iteration count
    private HashAlgorithm hashAlgorithm;
    private int blockNum, posn, size; // current block number, read position, block size in BYTES
    private byte[] block;             // most recently generated hash block

    // Constructors.
    public PasswordDeriveBytes(String strPassword, byte[] rgbSalt)
        : this(strPassword, rgbSalt, null, 0, null) {}

    public PasswordDeriveBytes(String strPassword, byte[] rgbSalt,
                               CspParameters cspParams)
        : this(strPassword, rgbSalt, null, 0, cspParams) {}

    public PasswordDeriveBytes(String strPassword, byte[] rgbSalt,
                               String strHashName, int iterations)
        : this(strPassword, rgbSalt, strHashName, iterations, null) {}

    public PasswordDeriveBytes(String strPassword, byte[] rgbSalt,
                               String strHashName, int iterations,
                               CspParameters cspParams)
    {
        // cspParams is accepted for API compatibility but not used
        // by this managed implementation.
        this.strPassword = strPassword;
        this.rgbSalt = rgbSalt;
        this.strHashName = strHashName;
        this.iterations = iterations;
    }

    // Destructor: scrub the key material from memory.
    ~PasswordDeriveBytes()
    {
        blockNum = 0;
        if(block != null)
        {
            Array.Clear(block, 0, block.Length);
        }
    }

    // Get or set the name of the hash algorithm.
    // The name may only be set once (before key generation starts).
    public String HashName
    {
        get
        {
            return strHashName;
        }
        set
        {
            if(strHashName == null)
            {
                strHashName = value;
            }
            else
            {
                throw new CryptographicException
                    (_("Crypto_HashAlreadySet"));
            }
        }
    }

    // Get or set the iteration count. May only be set once.
    public int IterationCount
    {
        get
        {
            return iterations;
        }
        set
        {
            if(iterations == 0)
            {
                iterations = value;
            }
            else
            {
                throw new CryptographicException
                    (_("Crypto_CountAlreadySet"));
            }
        }
    }

    // Get or set the salt. May only be set once.
    public byte[] Salt
    {
        get
        {
            return rgbSalt;
        }
        set
        {
            if(rgbSalt == null)
            {
                rgbSalt = value;
            }
            else
            {
                throw new CryptographicException
                    (_("Crypto_SaltAlreadySet"));
            }
        }
    }

    // Derive a key for a specific cryptographic algorithm.
    public byte[] CryptDeriveKey(String algname, String alghashname,
                                 int keySize, byte[] rgbIV)
    {
        if((algname == "DES" || algname == "RC2") &&
           alghashname == "MD5" && keySize == 8)
        {
            // Use the older PKCS #5 password generation routine:
            // MD5(password || salt), re-hashed (iterations - 1) times;
            // the first 8 bytes become the key, the next 8 the IV.
            MD5 md5 = new MD5CryptoServiceProvider();
            if(strPassword != null)
            {
                byte[] pwd = Encoding.UTF8.GetBytes(strPassword);
                md5.InternalHashCore(pwd, 0, pwd.Length);
                Array.Clear(pwd, 0, pwd.Length);
            }
            if(rgbSalt != null)
            {
                md5.InternalHashCore(rgbSalt, 0, rgbSalt.Length);
            }
            byte[] tempHash = md5.InternalHashFinal();
            md5.Initialize();
            int count = iterations;
            while(count > 1)
            {
                md5.InternalHashCore(tempHash, 0, tempHash.Length);
                Array.Clear(tempHash, 0, tempHash.Length);
                tempHash = md5.InternalHashFinal();
                md5.Initialize();
                --count;
            }
            byte[] key = new byte [8];
            Array.Copy(tempHash, 0, key, 0, 8);
            if(rgbIV != null)
            {
                Array.Copy(tempHash, 8, rgbIV, 0, 8);
            }
            Array.Clear(tempHash, 0, tempHash.Length);
            return key;
        }
        else
        {
            // Use the newer PKCS #5 password generation routine.
            Reset();
            if(alghashname != null)
            {
                strHashName = alghashname;
            }
            byte[] result = GetBytes(keySize);
            if(rgbIV != null)
            {
                byte[] iv = GetBytes(rgbIV.Length);
                Array.Copy(iv, 0, rgbIV, 0, rgbIV.Length);
                Array.Clear(iv, 0, iv.Length);
            }
            return result;
        }
    }

    // Get the pseudo-random key bytes.
    public override byte[] GetBytes(int cb)
    {
        // Initialize the pseudo-random generator.
        if(hashAlgorithm == null)
        {
            if(strHashName == null)
            {
                strHashName = "MD5";
            }
            hashAlgorithm = HashAlgorithm.Create(strHashName);
            blockNum = 1;
            // BUGFIX: HashSize is reported in bits; the block buffer
            // produced below is (HashSize / 8) bytes long. Using the bit
            // count here made every Array.Copy below read past the block.
            size = (hashAlgorithm.HashSize / 8);
            posn = size;
        }

        // Allocate the result array and then fill it.
        byte[] result = new byte [cb];
        int index = 0;
        int templen;
        while(cb > 0)
        {
            // Copy existing data from the previous block.
            if(posn < size)
            {
                templen = (size - posn);
                if(cb < templen)
                {
                    templen = cb;
                }
                Array.Copy(block, posn, result, index, templen);
                cb -= templen;
                // BUGFIX: both cursors must advance by the amount copied;
                // the original decremented index and discarded the rest of
                // the block (index -= templen; posn = size;), corrupting
                // the output and underflowing the result index.
                index += templen;
                posn += templen;
                if(cb <= 0)
                {
                    break;
                }
            }

            // Generate a new block: H(password || salt || blockNum),
            // then fold in (iterations - 1) further hash rounds by XOR.
            if(strPassword != null)
            {
                byte[] pwd = Encoding.UTF8.GetBytes(strPassword);
                hashAlgorithm.InternalHashCore(pwd, 0, pwd.Length);
                Array.Clear(pwd, 0, pwd.Length);
            }
            if(rgbSalt != null)
            {
                hashAlgorithm.InternalHashCore
                    (rgbSalt, 0, rgbSalt.Length);
            }
            byte[] numbuf = new byte [4];
            numbuf[0] = (byte)(blockNum >> 24);   // big-endian block counter
            numbuf[1] = (byte)(blockNum >> 16);
            numbuf[2] = (byte)(blockNum >> 8);
            numbuf[3] = (byte)blockNum;
            hashAlgorithm.InternalHashCore(numbuf, 0, 4);
            Array.Clear(numbuf, 0, numbuf.Length);
            byte[] lastHash = hashAlgorithm.InternalHashFinal();
            hashAlgorithm.Initialize();
            templen = iterations;
            byte[] temphash;
            while(templen > 1)
            {
                hashAlgorithm.InternalHashCore
                    (lastHash, 0, lastHash.Length);
                temphash = hashAlgorithm.InternalHashFinal();
                hashAlgorithm.Initialize();
                for(int tempindex = 0; tempindex < lastHash.Length;
                    ++tempindex)
                {
                    lastHash[tempindex] ^= temphash[tempindex];
                }
                Array.Clear(temphash, 0, temphash.Length);
                --templen;
            }
            if(block != null)
            {
                // Scrub the previous block before replacing it.
                Array.Clear(block, 0, block.Length);
            }
            block = lastHash;
            ++blockNum;
            posn = 0;
        }

        // Return the result array to the caller.
        return result;
    }

    // Reset the state, scrubbing any generated key material.
    public override void Reset()
    {
        hashAlgorithm = null;
        blockNum = 0;
        if(block != null)
        {
            Array.Clear(block, 0, block.Length);
        }
    }

}; // class PasswordDeriveBytes
#endif // CONFIG_CRYPTO
}; // namespace System.Security.Cryptography
| |
using System;
using System.Collections.Generic;
using System.Net;
using System.Threading;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Documents;
using System.Windows.Ink;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Animation;
using System.Windows.Media.Imaging;
using System.Windows.Shapes;
using System.Windows.Resources;
using Microsoft.Phone.Tasks;
using Microsoft.Devices;
using rhoruntime;
using ZXing;
namespace rho {
namespace BarcodeImpl
{
public class Barcode : BarcodeBase
{
public static int vibrateTimeMs = 100;
private BarcodeReaderLib.OpticalReaderTask _barcodeScanTask = null;
private IBarcodeReader _barcodeReader = new BarcodeReader();
static EventWaitHandle _waitHandle = new AutoResetEvent(false);
static String _recognizeResult = "";
private IMethodResult _methodResult = null;
private string _effectiveRubyCallbackURL = null;
private String _prevScanResult = "";
public Barcode()
{
// Attach to the optical reader task exposed by BarcodeReaderLib:
// reuse the existing Instance when one is already active (presumably a
// process-wide singleton -- TODO confirm against BarcodeReaderLib),
// otherwise create a fresh task.
if (BarcodeReaderLib.OpticalReaderTask.Instance == null)
_barcodeScanTask = new BarcodeReaderLib.OpticalReaderTask();
else
_barcodeScanTask = BarcodeReaderLib.OpticalReaderTask.Instance;
// Scan results are delivered asynchronously through the Completed event.
_barcodeScanTask.Completed += BarcodeScanTask_Completed;
}
// Hooks this instance up to its native counterpart, then forces the
// "scannerType" property to "camera" (the only scanner type this
// implementation drives -- it wraps a camera-based ZXing reader).
public override void setNativeImpl(string strID, long native)
{
base.setNativeImpl(strID, native);
IMethodResult pResult = new CMethodResultImpl();
setProperty("scannerType", "camera", pResult);
}
public override void getAutoEnter(IMethodResult oResult) { }
public override void setAutoEnter(bool autoEnter, IMethodResult oResult) { }
public override void getAutoTab(IMethodResult oResult) { }
public override void setAutoTab(bool autoTab, IMethodResult oResult) { }
public override void getHapticFeedback(IMethodResult oResult) { }
public override void setHapticFeedback(bool hapticFeedback, IMethodResult oResult) { }
public override void getLinearSecurityLevel(IMethodResult oResult) { }
public override void setLinearSecurityLevel(string linearSecurityLevel, IMethodResult oResult) { }
public override void getScanTimeout(IMethodResult oResult) { }
public override void setScanTimeout(int scanTimeout, IMethodResult oResult) { }
public override void getRasterMode(IMethodResult oResult) { }
public override void setRasterMode(string rasterMode, IMethodResult oResult) { }
public override void getRasterHeight(IMethodResult oResult) { }
public override void setRasterHeight(int rasterHeight, IMethodResult oResult) { }
public override void getAimType(IMethodResult oResult) { }
public override void setAimType(string aimType, IMethodResult oResult) { }
public override void getTimedAimDuration(IMethodResult oResult) { }
public override void setTimedAimDuration(int timedAimDuration, IMethodResult oResult) { }
public override void getSameSymbolTimeout(IMethodResult oResult) { }
public override void setSameSymbolTimeout(int sameSymbolTimeout, IMethodResult oResult) { }
public override void getDifferentSymbolTimeout(IMethodResult oResult) { }
public override void setDifferentSymbolTimeout(int differentSymbolTimeout, IMethodResult oResult) { }
public override void getAimMode(IMethodResult oResult) { }
public override void setAimMode(string aimMode, IMethodResult oResult) { }
public override void getPicklistMode(IMethodResult oResult) { }
public override void setPicklistMode(string picklistMode, IMethodResult oResult) { }
public override void getViewfinderMode(IMethodResult oResult) { }
public override void setViewfinderMode(string viewfinderMode, IMethodResult oResult) { }
public override void getViewfinderX(IMethodResult oResult) { }
public override void setViewfinderX(int viewfinderX, IMethodResult oResult) { }
public override void getViewfinderY(IMethodResult oResult) { }
public override void setViewfinderY(int viewfinderY, IMethodResult oResult) { }
public override void getViewfinderWidth(IMethodResult oResult) { }
public override void setViewfinderWidth(int viewfinderWidth, IMethodResult oResult) { }
public override void getViewfinderHeight(IMethodResult oResult) { }
public override void setViewfinderHeight(int viewfinderHeight, IMethodResult oResult) { }
public override void getViewfinderFeedback(IMethodResult oResult) { }
public override void setViewfinderFeedback(string viewfinderFeedback, IMethodResult oResult) { }
public override void getViewfinderFeedbackTime(IMethodResult oResult) { }
public override void setViewfinderFeedbackTime(int viewfinderFeedbackTime, IMethodResult oResult) { }
public override void getFocusMode(IMethodResult oResult) { }
public override void setFocusMode(string focusMode, IMethodResult oResult) { }
public override void getIlluminationMode(IMethodResult oResult) { }
public override void setIlluminationMode(string illuminationMode, IMethodResult oResult) { }
public override void getDpmMode(IMethodResult oResult) { }
public override void setDpmMode(bool dpmMode, IMethodResult oResult) { }
public override void getInverse1dMode(IMethodResult oResult) { }
public override void setInverse1dMode(string inverse1dMode, IMethodResult oResult) { }
public override void getPoorQuality1dMode(IMethodResult oResult) { }
public override void setPoorQuality1dMode(bool poorQuality1dMode, IMethodResult oResult) { }
public override void getBeamWidth(IMethodResult oResult) { }
public override void setBeamWidth(string beamWidth, IMethodResult oResult) { }
public override void getDbpMode(IMethodResult oResult) { }
public override void setDbpMode(string dbpMode, IMethodResult oResult) { }
public override void getKlasseEins(IMethodResult oResult) { }
public override void setKlasseEins(bool klasseEins, IMethodResult oResult) { }
public override void getAdaptiveScanning(IMethodResult oResult) { }
public override void setAdaptiveScanning(bool adaptiveScanning, IMethodResult oResult) { }
public override void getBidirectionalRedundancy(IMethodResult oResult) { }
public override void setBidirectionalRedundancy(bool bidirectionalRedundancy, IMethodResult oResult) { }
public override void getBarcodeDataFormat(IMethodResult oResult) { }
public override void setBarcodeDataFormat(string barcodeDataFormat, IMethodResult oResult) { }
public override void getDataBufferSize(IMethodResult oResult) { }
public override void setDataBufferSize(int dataBufferSize, IMethodResult oResult) { }
public override void getConnectionIdleTimeout(IMethodResult oResult) { }
public override void setConnectionIdleTimeout(int connectionIdleTimeout, IMethodResult oResult) { }
public override void getDisconnectBtOnDisable(IMethodResult oResult) { }
public override void setDisconnectBtOnDisable(bool disconnectBtOnDisable, IMethodResult oResult) { }
public override void getDisplayBtAddressBarcodeOnEnable(IMethodResult oResult) { }
public override void setDisplayBtAddressBarcodeOnEnable(bool displayBtAddressBarcodeOnEnable, IMethodResult oResult) { }
public override void getEnableTimeout(IMethodResult oResult) { }
public override void setEnableTimeout(int enableTimeout, IMethodResult oResult) { }
public override void getFriendlyName(IMethodResult oResult) { }
public override void getLcdMode(IMethodResult oResult) { }
public override void setLcdMode(bool lcdMode, IMethodResult oResult) { }
public override void getLowBatteryScan(IMethodResult oResult) { }
public override void setLowBatteryScan(bool lowBatteryScan, IMethodResult oResult) { }
public override void getTriggerConnected(IMethodResult oResult) { }
public override void setTriggerConnected(bool triggerConnected, IMethodResult oResult) { }
public override void getDisableScannerDuringNavigate(IMethodResult oResult) { }
public override void setDisableScannerDuringNavigate(bool disableScannerDuringNavigate, IMethodResult oResult) { }
public override void getDecodeVolume(IMethodResult oResult) { }
public override void setDecodeVolume(int decodeVolume, IMethodResult oResult) { }
public override void getDecodeDuration(IMethodResult oResult) { }
public override void setDecodeDuration(int decodeDuration, IMethodResult oResult) { }
public override void getDecodeFrequency(IMethodResult oResult) { }
public override void setDecodeFrequency(int decodeFrequency, IMethodResult oResult) { }
public override void getInvalidDecodeFrequency(IMethodResult oResult) { }
public override void setInvalidDecodeFrequency(int invalidDecodeFrequency, IMethodResult oResult) { }
public override void getDecodeSound(IMethodResult oResult) { }
public override void setDecodeSound(string decodeSound, IMethodResult oResult) { }
public override void getInvalidDecodeSound(IMethodResult oResult) { }
public override void setInvalidDecodeSound(string invalidDecodeSound, IMethodResult oResult) { }
// ---- Symbology property stubs, part 1 (AllDecoders through I2of5).
// Getter/setter pairs for per-symbology enable flags and decode parameters
// (min/max length, redundancy, check digits, security level). All no-ops on
// this platform — presumably the camera-based reader does not support
// per-symbology tuning; TODO confirm against the generated base class. ----
public override void getAllDecoders(IMethodResult oResult) { }
public override void setAllDecoders(bool allDecoders, IMethodResult oResult) { }
public override void getAztec(IMethodResult oResult) { }
public override void setAztec(bool aztec, IMethodResult oResult) { }
public override void getChinese2of5(IMethodResult oResult) { }
public override void setChinese2of5(bool chinese2of5, IMethodResult oResult) { }
// Codabar
public override void getCodabar(IMethodResult oResult) { }
public override void setCodabar(bool codabar, IMethodResult oResult) { }
public override void getCodabarClsiEditing(IMethodResult oResult) { }
public override void setCodabarClsiEditing(bool codabarClsiEditing, IMethodResult oResult) { }
public override void getCodabarMaxLength(IMethodResult oResult) { }
public override void setCodabarMaxLength(int codabarMaxLength, IMethodResult oResult) { }
public override void getCodabarMinLength(IMethodResult oResult) { }
public override void setCodabarMinLength(int codabarMinLength, IMethodResult oResult) { }
public override void getCodabarNotisEditing(IMethodResult oResult) { }
public override void setCodabarNotisEditing(bool codabarNotisEditing, IMethodResult oResult) { }
public override void getCodabarRedundancy(IMethodResult oResult) { }
public override void setCodabarRedundancy(bool codabarRedundancy, IMethodResult oResult) { }
// Code 11
public override void getCode11(IMethodResult oResult) { }
public override void setCode11(bool code11, IMethodResult oResult) { }
public override void getCode11checkDigitCount(IMethodResult oResult) { }
public override void setCode11checkDigitCount(string code11checkDigitCount, IMethodResult oResult) { }
public override void getCode11maxLength(IMethodResult oResult) { }
public override void setCode11maxLength(int code11maxLength, IMethodResult oResult) { }
public override void getCode11minLength(IMethodResult oResult) { }
public override void setCode11minLength(int code11minLength, IMethodResult oResult) { }
public override void getCode11redundancy(IMethodResult oResult) { }
public override void setCode11redundancy(bool code11redundancy, IMethodResult oResult) { }
public override void getCode11reportCheckDigit(IMethodResult oResult) { }
public override void setCode11reportCheckDigit(bool code11reportCheckDigit, IMethodResult oResult) { }
// Code 128
public override void getCode128(IMethodResult oResult) { }
public override void setCode128(bool code128, IMethodResult oResult) { }
public override void getCode128checkIsBtTable(IMethodResult oResult) { }
public override void setCode128checkIsBtTable(bool code128checkIsBtTable, IMethodResult oResult) { }
public override void getCode128ean128(IMethodResult oResult) { }
public override void setCode128ean128(bool code128ean128, IMethodResult oResult) { }
public override void getCode128isbt128(IMethodResult oResult) { }
public override void setCode128isbt128(bool code128isbt128, IMethodResult oResult) { }
public override void getCode128isbt128ConcatMode(IMethodResult oResult) { }
public override void setCode128isbt128ConcatMode(string code128isbt128ConcatMode, IMethodResult oResult) { }
public override void getCode128maxLength(IMethodResult oResult) { }
public override void setCode128maxLength(int code128maxLength, IMethodResult oResult) { }
public override void getCode128minLength(IMethodResult oResult) { }
public override void setCode128minLength(int code128minLength, IMethodResult oResult) { }
public override void getCode128other128(IMethodResult oResult) { }
public override void setCode128other128(bool code128other128, IMethodResult oResult) { }
public override void getCode128redundancy(IMethodResult oResult) { }
public override void setCode128redundancy(bool code128redundancy, IMethodResult oResult) { }
public override void getCode128securityLevel(IMethodResult oResult) { }
public override void setCode128securityLevel(int code128securityLevel, IMethodResult oResult) { }
// Composite codes
public override void getCompositeAb(IMethodResult oResult) { }
public override void setCompositeAb(bool compositeAb, IMethodResult oResult) { }
public override void getCompositeAbUccLinkMode(IMethodResult oResult) { }
public override void setCompositeAbUccLinkMode(string compositeAbUccLinkMode, IMethodResult oResult) { }
public override void getCompositeAbUseUpcPreambleCheckDigitRules(IMethodResult oResult) { }
public override void setCompositeAbUseUpcPreambleCheckDigitRules(bool compositeAbUseUpcPreambleCheckDigitRules, IMethodResult oResult) { }
public override void getCompositeC(IMethodResult oResult) { }
public override void setCompositeC(bool compositeC, IMethodResult oResult) { }
// Code 39
public override void getCode39(IMethodResult oResult) { }
public override void setCode39(bool code39, IMethodResult oResult) { }
public override void getCode39code32Prefix(IMethodResult oResult) { }
public override void setCode39code32Prefix(bool code39code32Prefix, IMethodResult oResult) { }
public override void getCode39convertToCode32(IMethodResult oResult) { }
public override void setCode39convertToCode32(bool code39convertToCode32, IMethodResult oResult) { }
public override void getCode39fullAscii(IMethodResult oResult) { }
public override void setCode39fullAscii(bool code39fullAscii, IMethodResult oResult) { }
public override void getCode39maxLength(IMethodResult oResult) { }
public override void setCode39maxLength(int code39maxLength, IMethodResult oResult) { }
public override void getCode39minLength(IMethodResult oResult) { }
public override void setCode39minLength(int code39minLength, IMethodResult oResult) { }
public override void getCode39redundancy(IMethodResult oResult) { }
public override void setCode39redundancy(bool code39redundancy, IMethodResult oResult) { }
public override void getCode39reportCheckDigit(IMethodResult oResult) { }
public override void setCode39reportCheckDigit(bool code39reportCheckDigit, IMethodResult oResult) { }
public override void getCode39securityLevel(IMethodResult oResult) { }
public override void setCode39securityLevel(int code39securityLevel, IMethodResult oResult) { }
public override void getCode39verifyCheckDigit(IMethodResult oResult) { }
public override void setCode39verifyCheckDigit(bool code39verifyCheckDigit, IMethodResult oResult) { }
// Code 93
public override void getCode93(IMethodResult oResult) { }
public override void setCode93(bool code93, IMethodResult oResult) { }
public override void getCode93maxLength(IMethodResult oResult) { }
public override void setCode93maxLength(int code93maxLength, IMethodResult oResult) { }
public override void getCode93minLength(IMethodResult oResult) { }
public override void setCode93minLength(int code93minLength, IMethodResult oResult) { }
public override void getCode93redundancy(IMethodResult oResult) { }
public override void setCode93redundancy(bool code93redundancy, IMethodResult oResult) { }
// Discrete 2 of 5
public override void getD2of5(IMethodResult oResult) { }
public override void setD2of5(bool d2of5, IMethodResult oResult) { }
public override void getD2of5maxLength(IMethodResult oResult) { }
public override void setD2of5maxLength(int d2of5maxLength, IMethodResult oResult) { }
public override void getD2of5minLength(IMethodResult oResult) { }
public override void setD2of5minLength(int d2of5minLength, IMethodResult oResult) { }
public override void getD2of5redundancy(IMethodResult oResult) { }
public override void setD2of5redundancy(bool d2of5redundancy, IMethodResult oResult) { }
// Data Matrix / EAN
public override void getDatamatrix(IMethodResult oResult) { }
public override void setDatamatrix(bool datamatrix, IMethodResult oResult) { }
public override void getEan13(IMethodResult oResult) { }
public override void setEan13(bool ean13, IMethodResult oResult) { }
public override void getEan8(IMethodResult oResult) { }
public override void setEan8(bool ean8, IMethodResult oResult) { }
public override void getEan8convertToEan13(IMethodResult oResult) { }
public override void setEan8convertToEan13(bool ean8convertToEan13, IMethodResult oResult) { }
// Interleaved 2 of 5
public override void getI2of5(IMethodResult oResult) { }
public override void setI2of5(bool i2of5, IMethodResult oResult) { }
public override void getI2of5convertToEan13(IMethodResult oResult) { }
public override void setI2of5convertToEan13(bool i2of5convertToEan13, IMethodResult oResult) { }
public override void getI2of5maxLength(IMethodResult oResult) { }
public override void setI2of5maxLength(int i2of5maxLength, IMethodResult oResult) { }
public override void getI2of5minLength(IMethodResult oResult) { }
public override void setI2of5minLength(int i2of5minLength, IMethodResult oResult) { }
public override void getI2of5redundancy(IMethodResult oResult) { }
public override void setI2of5redundancy(bool i2of5redundancy, IMethodResult oResult) { }
public override void getI2of5reportCheckDigit(IMethodResult oResult) { }
public override void setI2of5reportCheckDigit(bool i2of5reportCheckDigit, IMethodResult oResult) { }
public override void getI2of5verifyCheckDigit(IMethodResult oResult) { }
public override void setI2of5verifyCheckDigit(string i2of5verifyCheckDigit, IMethodResult oResult) { }
// ---- Symbology property stubs, part 2 (Korean 3-of-5 through Webcode),
// including postal codes, UPC/EAN family options and signature capture.
// All no-ops on this platform; setters discard their value and getters
// return nothing through oResult. ----
public override void getKorean3of5(IMethodResult oResult) { }
public override void setKorean3of5(bool korean3of5, IMethodResult oResult) { }
public override void getKorean3of5redundancy(IMethodResult oResult) { }
public override void setKorean3of5redundancy(bool korean3of5redundancy, IMethodResult oResult) { }
public override void getKorean3of5maxLength(IMethodResult oResult) { }
public override void setKorean3of5maxLength(int korean3of5maxLength, IMethodResult oResult) { }
public override void getKorean3of5minLength(IMethodResult oResult) { }
public override void setKorean3of5minLength(int korean3of5minLength, IMethodResult oResult) { }
// Macro PDF417
public override void getMacroPdf(IMethodResult oResult) { }
public override void setMacroPdf(bool macroPdf, IMethodResult oResult) { }
public override void getMacroPdfBufferLabels(IMethodResult oResult) { }
public override void setMacroPdfBufferLabels(bool macroPdfBufferLabels, IMethodResult oResult) { }
public override void getMacroPdfConvertToPdf417(IMethodResult oResult) { }
public override void setMacroPdfConvertToPdf417(bool macroPdfConvertToPdf417, IMethodResult oResult) { }
public override void getMacroPdfExclusive(IMethodResult oResult) { }
public override void setMacroPdfExclusive(bool macroPdfExclusive, IMethodResult oResult) { }
// Macro Micro PDF
public override void getMacroMicroPdf(IMethodResult oResult) { }
public override void setMacroMicroPdf(bool macroMicroPdf, IMethodResult oResult) { }
public override void getMacroMicroPdfBufferLabels(IMethodResult oResult) { }
public override void setMacroMicroPdfBufferLabels(bool macroMicroPdfBufferLabels, IMethodResult oResult) { }
public override void getMacroMicroPdfConvertToMicroPdf(IMethodResult oResult) { }
public override void setMacroMicroPdfConvertToMicroPdf(bool macroMicroPdfConvertToMicroPdf, IMethodResult oResult) { }
public override void getMacroMicroPdfExclusive(IMethodResult oResult) { }
public override void setMacroMicroPdfExclusive(bool macroMicroPdfExclusive, IMethodResult oResult) { }
public override void getMacroMicroPdfReportAppendInfo(IMethodResult oResult) { }
public override void setMacroMicroPdfReportAppendInfo(bool macroMicroPdfReportAppendInfo, IMethodResult oResult) { }
// Matrix 2 of 5
public override void getMatrix2of5(IMethodResult oResult) { }
public override void setMatrix2of5(bool matrix2of5, IMethodResult oResult) { }
public override void getMatrix2of5maxLength(IMethodResult oResult) { }
public override void setMatrix2of5maxLength(int matrix2of5maxLength, IMethodResult oResult) { }
public override void getMatrix2of5minLength(IMethodResult oResult) { }
public override void setMatrix2of5minLength(int matrix2of5minLength, IMethodResult oResult) { }
public override void getMatrix2of5reportCheckDigit(IMethodResult oResult) { }
public override void setMatrix2of5reportCheckDigit(bool matrix2of5reportCheckDigit, IMethodResult oResult) { }
public override void getMatrix2of5verifyCheckDigit(IMethodResult oResult) { }
public override void setMatrix2of5verifyCheckDigit(bool matrix2of5verifyCheckDigit, IMethodResult oResult) { }
// MaxiCode / Micro PDF / Micro QR
public override void getMaxiCode(IMethodResult oResult) { }
public override void setMaxiCode(bool maxiCode, IMethodResult oResult) { }
public override void getMicroPdf(IMethodResult oResult) { }
public override void setMicroPdf(bool microPdf, IMethodResult oResult) { }
public override void getMicroQr(IMethodResult oResult) { }
public override void setMicroQr(bool microQr, IMethodResult oResult) { }
// MSI
public override void getMsi(IMethodResult oResult) { }
public override void setMsi(bool msi, IMethodResult oResult) { }
public override void getMsiCheckDigits(IMethodResult oResult){}
public override void setMsiCheckDigits(string msiCheckDigits, IMethodResult oResult) { }
public override void getMsiCheckDigitScheme(IMethodResult oResult) { }
public override void setMsiCheckDigitScheme(string msiCheckDigitScheme, IMethodResult oResult) { }
public override void getMsiMaxLength(IMethodResult oResult) { }
public override void setMsiMaxLength(int msiMaxLength, IMethodResult oResult) { }
public override void getMsiMinLength(IMethodResult oResult) { }
public override void setMsiMinLength(int msiMinLength, IMethodResult oResult) { }
public override void getMsiRedundancy(IMethodResult oResult) { }
public override void setMsiRedundancy(bool msiRedundancy, IMethodResult oResult) { }
public override void getMsiReportCheckDigit(IMethodResult oResult) { }
public override void setMsiReportCheckDigit(bool msiReportCheckDigit, IMethodResult oResult) { }
// PDF417 / signature capture
public override void getPdf417(IMethodResult oResult) { }
public override void setPdf417(bool pdf417, IMethodResult oResult) { }
public override void getSignature(IMethodResult oResult) { }
public override void setSignature(bool signature, IMethodResult oResult) { }
public override void getSignatureImageHeight(IMethodResult oResult) { }
public override void setSignatureImageHeight(int signatureImageHeight, IMethodResult oResult) { }
public override void getSignatureImageWidth(IMethodResult oResult) { }
public override void setSignatureImageWidth(int signatureImageWidth, IMethodResult oResult) { }
public override void getSignatureImageQuality(IMethodResult oResult) { }
public override void setSignatureImageQuality(int signatureImageQuality, IMethodResult oResult) { }
// Postal codes
public override void getAusPostal(IMethodResult oResult) { }
public override void setAusPostal(bool ausPostal, IMethodResult oResult) { }
public override void getCanPostal(IMethodResult oResult) { }
public override void setCanPostal(bool canPostal, IMethodResult oResult) { }
public override void getDutchPostal(IMethodResult oResult) { }
public override void setDutchPostal(bool dutchPostal, IMethodResult oResult) { }
public override void getJapPostal(IMethodResult oResult) { }
public override void setJapPostal(bool japPostal, IMethodResult oResult) { }
public override void getUkPostal(IMethodResult oResult) { }
public override void setUkPostal(bool ukPostal, IMethodResult oResult) { }
public override void getUkPostalReportCheckDigit(IMethodResult oResult) { }
public override void setUkPostalReportCheckDigit(bool ukPostalReportCheckDigit, IMethodResult oResult) { }
public override void getUs4state(IMethodResult oResult) { }
public override void setUs4state(bool us4state, IMethodResult oResult) { }
public override void getUs4stateFics(IMethodResult oResult) { }
public override void setUs4stateFics(bool us4stateFics, IMethodResult oResult) { }
public override void getUsPlanet(IMethodResult oResult) { }
public override void setUsPlanet(bool usPlanet, IMethodResult oResult) { }
public override void getUsPlanetReportCheckDigit(IMethodResult oResult) { }
public override void setUsPlanetReportCheckDigit(bool usPlanetReportCheckDigit, IMethodResult oResult) { }
public override void getUsPostNet(IMethodResult oResult) { }
public override void setUsPostNet(bool usPostNet, IMethodResult oResult) { }
public override void getUsPostNetReportCheckDigit(IMethodResult oResult) { }
public override void setUsPostNetReportCheckDigit(bool usPostNetReportCheckDigit, IMethodResult oResult) { }
// QR / GS1 DataBar / TLC39 / Trioptic 39
public override void getQrCode(IMethodResult oResult) { }
public override void setQrCode(bool qrCode, IMethodResult oResult) { }
public override void getGs1dataBar(IMethodResult oResult) { }
public override void setGs1dataBar(bool gs1dataBar, IMethodResult oResult) { }
public override void getGs1dataBarExpanded(IMethodResult oResult) { }
public override void setGs1dataBarExpanded(bool gs1dataBarExpanded, IMethodResult oResult) { }
public override void getGs1dataBarLimited(IMethodResult oResult) { }
public override void setGs1dataBarLimited(bool gs1dataBarLimited, IMethodResult oResult) { }
public override void getTlc39(IMethodResult oResult) { }
public override void setTlc39(bool tlc39, IMethodResult oResult) { }
public override void getTrioptic39(IMethodResult oResult) { }
public override void setTrioptic39(bool trioptic39, IMethodResult oResult) { }
public override void getTrioptic39Redundancy(IMethodResult oResult) { }
public override void setTrioptic39Redundancy(bool trioptic39Redundancy, IMethodResult oResult) { }
// UPC/EAN family
public override void getUpcEanBookland(IMethodResult oResult) { }
public override void setUpcEanBookland(bool upcEanBookland, IMethodResult oResult) { }
public override void getUpcEanBooklandFormat(IMethodResult oResult) { }
public override void setUpcEanBooklandFormat(string upcEanBooklandFormat, IMethodResult oResult) { }
public override void getUpcEanConvertGs1dataBarToUpcEan(IMethodResult oResult) { }
public override void setUpcEanConvertGs1dataBarToUpcEan(bool upcEanConvertGs1dataBarToUpcEan, IMethodResult oResult) { }
public override void getUpcEanCoupon(IMethodResult oResult) { }
public override void setUpcEanCoupon(bool upcEanCoupon, IMethodResult oResult) { }
public override void getUpcEanLinearDecode(IMethodResult oResult) { }
public override void setUpcEanLinearDecode(bool upcEanLinearDecode, IMethodResult oResult) { }
public override void getUpcEanRandomWeightCheckDigit(IMethodResult oResult) { }
public override void setUpcEanRandomWeightCheckDigit(bool upcEanRandomWeightCheckDigit, IMethodResult oResult) { }
public override void getUpcEanRetryCount(IMethodResult oResult) { }
public override void setUpcEanRetryCount(int upcEanRetryCount, IMethodResult oResult) { }
public override void getUpcEanSecurityLevel(IMethodResult oResult) { }
public override void setUpcEanSecurityLevel(int upcEanSecurityLevel, IMethodResult oResult) { }
public override void getUpcEanSupplemental2(IMethodResult oResult) { }
public override void setUpcEanSupplemental2(bool upcEanSupplemental2, IMethodResult oResult) { }
public override void getUpcEanSupplemental5(IMethodResult oResult) { }
public override void setUpcEanSupplemental5(bool upcEanSupplemental5, IMethodResult oResult) { }
public override void getUpcEanSupplementalMode(IMethodResult oResult) { }
public override void setUpcEanSupplementalMode(string upcEanSupplementalMode, IMethodResult oResult) { }
public override void getUpca(IMethodResult oResult) { }
public override void setUpca(bool upca, IMethodResult oResult) { }
public override void getUpcaPreamble(IMethodResult oResult) { }
public override void setUpcaPreamble(string upcaPreamble, IMethodResult oResult) { }
public override void getUpcaReportCheckDigit(IMethodResult oResult) { }
public override void setUpcaReportCheckDigit(bool upcaReportCheckDigit, IMethodResult oResult) { }
public override void getUpce0(IMethodResult oResult) { }
public override void setUpce0(bool upce0, IMethodResult oResult) { }
public override void getUpce0convertToUpca(IMethodResult oResult) { }
public override void setUpce0convertToUpca(bool upce0convertToUpca, IMethodResult oResult) { }
public override void getUpce0preamble(IMethodResult oResult) { }
public override void setUpce0preamble(string upce0preamble, IMethodResult oResult) { }
public override void getUpce0reportCheckDigit(IMethodResult oResult) { }
public override void setUpce0reportCheckDigit(bool upce0reportCheckDigit, IMethodResult oResult) { }
public override void getUpce1(IMethodResult oResult) { }
public override void setUpce1(bool upce1, IMethodResult oResult) { }
public override void getUpce1convertToUpca(IMethodResult oResult) { }
public override void setUpce1convertToUpca(bool upce1convertToUpca, IMethodResult oResult) { }
public override void getUpce1preamble(IMethodResult oResult) { }
public override void setUpce1preamble(string upce1preamble, IMethodResult oResult) { }
public override void getUpce1reportCheckDigit(IMethodResult oResult) { }
public override void setUpce1reportCheckDigit(bool upce1reportCheckDigit, IMethodResult oResult) { }
// Webcode
public override void getWebcode(IMethodResult oResult) { }
public override void setWebcode(bool webcode, IMethodResult oResult) { }
public override void getWebcodeDecodeGtSubtype(IMethodResult oResult) { }
public override void setWebcodeDecodeGtSubtype(bool webcodeDecodeGtSubtype, IMethodResult oResult) { }
// ---- RSM (Remote Scanner Management) property stubs: device identity,
// battery, Bluetooth pairing/reconnection, proximity and paging settings.
// All no-ops — presumably only relevant for external Bluetooth scanners,
// which this camera-based implementation does not manage; TODO confirm. ----
public override void getRsmModelNumber(IMethodResult oResult) { }
public override void getRsmSerialNumber(IMethodResult oResult) { }
public override void getRsmDateOfManufacture(IMethodResult oResult) { }
public override void getRsmDateOfService(IMethodResult oResult) { }
public override void getRsmBluetoothAddress(IMethodResult oResult) { }
public override void getRsmFirmwareVersion(IMethodResult oResult) { }
public override void getRsmDeviceClass(IMethodResult oResult) { }
public override void getRsmBatteryStatus(IMethodResult oResult) { }
public override void getRsmBatteryCapacity(IMethodResult oResult) { }
public override void getRsmBatteryId(IMethodResult oResult) { }
public override void getRsmBluetoothAuthentication(IMethodResult oResult) { }
public override void setRsmBluetoothAuthentication(bool rsmBluetoothAuthentication, IMethodResult oResult) { }
public override void getRsmBluetoothEncryption(IMethodResult oResult) { }
public override void setRsmBluetoothEncryption(bool rsmBluetoothEncryption, IMethodResult oResult) { }
public override void getRsmBluetoothPinCode(IMethodResult oResult) { }
public override void setRsmBluetoothPinCode(string rsmBluetoothPinCode, IMethodResult oResult) { }
public override void getRsmBluetoothPinCodeType(IMethodResult oResult) { }
public override void setRsmBluetoothPinCodeType(string rsmBluetoothPinCodeType, IMethodResult oResult) { }
public override void getRsmBluetoothReconnectionAttempts(IMethodResult oResult) { }
public override void setRsmBluetoothReconnectionAttempts(int rsmBluetoothReconnectionAttempts, IMethodResult oResult) { }
public override void getRsmBluetoothBeepOnReconnectAttempt(IMethodResult oResult) { }
public override void setRsmBluetoothBeepOnReconnectAttempt(bool rsmBluetoothBeepOnReconnectAttempt, IMethodResult oResult) { }
public override void getRsmBluetoothHidAutoReconnect(IMethodResult oResult) { }
public override void setRsmBluetoothHidAutoReconnect(string rsmBluetoothHidAutoReconnect, IMethodResult oResult) { }
public override void getRsmBluetoothFriendlyName(IMethodResult oResult) { }
public override void setRsmBluetoothFriendlyName(string rsmBluetoothFriendlyName, IMethodResult oResult) { }
public override void getRsmBluetoothInquiryMode(IMethodResult oResult) { }
public override void setRsmBluetoothInquiryMode(string rsmBluetoothInquiryMode, IMethodResult oResult) { }
public override void getRsmBluetoothAutoReconnect(IMethodResult oResult) { }
public override void setRsmBluetoothAutoReconnect(string rsmBluetoothAutoReconnect, IMethodResult oResult) { }
public override void getRsmForceSavePairingBarcode(IMethodResult oResult) { }
public override void setRsmForceSavePairingBarcode(bool rsmForceSavePairingBarcode, IMethodResult oResult) { }
public override void getRsmLowBatteryIndication(IMethodResult oResult) { }
public override void setRsmLowBatteryIndication(bool rsmLowBatteryIndication, IMethodResult oResult) { }
public override void getRsmLowBatteryIndicationCycle(IMethodResult oResult) { }
public override void setRsmLowBatteryIndicationCycle(int rsmLowBatteryIndicationCycle, IMethodResult oResult) { }
public override void getRsmScanLineWidth(IMethodResult oResult) { }
public override void setRsmScanLineWidth(string rsmScanLineWidth, IMethodResult oResult) { }
public override void getRsmGoodScansDelay(IMethodResult oResult) { }
public override void setRsmGoodScansDelay(int rsmGoodScansDelay, IMethodResult oResult) { }
public override void getRsmDecodeFeedback(IMethodResult oResult) { }
public override void setRsmDecodeFeedback(bool rsmDecodeFeedback, IMethodResult oResult) { }
public override void getRsmIgnoreCode128Usps(IMethodResult oResult) { }
public override void setRsmIgnoreCode128Usps(bool rsmIgnoreCode128Usps, IMethodResult oResult) { }
public override void getRsmScanTriggerWakeup(IMethodResult oResult) { }
public override void setRsmScanTriggerWakeup(bool rsmScanTriggerWakeup, IMethodResult oResult) { }
public override void getRsmMems(IMethodResult oResult) { }
public override void setRsmMems(bool rsmMems, IMethodResult oResult) { }
public override void getRsmProximityEnable(IMethodResult oResult) { }
public override void setRsmProximityEnable(bool rsmProximityEnable, IMethodResult oResult) { }
public override void getRsmProximityContinuous(IMethodResult oResult) { }
public override void setRsmProximityContinuous(bool rsmProximityContinuous, IMethodResult oResult) { }
public override void getRsmProximityDistance(IMethodResult oResult) { }
public override void setRsmProximityDistance(string rsmProximityDistance, IMethodResult oResult) { }
public override void getRsmPagingEnable(IMethodResult oResult) { }
public override void setRsmPagingEnable(bool rsmPagingEnable, IMethodResult oResult) { }
public override void getRsmPagingBeepSequence(IMethodResult oResult) { }
public override void setRsmPagingBeepSequence(int rsmPagingBeepSequence, IMethodResult oResult) { }
// Misc scanner queries — also unimplemented here.
public override void registerBluetoothStatus(IMethodResult oResult) { }
public override void getScannerType(IMethodResult oResult) { }
public override void commandRemoteScanner(string command, IMethodResult oResult) { }
/* --------------------------------------------------------------------------------------------------------- */
// True between enable() and disable() calls. Nothing else in this visible
// chunk reads it — NOTE(review): possibly consulted by code outside this
// view; confirm before removing.
bool _isEnable = false;
// Marks the scanner enabled. propertyMap and oResult are currently ignored —
// presumably hardware/UI setup happens elsewhere (see take()); TODO confirm.
public override void enable(IReadOnlyDictionary<string, string> propertyMap, IMethodResult oResult)
{
    _isEnable = true;
}
// No-op stub: continuous scanning start is not supported on this platform;
// a scan is presumably initiated through take() instead — TODO confirm.
public override void start(IMethodResult oResult)
{
    // implement this method in C# here
}
// No-op stub: counterpart to start(); nothing to tear down on this platform.
public override void stop(IMethodResult oResult)
{
    // implement this method in C# here
}
// Marks the scanner disabled; counterpart to enable(). oResult is ignored.
public override void disable(IMethodResult oResult)
{
    _isEnable = false;
}
// Decodes a barcode from the app-resource image at imageFilePath and returns
// the decoded text through oResult ("" when nothing was recognized).
// Blocks the calling thread until the UI-thread decode attempt has run.
public override void barcode_recognize(string imageFilePath, IMethodResult oResult)
{
    _recognizeResult = "";
    _methodResult = oResult;
    StreamResourceInfo info = Application.GetResourceStream(new Uri(imageFilePath, UriKind.Relative));
    BitmapSource bmSrc = null;
    dispatchInvoke(() =>
    {
        bmSrc = new BitmapImage();
        bmSrc.SetSource(info.Stream);
        WriteableBitmap writableBitmap = new WriteableBitmap(bmSrc);
        writableBitmap.Invalidate();
        _barcodeReader.TryHarder = true;
        // Unsubscribe before subscribing: the previous code re-attached the
        // handler on every call, so from the second call onward the callback
        // accumulated and fired multiple times per decode. Removing a handler
        // that is not attached is a harmless no-op in C#.
        _barcodeReader.ResultFound -= BarcodeRecognizeTask_Completed;
        _barcodeReader.ResultFound += BarcodeRecognizeTask_Completed;
        _barcodeReader.Decode(writableBitmap);
        _waitHandle.Set();
    });
    // Wait for the dispatched block to signal completion, then report the
    // result captured by BarcodeRecognizeTask_Completed (or the initial "").
    _waitHandle.WaitOne();
    oResult.set(_recognizeResult);
}
// Reader callback: stash the decoded text so the waiting barcode_recognize
// call can hand it back to its caller.
void BarcodeRecognizeTask_Completed(Result decodeResult)
{
    _recognizeResult = decodeResult.Text;
}
// Reports the configurable property names supported by this implementation.
// None are exposed on this platform, so an empty collection is returned.
public override void getSupportedProperties(IMethodResult oResult)
{
    oResult.set(new List<String>());
}
// Opens the scanner UI. Remembers where to deliver the scan outcome, then
// shows the scan task on the dispatcher thread; the result arrives later via
// BarcodeScanTask_Completed. propertyMap is currently ignored.
public override void take(IReadOnlyDictionary<string, string> propertyMap, IMethodResult oResult)
{
    _methodResult = oResult;
    dispatchInvoke(() => _barcodeScanTask.Show());
}
// Presumably the legacy callback-style entry point: delegates to take().
// rubyCallbackURL and propertyMap are ignored; the result is delivered
// through oResult.
public override void take_barcode(string rubyCallbackURL, IReadOnlyDictionary<string, string> propertyMap, IMethodResult oResult)
{
    take(null, oResult);
}
// Serializes result delivery across the worker threads spawned below.
static Mutex _m = new Mutex();
// Completion handler for the scanner UI shown by take(): delivers either
// {status:"cancel"} or {status:"ok", barcode:<text>} to the pending
// _methodResult on a background thread.
private void BarcodeScanTask_Completed(object sender, BarcodeReaderLib.OpticalReaderResult readerResult)
{
    System.Diagnostics.Debug.WriteLine("BarcodeScanTask_Completed 1");
    if (readerResult.TaskResult == TaskResult.None || _methodResult == null)
        return;
    // Capture the pending result sink BEFORE handing work to a thread: the
    // previous code read the _methodResult field inside the thread body, so
    // a concurrent cancel completion (which nulls the field) could race it
    // into a NullReferenceException.
    IMethodResult methodResult = _methodResult;
    if (readerResult.TaskResult == TaskResult.Cancel)
    {
        new Thread(() =>
        {
            _m.WaitOne();
            System.Diagnostics.Debug.WriteLine("BarcodeScanTask_Completed cancel");
            Dictionary<string, string> result = new Dictionary<string, string>();
            result.Add("status", "cancel");
            result.Add("barcode", "");
            methodResult.set(result);
            _methodResult = null;
            _m.ReleaseMutex();
        }).Start();
        return;
    }
    // Haptic feedback on a successful scan.
    VibrateController.Default.Start(TimeSpan.FromMilliseconds(Barcode.vibrateTimeMs));
    System.Diagnostics.Debug.WriteLine("BarcodeScanTask_Completed 2");
    _prevScanResult = readerResult.Text;
    CRhoRuntime.getInstance().logEvent(String.Format("Barcode: {0} = {1}",
                                                     readerResult.Format,
                                                     readerResult.Text)
                                      );
    new Thread(() =>
    {
        _m.WaitOne();
        System.Diagnostics.Debug.WriteLine("BarcodeScanTask_Completed send data");
        Dictionary<string, string> result = new Dictionary<string, string>();
        result.Add("status", "ok");
        result.Add("barcode", readerResult.Text);
        methodResult.set(result);
        // NOTE(review): unlike the cancel path, the success path deliberately
        // leaves _methodResult set (preserved from the original) — presumably
        // so further scans can deliver more results; confirm before changing.
        _m.ReleaseMutex();
    }).Start();
}
}
// Singleton accessor for the Barcode API: enumerates the scanner instances
// available on this device.
public class BarcodeSingleton : BarcodeSingletonBase
{
    public BarcodeSingleton()
    {
        // No initialization required.
    }

    // Reports the identifiers of available scanners; this implementation
    // exposes exactly one camera-based scanner.
    public override void enumerate(IMethodResult oResult)
    {
        oResult.set(new List<String> { "CameraScanner1" });
    }
}
// Factory for Barcode API instances. All behavior is inherited from
// BarcodeFactoryBase (declared elsewhere); no members are added here.
public class BarcodeFactory : BarcodeFactoryBase
{
}
}
}
| |
$CfgDefault_3DView_SelectByGroup = "1";
$CfgDefault_BrushTool_isEnabled = "1";
$CfgDefault_BrushTool_pluginOrder = "5";
$CfgDefault_BrushTool_pluginOrderDefault = "3";
$CfgDefault_BrushTool_renderMesh = "0";
$CfgDefault_Common_Camera_cameraDisplayMode = "Standard";
$CfgDefault_Common_Camera_cameraDisplayType = "6";
$CfgDefault_Common_Camera_cameraSpeed = "71.2575";
$CfgDefault_Common_Camera_CamViewEnabled = "0";
$CfgDefault_Common_Camera_DefaultControlMode = "Player";
$CfgDefault_Common_Camera_invertXAxis = "0";
$CfgDefault_Common_Camera_invertYAxis = "0";
$CfgDefault_Common_Camera_LaunchDefaultAlways = "0";
$CfgDefault_Common_Camera_LaunchFreeviewAlways = "1";
$CfgDefault_Common_Camera_LaunchInFreeview = "0";
$CfgDefault_Common_Camera_MouseMoveMultiplier = "0.31";
$CfgDefault_Common_Camera_MouseScrollMultiplier = "0.57";
$CfgDefault_Common_Camera_movementSpeed = "40";
$CfgDefault_Common_Camera_orthoFOV = "0.681498";
$CfgDefault_Common_Camera_renderOrthoGrid = "0";
$CfgDefault_Common_Color_dragRectColor = "220 254 3 255";
$CfgDefault_Common_Color_faceSelectColor = "213 192 192 255";
$CfgDefault_Common_Color_gridColor = "85 3 0 203";
$CfgDefault_Common_Color_gridMinorTickColor = "36 67 15 255";
$CfgDefault_Common_Color_gridOriginColor = "183 183 183 255";
$CfgDefault_Common_Color_objectTextColor = "248 254 3 255";
$CfgDefault_Common_Color_objMouseOverColor = "254 3 9 255";
$CfgDefault_Common_Color_objMouseOverSelectColor = "197 199 46 254";
$CfgDefault_Common_Color_objSelectColor = "254 155 3 255";
$CfgDefault_Common_Color_popupBackgroundColor = "85 3 169 185";
$CfgDefault_Common_Color_popupTextColor = "84 1 169 255";
$CfgDefault_Common_Color_raceSelectColor = "243 254 3 255";
$CfgDefault_Common_Color_selectGridColor = "248 164 76 65";
$CfgDefault_Common_Color_selectionBoxColor = "18 238 64 255";
$CfgDefault_Common_Color_uvEditorHandleColor = "254 3 9 255";
$CfgDefault_Common_General_levelsDirectory = "art/Worlds/TorqueLab";
$CfgDefault_Common_General_TorsionPath = "C:\\Program Files (x86)\\Torsion\\Torsion.exe";
$CfgDefault_Common_General_undoLimit = "40";
$CfgDefault_Common_Gizmo_ = "0.380392 0.996078 0.333333 1";
$CfgDefault_Common_Gizmo_allowSnapRotations = "1";
$CfgDefault_Common_Gizmo_allowSnapScale = "1";
$CfgDefault_Common_Gizmo_alwaysRotationSnap = "1";
$CfgDefault_Common_Gizmo_gridColor = "243 254 3 248";
$CfgDefault_Common_Gizmo_gridSize = "10";
$CfgDefault_Common_Gizmo_planeDim = "40";
$CfgDefault_Common_Gizmo_renderInfoText = "1";
$CfgDefault_Common_Gizmo_renderMoveGrid = "1";
$CfgDefault_Common_Gizmo_renderPlane = "0";
$CfgDefault_Common_Gizmo_renderPlaneHashes = "1";
$CfgDefault_Common_Gizmo_renderSolid = "0";
$CfgDefault_Common_Gizmo_renderWhenUsed = "1";
$CfgDefault_Common_Gizmo_rotateScalar = "0.8";
$CfgDefault_Common_Gizmo_rotationSnap = "7.25";
$CfgDefault_Common_Gizmo_scaleScalar = "0.7";
$CfgDefault_Common_Gizmo_scaleSnap = "0.4";
$CfgDefault_Common_Gizmo_screenLength = "89";
$CfgDefault_Common_Gizmo_snapToGrid = "0";
$CfgDefault_Common_Grid_forceToGrid = "0";
$CfgDefault_Common_Grid_forceToGridNoZ = "1";
$CfgDefault_Common_Grid_gridSize = "1.5";
$CfgDefault_Common_Grid_gridStep = "1";
$CfgDefault_Common_Grid_planeDim = "200";
$CfgDefault_Common_Grid_renderPlane = "0";
$CfgDefault_Common_Grid_renderPlaneHashes = "1";
$CfgDefault_Common_Objects_boundingBoxCollision = "1";
$CfgDefault_Common_Objects_dropAtScreenCenterMax = "100.0";
$CfgDefault_Common_Objects_dropAtScreenCenterScalar = "0";
$CfgDefault_Common_Objects_dropType = "toTerrain";
$CfgDefault_Common_Objects_fadeIcons = "1";
$CfgDefault_Common_Objects_fadeIconsDist = "12";
$CfgDefault_Common_Objects_forceLoadDAE = "0";
$CfgDefault_Common_Objects_gridSnap = "0";
$CfgDefault_Common_Objects_IgnoreDropSelRotation = "0";
$CfgDefault_Common_Objects_renderObjHandle = "1";
$CfgDefault_Common_Objects_renderObjText = "1";
$CfgDefault_Common_Objects_renderPopupBackground = "1";
$CfgDefault_Common_Objects_renderSelectionBox = "1";
$CfgDefault_Common_Objects_showMousePopupInfo = "1";
$CfgDefault_Common_Objects_snapGround = "0";
$CfgDefault_Common_Objects_snapSoft = "0";
$CfgDefault_Common_Objects_snapSoftSize = "2.0";
$CfgDefault_convexEditor_isEnabled = 1;
$CfgDefault_convexEditor_pluginOrder = "15";
$CfgDefault_ConvexEditor_pluginOrderDefault = "16";
$CfgDefault_DatablockEditor_excludeClientOnlyDatablocks = "1";
$CfgDefault_datablockEditor_isEnabled = 1;
$CfgDefault_datablockEditor_pluginOrder = "6";
$CfgDefault_DatablockEditor_pluginOrderDefault = "12";
$CfgDefault_DecalEditor_DefaultScale = "1";
$CfgDefault_decalEditor_isEnabled = 1;
$CfgDefault_decalEditor_pluginOrder = "5";
$CfgDefault_DecalEditor_pluginOrderDefault = "8";
$CfgDefault_Dev_Console_DevLogLevel = "1";
$CfgDefault_Dev_Console_MouseDragLog = "1";
$CfgDefault_Dev_Console_MouseDragLogDelay = "0.5";
$CfgDefault_Dev_Console_MouseLog = "1";
$CfgDefault_Dev_Console_MouseMoveLog = "1";
$CfgDefault_Dev_Console_MouseMoveLogDelay = "0.5";
$CfgDefault_Dev_Console_ShowInfos = "1";
$CfgDefault_Dev_Console_ShowNotes = "1";
$CfgDefault_Dev_Console_ShowParamLog = "0";
$CfgDefault_Dev_Console_TraceLogLevel = "1";
$CfgDefault_Development_Console_DevLogLevel = "0";
$CfgDefault_Development_Console_MouseDragLog = "0";
$CfgDefault_Development_Console_MouseDragLogDelay = "0";
$CfgDefault_Development_Console_MouseLog = "0";
$CfgDefault_Development_Console_MouseMoveLog = "0";
$CfgDefault_Development_Console_MouseMoveLogDelay = "0";
$CfgDefault_Development_Console_ShowInfos = "1";
$CfgDefault_Development_Console_ShowNotes = "1";
$CfgDefault_Development_Console_ShowParamLog = "0";
$CfgDefault_Development_Console_TraceLogLevel = "0";
$CfgDefault_ForestEditor_BrushHardness = "2";
$CfgDefault_ForestEditor_BrushPressure = "2";
$CfgDefault_ForestEditor_BrushSize = "5";
$CfgDefault_ForestEditor_DefaultBrush = "BaseBrush";
$CfgDefault_ForestEditor_DefaultBrushHardness = "50";
$CfgDefault_ForestEditor_DefaultBrushPressure = "20";
$CfgDefault_ForestEditor_DefaultBrushSize = "5";
$CfgDefault_ForestEditor_DefaultGlobalScale = "1";
$CfgDefault_ForestEditor_GlobalScale = "1";
$CfgDefault_forestEditor_isEnabled = 1;
$CfgDefault_forestEditor_pluginOrder = "9";
$CfgDefault_ForestEditor_pluginOrderDefault = "4";
$CfgDefault_General_Console_DevLogLevel = "1";
$CfgDefault_General_Console_ShowInfos = "1";
$CfgDefault_General_Console_ShowNotes = "1";
$CfgDefault_General_Console_ShowParamLog = "0";
$CfgDefault_General_Console_TraceLogLevel = "0";
$CfgDefault_General_Misc_undoLimit = "40";
$CfgDefault_GuiEditor__ = "";
$CfgDefault_GuiEditor_drawBorderLines_fullBox = "1";
$CfgDefault_GuiEditor_drawGuides_fullBox = "1";
$CfgDefault_GuiEditor_Editor_lastPath = "G:/GameProjects/AlterVerse/LOCAL/Game/tlab/materialEditor/gui";
$CfgDefault_GuiEditor_Editor_previewResolution = "1440 900";
$CfgDefault_GuiEditor_EngineDevelopment_showEditorGuis = "";
$CfgDefault_GuiEditor_EngineDevelopment_showEditorProfiles = "";
$CfgDefault_GuiEditor_EngineDevelopment_toggleIntoEditor = "";
$CfgDefault_GuiEditor_GuiEditor_lastPath = "F:/Game Workplace/AlterVerse/GameGIT/tlab/EditorLab/guiSystem/SideBar";
$CfgDefault_GuiEditor_GuiEditor_previewResolution = "1440 900";
$CfgDefault_GuiEditor_Help_documentationLocal = "";
$CfgDefault_GuiEditor_Help_documentationReference = "";
$CfgDefault_GuiEditor_Help_documentationURL = "";
$CfgDefault_GuiEditor_Library_viewType = "Categorized";
$CfgDefault_GuiEditor_Rendering_drawBorderLines = "1";
$CfgDefault_GuiEditor_Rendering_drawGuides = "1";
$CfgDefault_GuiEditor_Selection_fullBox = "1";
$CfgDefault_GuiEditor_Snapping_sensitivity = "2";
$CfgDefault_GuiEditor_Snapping_snap2Grid = "";
$CfgDefault_GuiEditor_Snapping_snap2GridSize = "";
$CfgDefault_GuiEditor_Snapping_snapToCanvas = "1";
$CfgDefault_GuiEditor_Snapping_snapToCenters = "0";
$CfgDefault_GuiEditor_Snapping_snapToControls = "0";
$CfgDefault_GuiEditor_Snapping_snapToEdges = "0";
$CfgDefault_GuiEditor_Snapping_snapToGuides = "0";
$CfgDefault_Interface_Editor_ToolFrameLocked = "0";
$CfgDefault_Interface_Editor_ToolFrameSize = "Normal";
$CfgDefault_ipsEditor_isEnabled = 1;
$CfgDefault_ipsEditor_pluginOrder = "4";
$CfgDefault_IpsEditor_pluginOrderDefault = "6";
$CfgDefault_LockToolbar = 0;
$CfgDefault_MaterialEditor_AOSuffix = "_s";
$CfgDefault_MaterialEditor_AutoAddAO = "1";
$CfgDefault_MaterialEditor_AutoAddComposite = "1";
$CfgDefault_MaterialEditor_AutoAddMetalness = "1";
$CfgDefault_MaterialEditor_AutoAddNormal = "1";
$CfgDefault_MaterialEditor_AutoAddSmoothness = "1";
$CfgDefault_MaterialEditor_AutoAddSpecular = "1";
$CfgDefault_MaterialEditor_CompositeSuffix = "_s";
$CfgDefault_MaterialEditor_DefaultMaterialFile = "10";
$CfgDefault_MaterialEditor_DiffuseSuffix = "_d";
$CfgDefault_materialEditor_isEnabled = 1;
$CfgDefault_MaterialEditor_MapModePBR = "1";
$CfgDefault_MaterialEditor_MetalnessSuffix = "_s";
$CfgDefault_MaterialEditor_NormalSuffix = "_n";
$CfgDefault_MaterialEditor_PBRenabled = "1";
$CfgDefault_materialEditor_pluginOrder = "2";
$CfgDefault_MaterialEditor_pluginOrderDefault = "5";
$CfgDefault_MaterialEditor_PropShowGroup_Advanced = "0";
$CfgDefault_MaterialEditor_PropShowGroup_Animation = "0";
$CfgDefault_MaterialEditor_PropShowGroup_Lighting = "1";
$CfgDefault_MaterialEditor_PropShowGroup_PBR = "0";
$CfgDefault_MaterialEditor_PropShowGroup_Rendering = "1";
$CfgDefault_MaterialEditor_PropShowGroup_TextureMaps = 1;
$CfgDefault_MaterialEditor_PropShowMap_Detail = "0";
$CfgDefault_MaterialEditor_PropShowMap_detailNormal = "0";
$CfgDefault_MaterialEditor_PropShowMap_Diffuse = 1;
$CfgDefault_MaterialEditor_PropShowMap_environment = "0";
$CfgDefault_MaterialEditor_PropShowMap_Light = "0";
$CfgDefault_MaterialEditor_PropShowMap_Normal = "1";
$CfgDefault_MaterialEditor_PropShowMap_Overlay = "0";
$CfgDefault_MaterialEditor_PropShowMap_specular = "0";
$CfgDefault_MaterialEditor_PropShowMap_tone = "0";
$CfgDefault_MaterialEditor_SmoothnessSuffix = "_s";
$CfgDefault_MaterialEditor_SpecularSuffix = "_s";
$CfgDefault_MaterialEditor_ThumbnailCountIndex = "3";
$CfgDefault_meshRoadEditor_isEnabled = 1;
$CfgDefault_meshRoadEditor_pluginOrder = "10";
$CfgDefault_MeshRoadEditor_pluginOrderDefault = "9";
$CfgDefault_NavEditor_backgroundBuild = "1";
$CfgDefault_navEditor_isEnabled = 1;
$CfgDefault_NavEditor_playSoundWhenDone = "1";
$CfgDefault_navEditor_pluginOrder = "7";
$CfgDefault_NavEditor_pluginOrderDefault = "15";
$CfgDefault_NavEditor_renderBVTree = "0";
$CfgDefault_NavEditor_renderMesh = "0";
$CfgDefault_NavEditor_renderPortals = "0";
$CfgDefault_NavEditor_saveIntermediates = "1";
$CfgDefault_NavEditor_spawnClass = "AIPlayer";
$CfgDefault_NavEditor_spawnDatablock = "DemoPlayerData";
$CfgDefault_particleEditor_isEnabled = 1;
$CfgDefault_particleEditor_pluginOrder = "8";
$CfgDefault_ParticleEditor_pluginOrderDefault = "14";
$CfgDefault_Plugins_ShapeLab_UseSimplifiedSystem = 1;
$CfgDefault_RiverEditor_DefaultDepth = "5";
$CfgDefault_RiverEditor_DefaultNormal = "0 0 1";
$CfgDefault_RiverEditor_DefaultWidth = "10";
$CfgDefault_RiverEditor_HoverNodeColor = "255 255 255 255";
$CfgDefault_RiverEditor_HoverSplineColor = "255 0 0 255";
$CfgDefault_riverEditor_isEnabled = 1;
$CfgDefault_riverEditor_pluginOrder = "11";
$CfgDefault_RiverEditor_pluginOrderDefault = "11";
$CfgDefault_RiverEditor_SelectedSplineColor = "255 0 255 255";
$CfgDefault_RoadEditor_borderMovePixelSize = "20";
$CfgDefault_RoadEditor_borderMoveSpeed = "0.1";
$CfgDefault_RoadEditor_consoleCircleSegments = "32";
$CfgDefault_RoadEditor_consoleFillColor = "0 0 0 0";
$CfgDefault_RoadEditor_consoleFrameColor = "255 0 0 255";
$CfgDefault_RoadEditor_consoleLineWidth = "1";
$CfgDefault_RoadEditor_consoleSphereLevel = "1";
$CfgDefault_RoadEditor_DefaultWidth = "10";
$CfgDefault_RoadEditor_HoverNodeColor = "255 255 255 255";
$CfgDefault_RoadEditor_HoverSplineColor = "255 0 0 255";
$CfgDefault_roadEditor_isEnabled = 1;
$CfgDefault_RoadEditor_MaterialName = "DefaultDecalRoadMaterial";
$CfgDefault_roadEditor_pluginOrder = "12";
$CfgDefault_RoadEditor_pluginOrderDefault = "10";
$CfgDefault_RoadEditor_SelectedSplineColor = "0 255 0 255";
$CfgDefault_SceneEditor_AutoCreatePrefab = "1";
$CfgDefault_SceneEditor_AutoLight_ShowLights = "0";
$CfgDefault_SceneEditor_AutoLight_ShowShapes = "0";
$CfgDefault_SceneEditor_AutoPrefabFolder = "art/models/prefabs/";
$CfgDefault_SceneEditor_AutoPrefabMode = "1";
$CfgDefault_SceneEditor_CoreGroup = "mgCore";
$CfgDefault_SceneEditor_CoverPointGroup = "CoverPoint";
$CfgDefault_SceneEditor_DropLocation = "10";
$CfgDefault_SceneEditor_EnvironmentGroup = "mgEnvironment";
$CfgDefault_SceneEditor_GroundCoverDefaultMaterial = "grass1";
$CfgDefault_SceneEditor_IconWidth = "120";
$CfgDefault_SceneEditor_isEnabled = 1;
$CfgDefault_SceneEditor_LightsGroup = "mgLights";
$CfgDefault_SceneEditor_MiscObjectGroup = "mgMiscObject";
$CfgDefault_SceneEditor_NavAIGroup = "NavAI";
$CfgDefault_SceneEditor_NavMeshGroup = "NavMesh";
$CfgDefault_SceneEditor_NavPathGroup = "NavPath";
$CfgDefault_SceneEditor_Occluders = "mgOccluders";
$CfgDefault_SceneEditor_pluginOrder = "1";
$CfgDefault_SceneEditor_pluginOrderDefault = "1";
$CfgDefault_SceneEditor_renameInternal = "0";
$CfgDefault_SceneEditor_SceneObjectsGroup = "mgSceneObjects";
$CfgDefault_SceneEditor_ShapeGroup = "mgShapeGroup";
$CfgDefault_SceneEditor_showClassNames = "0";
$CfgDefault_SceneEditor_showInternalNames = "0";
$CfgDefault_SceneEditor_showObjectIds = "0";
$CfgDefault_SceneEditor_showObjectNames = "1";
$CfgDefault_SceneEditor_SpawnGroup = "PlayerDropPoints";
$CfgDefault_SceneEditor_TSStaticGroup = "mgMapModels";
$CfgDefault_SceneEditor_Vehicle = "Vehicle";
$CfgDefault_ShapeEditor_AdvancedWindowVisible = "1";
$CfgDefault_ShapeEditor_AnimationBarVisible = "1";
$CfgDefault_ShapeEditor_BackgroundColor = "0 0 0 0.9";
$CfgDefault_ShapeEditor_GridDimension = "40 40";
$CfgDefault_ShapeEditor_GridSize = "0.1";
$CfgDefault_ShapeEditor_HighlightMaterial = "1";
$CfgDefault_ShapeEditor_isEnabled = "1";
$CfgDefault_ShapeEditor_pluginOrder = "13";
$CfgDefault_ShapeEditor_pluginOrderDefault = "13";
$CfgDefault_ShapeEditor_PreviewColorBG = "0 0 0 0.9";
$CfgDefault_ShapeEditor_RenderCollision = "0";
$CfgDefault_ShapeEditor_RenderMounts = "1";
$CfgDefault_ShapeEditor_ShowBounds = "0";
$CfgDefault_ShapeEditor_ShowGrid = "1";
$CfgDefault_ShapeEditor_ShowNodes = "1";
$CfgDefault_ShapeEditor_ShowObjBox = "1";
$CfgDefault_ShapeEditor_SunAmbientColor = "180 180 180 255";
$CfgDefault_ShapeEditor_SunAngleX = "45";
$CfgDefault_ShapeEditor_SunAngleZ = "135";
$CfgDefault_ShapeEditor_SunDiffuseColor = "255 255 255 255";
$CfgDefault_ShapeLab_AdvancedWindowVisible = "1";
$CfgDefault_ShapeLab_AnimationBarVisible = "1";
$CfgDefault_ShapeLab_BackgroundColor = "0.12549 0.00784314 0.996078 1";
$CfgDefault_ShapeLab_GridDimension = "40 30";
$CfgDefault_ShapeLab_GridSize = "0.1";
$CfgDefault_ShapeLab_HighlightMaterial = "1";
$CfgDefault_ShapeLab_isEnabled = 1;
$CfgDefault_ShapeLab_pluginOrder = "3";
$CfgDefault_ShapeLab_pluginOrderDefault = "7";
$CfgDefault_ShapeLab_PreviewColorBG = "0 0 0 0.9";
$CfgDefault_ShapeLab_RenderCollision = "0";
$CfgDefault_ShapeLab_RenderMounts = "1";
$CfgDefault_ShapeLab_ShowBounds = "0";
$CfgDefault_ShapeLab_ShowGrid = "1";
$CfgDefault_ShapeLab_ShowNodes = "1";
$CfgDefault_ShapeLab_ShowObjBox = "1";
$CfgDefault_ShapeLab_SunAmbientColor = "180";
$CfgDefault_ShapeLab_SunAngleX = "45";
$CfgDefault_ShapeLab_SunAngleZ = "135";
$CfgDefault_ShapeLab_SunDiffuseColor = "255";
$CfgDefault_TerrainEditor_adjustHeightVal = "10";
$CfgDefault_TerrainEditor_BrushPressure = "1";
$CfgDefault_TerrainEditor_BrushSetHeight = "1";
$CfgDefault_TerrainEditor_BrushSetHeightRange = "0 100";
$CfgDefault_TerrainEditor_BrushSize = "2";
$CfgDefault_TerrainEditor_BrushSoftness = "1";
$CfgDefault_TerrainEditor_BrushType = "box";
$CfgDefault_TerrainEditor_DefaultBrushPressure = "43";
$CfgDefault_TerrainEditor_DefaultBrushSetHeight = "100";
$CfgDefault_TerrainEditor_DefaultBrushSize = "8";
$CfgDefault_TerrainEditor_DefaultBrushSoftness = "43";
$CfgDefault_TerrainEditor_DefaultBrushType = "box";
$CfgDefault_TerrainEditor_isEnabled = 1;
$CfgDefault_TerrainEditor_maxBrushSize = "40 40";
$CfgDefault_TerrainEditor_noiseFactor = "1";
$CfgDefault_TerrainEditor_pluginOrder = "13";
$CfgDefault_TerrainEditor_pluginOrderDefault = "2";
$CfgDefault_TerrainEditor_scaleVal = "1";
$CfgDefault_TerrainEditor_setHeightVal = "100";
$CfgDefault_TerrainEditor_slopeMaxAngle = "90";
$CfgDefault_TerrainEditor_slopeMinAngle = "0";
$CfgDefault_TerrainEditor_smoothFactor = "0.1";
$CfgDefault_TerrainEditor_softSelectDefaultFilter = "1.000000 0.833333 0.666667 0.500000 0.333333 0.166667 0.000000";
$CfgDefault_TerrainEditor_softSelectFilter = "1.000000 0.833333 0.666667 0.500000 0.333333 0.166667 0.000000";
$CfgDefault_TerrainEditor_softSelectRadius = "50";
$CfgDefault_TerrainPainter_adjustHeightVal = "10";
$CfgDefault_TerrainPainter_DefaultBrushPressure = "50";
$CfgDefault_TerrainPainter_DefaultBrushSize = "2";
$CfgDefault_TerrainPainter_DefaultBrushSlopeMax = "90";
$CfgDefault_TerrainPainter_DefaultBrushSlopeMin = "0";
$CfgDefault_TerrainPainter_DefaultBrushSoftness = "50";
$CfgDefault_TerrainPainter_DefaultBrushType = "box";
$CfgDefault_TerrainPainter_isEnabled = 1;
$CfgDefault_TerrainPainter_noiseFactor = "1";
$CfgDefault_TerrainPainter_pluginOrder = "14";
$CfgDefault_TerrainPainter_pluginOrderDefault = "3";
$CfgDefault_TerrainPainter_scaleVal = "1";
$CfgDefault_TerrainPainter_setHeightVal = "100";
$CfgDefault_TerrainPainter_slopeMaxAngle = "90";
$CfgDefault_TerrainPainter_slopeMinAngle = "0";
$CfgDefault_TerrainPainter_smoothFactor = "0.1";
$CfgDefault_TerrainPainter_softSelectDefaultFilter = "1.000000 0.833333 0.666667 0.500000 0.333333 0.166667 0.000000";
$CfgDefault_TerrainPainter_softSelectFilter = "1.000000 0.833333 0.666667 0.500000 0.333333 0.166667 0.000000";
$CfgDefault_TerrainPainter_softSelectRadius = "50";
$CfgDefault_TLab_AutoSaveDelay = "5";
$CfgDefault_TLab_Class_Inspector_LogLevel = 0;
$CfgDefault_TLab_defaultGui = "MainMenuGui";
$CfgDefault_TLab_GuiEditorLoaded = 1;
$CfgDefault_TLab_LeftFrameMin = "123";
$CfgDefault_TLab_Object_DropTypes = "atOrigin atCamera atCameraRot belowCamera screenCenter atCentroid toTerrain belowSelection";
$CfgDefault_TLab_PrefabAutoMode = "Level Object Folder";
$CfgDefault_TLab_RightFrameMin = "123";
$CfgDefault_TLab_Theme = "Laborean";
$CfgDefault_TLab_ThemePath = "tlab/themes/Laborean/";
$CfgDefault_TorsionPath = "40";
$CfgDefault_UI_Editor_SideFrameWidth = "220";
$CfgDefault_UI_Editor_ToolFrameWidth = "280";
$CfgDefault_UI_Frame_SideFrameWidth = "220";
$CfgDefault_UI_Frame_ToolFrameWidth = "280";
$CfgDefault_UI_Menu_UseNativeMenu = 0;
$CfgDefault_WorldEditor_Misc_defaultHandle = "tlab/art/icons/default/DefaultHandle";
$CfgDefault_WorldEditor_Misc_documentationLocal = "../../../Documentation/Official";
$CfgDefault_WorldEditor_Misc_documentationReference = "../../../Documentation/Torque";
$CfgDefault_WorldEditor_Misc_documentationURL = "http://www.garagegames.com/products/torque-3d/documentation/user";
$CfgDefault_WorldEditor_Misc_forumURL = "http://www.garagegames.com/products/torque-3d/forums";
$CfgDefault_WorldEditor_Misc_lockedHandle = "tlab/art/icons/default/LockedHandle";
$CfgDefault_WorldEditor_Misc_planeDim = "500";
$CfgDefault_WorldEditor_Misc_renderPlane = "0";
$CfgDefault_WorldEditor_Misc_renderPlaneHashes = "0";
$CfgDefault_WorldEditor_Misc_selectHandle = "tlab/art/icons/default/SelectHandle";
$Cfg_3DView_SelectByGroup = "1";
$Cfg_BrushTool_isEnabled = "1";
$Cfg_BrushTool_pluginOrder = "5";
$Cfg_BrushTool_pluginOrderDefault = "3";
$Cfg_BrushTool_renderMesh = "0";
$Cfg_Common_Camera_cameraDisplayMode = "Standard";
$Cfg_Common_Camera_cameraDisplayType = "6";
$Cfg_Common_Camera_cameraSpeed = "71.2575";
$Cfg_Common_Camera_CamViewEnabled = "0";
$Cfg_Common_Camera_DefaultControlMode = "Player";
$Cfg_Common_Camera_invertXAxis = "0";
$Cfg_Common_Camera_invertYAxis = "0";
$Cfg_Common_Camera_LaunchDefaultAlways = "0";
$Cfg_Common_Camera_LaunchFreeviewAlways = "1";
$Cfg_Common_Camera_LaunchInFreeview = "0";
$Cfg_Common_Camera_MouseMoveMultiplier = "0.31";
$Cfg_Common_Camera_MouseScrollMultiplier = "0.57";
$Cfg_Common_Camera_movementSpeed = "40";
$Cfg_Common_Camera_orthoFOV = "0.681498";
$Cfg_Common_Camera_renderOrthoGrid = "0";
$Cfg_Common_Color_dragRectColor = "220 254 3 255";
$Cfg_Common_Color_faceSelectColor = "213 192 192 255";
$Cfg_Common_Color_gridColor = "85 3 0 203";
$Cfg_Common_Color_gridMinorTickColor = "36 67 15 255";
$Cfg_Common_Color_gridOriginColor = "183 183 183 255";
$Cfg_Common_Color_objectTextColor = "248 254 3 255";
$Cfg_Common_Color_objMouseOverColor = "254 3 9 255";
$Cfg_Common_Color_objMouseOverSelectColor = "197 199 46 254";
$Cfg_Common_Color_objSelectColor = "254 155 3 255";
$Cfg_Common_Color_popupBackgroundColor = "85 3 169 185";
$Cfg_Common_Color_popupTextColor = "84 1 169 255";
$Cfg_Common_Color_raceSelectColor = "243 254 3 255";
$Cfg_Common_Color_selectGridColor = "248 164 76 65";
$Cfg_Common_Color_selectionBoxColor = "18 238 64 255";
$Cfg_Common_Color_uvEditorHandleColor = "254 3 9 255";
$Cfg_Common_General_levelsDirectory = "art/Worlds/TorqueLab";
$Cfg_Common_General_TorsionPath = "C:\\Program Files (x86)\\Torsion\\Torsion.exe";
$Cfg_Common_General_undoLimit = "40";
$Cfg_Common_Gizmo_ = "0.380392 0.996078 0.333333 1";
$Cfg_Common_Gizmo_allowSnapRotations = "1";
$Cfg_Common_Gizmo_allowSnapScale = "1";
$Cfg_Common_Gizmo_alwaysRotationSnap = "1";
$Cfg_Common_Gizmo_gridColor = "243 254 3 248";
$Cfg_Common_Gizmo_gridSize = "10";
$Cfg_Common_Gizmo_planeDim = "40";
$Cfg_Common_Gizmo_renderInfoText = "1";
$Cfg_Common_Gizmo_renderMoveGrid = "1";
$Cfg_Common_Gizmo_renderPlane = "0";
$Cfg_Common_Gizmo_renderPlaneHashes = "1";
$Cfg_Common_Gizmo_renderSolid = "0";
$Cfg_Common_Gizmo_renderWhenUsed = "1";
$Cfg_Common_Gizmo_rotateScalar = "0.8";
$Cfg_Common_Gizmo_rotationSnap = "7.25";
$Cfg_Common_Gizmo_scaleScalar = "0.7";
$Cfg_Common_Gizmo_scaleSnap = "0.4";
$Cfg_Common_Gizmo_screenLength = "89";
$Cfg_Common_Gizmo_snapToGrid = "0";
$Cfg_Common_Grid_forceToGrid = "0";
$Cfg_Common_Grid_forceToGridNoZ = "1";
$Cfg_Common_Grid_gridSize = "1.5";
$Cfg_Common_Grid_gridStep = "1";
$Cfg_Common_Grid_planeDim = "200";
$Cfg_Common_Grid_renderPlane = "0";
$Cfg_Common_Grid_renderPlaneHashes = "1";
$Cfg_Common_Objects_boundingBoxCollision = "1";
$Cfg_Common_Objects_dropAtScreenCenterMax = "100.0";
$Cfg_Common_Objects_dropAtScreenCenterScalar = "0";
$Cfg_Common_Objects_dropType = "toTerrain";
$Cfg_Common_Objects_fadeIcons = "1";
$Cfg_Common_Objects_fadeIconsDist = "12";
$Cfg_Common_Objects_forceLoadDAE = "0";
$Cfg_Common_Objects_gridSnap = "0";
$Cfg_Common_Objects_IgnoreDropSelRotation = "0";
$Cfg_Common_Objects_renderObjHandle = "1";
$Cfg_Common_Objects_renderObjText = "1";
$Cfg_Common_Objects_renderPopupBackground = "1";
$Cfg_Common_Objects_renderSelectionBox = "1";
$Cfg_Common_Objects_showMousePopupInfo = "1";
$Cfg_Common_Objects_snapGround = "0";
$Cfg_Common_Objects_snapSoft = "0";
$Cfg_Common_Objects_snapSoftSize = "2.0";
$Cfg_convexEditor_isEnabled = 1;
$Cfg_convexEditor_pluginOrder = "15";
$Cfg_ConvexEditor_pluginOrderDefault = "16";
$Cfg_DatablockEditor_excludeClientOnlyDatablocks = "1";
$Cfg_datablockEditor_isEnabled = 1;
$Cfg_datablockEditor_pluginOrder = "6";
$Cfg_DatablockEditor_pluginOrderDefault = "12";
$Cfg_DecalEditor_DefaultScale = "1";
$Cfg_decalEditor_isEnabled = 1;
$Cfg_decalEditor_pluginOrder = "5";
$Cfg_DecalEditor_pluginOrderDefault = "8";
$Cfg_Dev_Console_DevLogLevel = "1";
$Cfg_Dev_Console_MouseDragLog = "1";
$Cfg_Dev_Console_MouseDragLogDelay = "0.5";
$Cfg_Dev_Console_MouseLog = "1";
$Cfg_Dev_Console_MouseMoveLog = "1";
$Cfg_Dev_Console_MouseMoveLogDelay = "0.5";
$Cfg_Dev_Console_ShowInfos = "1";
$Cfg_Dev_Console_ShowNotes = "1";
$Cfg_Dev_Console_ShowParamLog = "0";
$Cfg_Dev_Console_TraceLogLevel = "1";
$Cfg_Development_Console_DevLogLevel = "0";
$Cfg_Development_Console_MouseDragLog = "0";
$Cfg_Development_Console_MouseDragLogDelay = "0";
$Cfg_Development_Console_MouseLog = "0";
$Cfg_Development_Console_MouseMoveLog = "0";
$Cfg_Development_Console_MouseMoveLogDelay = "0";
$Cfg_Development_Console_ShowInfos = "1";
$Cfg_Development_Console_ShowNotes = "1";
$Cfg_Development_Console_ShowParamLog = "0";
$Cfg_Development_Console_TraceLogLevel = "0";
$Cfg_ForestEditor_BrushHardness = "2";
$Cfg_ForestEditor_BrushPressure = "2";
$Cfg_ForestEditor_BrushSize = "5";
$Cfg_ForestEditor_DefaultBrush = "BaseBrush";
$Cfg_ForestEditor_DefaultBrushHardness = "50";
$Cfg_ForestEditor_DefaultBrushPressure = "20";
$Cfg_ForestEditor_DefaultBrushSize = "5";
$Cfg_ForestEditor_DefaultGlobalScale = "1";
$Cfg_ForestEditor_GlobalScale = "1";
$Cfg_forestEditor_isEnabled = 1;
$Cfg_forestEditor_pluginOrder = "9";
$Cfg_ForestEditor_pluginOrderDefault = "4";
$Cfg_General_Console_DevLogLevel = "1";
$Cfg_General_Console_ShowInfos = "1";
$Cfg_General_Console_ShowNotes = "1";
$Cfg_General_Console_ShowParamLog = "0";
$Cfg_General_Console_TraceLogLevel = "0";
$Cfg_General_Misc_undoLimit = "40";
$Cfg_GuiEditor__ = "";
$Cfg_GuiEditor_drawBorderLines_fullBox = "1";
$Cfg_GuiEditor_drawGuides_fullBox = "1";
$Cfg_GuiEditor_Editor_lastPath = "G:/GameProjects/AlterVerse/LOCAL/Game/tlab/materialEditor/gui";
$Cfg_GuiEditor_Editor_previewResolution = "1440 900";
$Cfg_GuiEditor_EngineDevelopment_showEditorGuis = "";
$Cfg_GuiEditor_EngineDevelopment_showEditorProfiles = "";
$Cfg_GuiEditor_EngineDevelopment_toggleIntoEditor = "";
$Cfg_GuiEditor_GuiEditor_lastPath = "F:/Game Workplace/AlterVerse/GameGIT/tlab/EditorLab/guiSystem/SideBar";
$Cfg_GuiEditor_GuiEditor_previewResolution = "1440 900";
$Cfg_GuiEditor_Help_documentationLocal = "";
$Cfg_GuiEditor_Help_documentationReference = "";
$Cfg_GuiEditor_Help_documentationURL = "";
$Cfg_GuiEditor_Library_viewType = "Categorized";
$Cfg_GuiEditor_Rendering_drawBorderLines = "1";
$Cfg_GuiEditor_Rendering_drawGuides = "1";
$Cfg_GuiEditor_Selection_fullBox = "1";
$Cfg_GuiEditor_Snapping_sensitivity = "2";
$Cfg_GuiEditor_Snapping_snap2Grid = "";
$Cfg_GuiEditor_Snapping_snap2GridSize = "";
$Cfg_GuiEditor_Snapping_snapToCanvas = "1";
$Cfg_GuiEditor_Snapping_snapToCenters = "0";
$Cfg_GuiEditor_Snapping_snapToControls = "0";
$Cfg_GuiEditor_Snapping_snapToEdges = "0";
$Cfg_GuiEditor_Snapping_snapToGuides = "0";
$Cfg_Interface_Editor_ToolFrameLocked = "0";
$Cfg_Interface_Editor_ToolFrameSize = "Normal";
$Cfg_ipsEditor_isEnabled = 1;
$Cfg_ipsEditor_pluginOrder = "4";
$Cfg_IpsEditor_pluginOrderDefault = "6";
$Cfg_LockToolbar = 0;
$Cfg_MaterialEditor_AOSuffix = "_s";
$Cfg_MaterialEditor_AutoAddAO = "1";
$Cfg_MaterialEditor_AutoAddComposite = "1";
$Cfg_MaterialEditor_AutoAddMetalness = "1";
$Cfg_MaterialEditor_AutoAddNormal = "1";
$Cfg_MaterialEditor_AutoAddSmoothness = "1";
$Cfg_MaterialEditor_AutoAddSpecular = "1";
$Cfg_MaterialEditor_CompositeSuffix = "_s";
$Cfg_MaterialEditor_DefaultMaterialFile = "10";
$Cfg_MaterialEditor_DiffuseSuffix = "_d";
$Cfg_materialEditor_isEnabled = 1;
$Cfg_MaterialEditor_MapModePBR = "1";
$Cfg_MaterialEditor_MetalnessSuffix = "_s";
$Cfg_MaterialEditor_NormalSuffix = "_n";
$Cfg_MaterialEditor_PBRenabled = "1";
$Cfg_materialEditor_pluginOrder = "2";
$Cfg_MaterialEditor_pluginOrderDefault = "5";
$Cfg_MaterialEditor_PropShowGroup_Advanced = "0";
$Cfg_MaterialEditor_PropShowGroup_Animation = "0";
$Cfg_MaterialEditor_PropShowGroup_Lighting = "1";
$Cfg_MaterialEditor_PropShowGroup_PBR = "0";
$Cfg_MaterialEditor_PropShowGroup_Rendering = "1";
$Cfg_MaterialEditor_PropShowGroup_TextureMaps = 1;
$Cfg_MaterialEditor_PropShowMap_Detail = "0";
$Cfg_MaterialEditor_PropShowMap_detailNormal = "0";
$Cfg_MaterialEditor_PropShowMap_Diffuse = 1;
$Cfg_MaterialEditor_PropShowMap_environment = "0";
$Cfg_MaterialEditor_PropShowMap_Light = "0";
$Cfg_MaterialEditor_PropShowMap_Normal = "1";
$Cfg_MaterialEditor_PropShowMap_Overlay = "0";
$Cfg_MaterialEditor_PropShowMap_specular = "0";
$Cfg_MaterialEditor_PropShowMap_tone = "0";
$Cfg_MaterialEditor_SmoothnessSuffix = "_s";
$Cfg_MaterialEditor_SpecularSuffix = "_s";
$Cfg_MaterialEditor_ThumbnailCountIndex = "3";
$Cfg_meshRoadEditor_isEnabled = 1;
$Cfg_meshRoadEditor_pluginOrder = "10";
$Cfg_MeshRoadEditor_pluginOrderDefault = "9";
$Cfg_NavEditor_backgroundBuild = "1";
$Cfg_navEditor_isEnabled = 1;
$Cfg_NavEditor_playSoundWhenDone = "1";
$Cfg_navEditor_pluginOrder = "7";
$Cfg_NavEditor_pluginOrderDefault = "15";
$Cfg_NavEditor_renderBVTree = "0";
$Cfg_NavEditor_renderMesh = "0";
$Cfg_NavEditor_renderPortals = "0";
$Cfg_NavEditor_saveIntermediates = "1";
$Cfg_NavEditor_spawnClass = "AIPlayer";
$Cfg_NavEditor_spawnDatablock = "DemoPlayerData";
$Cfg_particleEditor_isEnabled = 1;
$Cfg_particleEditor_pluginOrder = "8";
$Cfg_ParticleEditor_pluginOrderDefault = "14";
$Cfg_Plugins_ShapeLab_UseSimplifiedSystem = 1;
$Cfg_RiverEditor_DefaultDepth = "5";
$Cfg_RiverEditor_DefaultNormal = "0 0 1";
$Cfg_RiverEditor_DefaultWidth = "10";
$Cfg_RiverEditor_HoverNodeColor = "255 255 255 255";
$Cfg_RiverEditor_HoverSplineColor = "255 0 0 255";
$Cfg_riverEditor_isEnabled = 1;
$Cfg_riverEditor_pluginOrder = "11";
$Cfg_RiverEditor_pluginOrderDefault = "11";
$Cfg_RiverEditor_SelectedSplineColor = "255 0 255 255";
$Cfg_RoadEditor_borderMovePixelSize = "20";
$Cfg_RoadEditor_borderMoveSpeed = "0.1";
$Cfg_RoadEditor_consoleCircleSegments = "32";
$Cfg_RoadEditor_consoleFillColor = "0 0 0 0";
$Cfg_RoadEditor_consoleFrameColor = "255 0 0 255";
$Cfg_RoadEditor_consoleLineWidth = "1";
$Cfg_RoadEditor_consoleSphereLevel = "1";
$Cfg_RoadEditor_DefaultWidth = "10";
$Cfg_RoadEditor_HoverNodeColor = "255 255 255 255";
$Cfg_RoadEditor_HoverSplineColor = "255 0 0 255";
$Cfg_roadEditor_isEnabled = 1;
$Cfg_RoadEditor_MaterialName = "DefaultDecalRoadMaterial";
$Cfg_roadEditor_pluginOrder = "12";
$Cfg_RoadEditor_pluginOrderDefault = "10";
$Cfg_RoadEditor_SelectedSplineColor = "0 255 0 255";
$Cfg_SceneEditor_AutoCreatePrefab = "1";
$Cfg_SceneEditor_AutoLight_ShowLights = "0";
$Cfg_SceneEditor_AutoLight_ShowShapes = "0";
$Cfg_SceneEditor_AutoPrefabFolder = "art/models/prefabs/";
$Cfg_SceneEditor_AutoPrefabMode = "1";
$Cfg_SceneEditor_CoreGroup = "mgCore";
$Cfg_SceneEditor_CoverPointGroup = "CoverPoint";
$Cfg_SceneEditor_DropLocation = "10";
$Cfg_SceneEditor_EnvironmentGroup = "mgEnvironment";
$Cfg_SceneEditor_GroundCoverDefaultMaterial = "grass1";
$Cfg_SceneEditor_IconWidth = "120";
$Cfg_SceneEditor_isEnabled = 1;
$Cfg_SceneEditor_LightsGroup = "mgLights";
$Cfg_SceneEditor_MiscObjectGroup = "mgMiscObject";
$Cfg_SceneEditor_NavAIGroup = "NavAI";
$Cfg_SceneEditor_NavMeshGroup = "NavMesh";
$Cfg_SceneEditor_NavPathGroup = "NavPath";
$Cfg_SceneEditor_Occluders = "mgOccluders";
$Cfg_SceneEditor_pluginOrder = "1";
$Cfg_SceneEditor_pluginOrderDefault = "1";
$Cfg_SceneEditor_renameInternal = "0";
$Cfg_SceneEditor_SceneObjectsGroup = "mgSceneObjects";
$Cfg_SceneEditor_ShapeGroup = "mgShapeGroup";
$Cfg_SceneEditor_showClassNames = "0";
$Cfg_SceneEditor_showInternalNames = "0";
$Cfg_SceneEditor_showObjectIds = "0";
$Cfg_SceneEditor_showObjectNames = "1";
$Cfg_SceneEditor_SpawnGroup = "PlayerDropPoints";
$Cfg_SceneEditor_TSStaticGroup = "mgMapModels";
$Cfg_SceneEditor_Vehicle = "Vehicle";
$Cfg_ShapeEditor_AdvancedWindowVisible = "1";
$Cfg_ShapeEditor_AnimationBarVisible = "1";
$Cfg_ShapeEditor_BackgroundColor = "0 0 0 0.9";
$Cfg_ShapeEditor_GridDimension = "40 40";
$Cfg_ShapeEditor_GridSize = "0.1";
$Cfg_ShapeEditor_HighlightMaterial = "1";
$Cfg_ShapeEditor_isEnabled = "1";
$Cfg_ShapeEditor_pluginOrder = "13";
$Cfg_ShapeEditor_pluginOrderDefault = "13";
$Cfg_ShapeEditor_PreviewColorBG = "0 0 0 0.9";
$Cfg_ShapeEditor_RenderCollision = "0";
$Cfg_ShapeEditor_RenderMounts = "1";
$Cfg_ShapeEditor_ShowBounds = "0";
$Cfg_ShapeEditor_ShowGrid = "1";
$Cfg_ShapeEditor_ShowNodes = "1";
$Cfg_ShapeEditor_ShowObjBox = "1";
$Cfg_ShapeEditor_SunAmbientColor = "180 180 180 255";
$Cfg_ShapeEditor_SunAngleX = "45";
$Cfg_ShapeEditor_SunAngleZ = "135";
$Cfg_ShapeEditor_SunDiffuseColor = "255 255 255 255";
$Cfg_ShapeLab_AdvancedWindowVisible = "1";
$Cfg_ShapeLab_AnimationBarVisible = "1";
$Cfg_ShapeLab_BackgroundColor = "0.12549 0.00784314 0.996078 1";
$Cfg_ShapeLab_GridDimension = "40 30";
$Cfg_ShapeLab_GridSize = "0.1";
$Cfg_ShapeLab_HighlightMaterial = "1";
$Cfg_ShapeLab_isEnabled = 1;
$Cfg_ShapeLab_pluginOrder = "3";
$Cfg_ShapeLab_pluginOrderDefault = "7";
$Cfg_ShapeLab_PreviewColorBG = "0 0 0 0.9";
$Cfg_ShapeLab_RenderCollision = "0";
$Cfg_ShapeLab_RenderMounts = "1";
$Cfg_ShapeLab_ShowBounds = "0";
$Cfg_ShapeLab_ShowGrid = "1";
$Cfg_ShapeLab_ShowNodes = "1";
$Cfg_ShapeLab_ShowObjBox = "1";
$Cfg_ShapeLab_SunAmbientColor = "180";
$Cfg_ShapeLab_SunAngleX = "45";
$Cfg_ShapeLab_SunAngleZ = "135";
$Cfg_ShapeLab_SunDiffuseColor = "255";
$Cfg_TerrainEditor_adjustHeightVal = "10";
$Cfg_TerrainEditor_BrushPressure = "1";
$Cfg_TerrainEditor_BrushSetHeight = "1";
$Cfg_TerrainEditor_BrushSetHeightRange = "0 100";
$Cfg_TerrainEditor_BrushSize = "2";
$Cfg_TerrainEditor_BrushSoftness = "1";
$Cfg_TerrainEditor_BrushType = "box";
$Cfg_TerrainEditor_DefaultBrushPressure = "43";
$Cfg_TerrainEditor_DefaultBrushSetHeight = "100";
$Cfg_TerrainEditor_DefaultBrushSize = "8";
$Cfg_TerrainEditor_DefaultBrushSoftness = "43";
$Cfg_TerrainEditor_DefaultBrushType = "box";
$Cfg_TerrainEditor_isEnabled = 1;
$Cfg_TerrainEditor_maxBrushSize = "40 40";
$Cfg_TerrainEditor_noiseFactor = "1";
$Cfg_TerrainEditor_pluginOrder = "13";
$Cfg_TerrainEditor_pluginOrderDefault = "2";
$Cfg_TerrainEditor_scaleVal = "1";
$Cfg_TerrainEditor_setHeightVal = "100";
$Cfg_TerrainEditor_slopeMaxAngle = "90";
$Cfg_TerrainEditor_slopeMinAngle = "0";
$Cfg_TerrainEditor_smoothFactor = "0.1";
$Cfg_TerrainEditor_softSelectDefaultFilter = "1.000000 0.833333 0.666667 0.500000 0.333333 0.166667 0.000000";
$Cfg_TerrainEditor_softSelectFilter = "1.000000 0.833333 0.666667 0.500000 0.333333 0.166667 0.000000";
$Cfg_TerrainEditor_softSelectRadius = "50";
$Cfg_TerrainPainter_adjustHeightVal = "10";
$Cfg_TerrainPainter_DefaultBrushPressure = "50";
$Cfg_TerrainPainter_DefaultBrushSize = "2";
$Cfg_TerrainPainter_DefaultBrushSlopeMax = "90";
$Cfg_TerrainPainter_DefaultBrushSlopeMin = "0";
$Cfg_TerrainPainter_DefaultBrushSoftness = "50";
$Cfg_TerrainPainter_DefaultBrushType = "box";
$Cfg_TerrainPainter_isEnabled = 1;
$Cfg_TerrainPainter_noiseFactor = "1";
$Cfg_TerrainPainter_pluginOrder = "14";
$Cfg_TerrainPainter_pluginOrderDefault = "3";
$Cfg_TerrainPainter_scaleVal = "1";
$Cfg_TerrainPainter_setHeightVal = "100";
$Cfg_TerrainPainter_slopeMaxAngle = "90";
$Cfg_TerrainPainter_slopeMinAngle = "0";
$Cfg_TerrainPainter_smoothFactor = "0.1";
$Cfg_TerrainPainter_softSelectDefaultFilter = "1.000000 0.833333 0.666667 0.500000 0.333333 0.166667 0.000000";
$Cfg_TerrainPainter_softSelectFilter = "1.000000 0.833333 0.666667 0.500000 0.333333 0.166667 0.000000";
$Cfg_TerrainPainter_softSelectRadius = "50";
$Cfg_TLab_AutoSaveDelay = "5";
$Cfg_TLab_Class_Inspector_LogLevel = 0;
$Cfg_TLab_defaultGui = "MainMenuGui";
$Cfg_TLab_GuiEditorLoaded = 1;
$Cfg_TLab_LeftFrameMin = "123";
$Cfg_TLab_Object_DropTypes = "atOrigin atCamera atCameraRot belowCamera screenCenter atCentroid toTerrain belowSelection";
$Cfg_TLab_PrefabAutoMode = "Level Object Folder";
$Cfg_TLab_RightFrameMin = "123";
$Cfg_TLab_Theme = "Laborean";
$Cfg_TLab_ThemePath = "tlab/themes/Laborean/";
$Cfg_TorsionPath = "40";
$Cfg_UI_Editor_SideFrameWidth = "220";
$Cfg_UI_Editor_ToolFrameWidth = "280";
$Cfg_UI_Frame_SideFrameWidth = "220";
$Cfg_UI_Frame_ToolFrameWidth = "280";
$Cfg_UI_Menu_UseNativeMenu = 0;
$Cfg_WorldEditor_Misc_defaultHandle = "tlab/art/icons/default/DefaultHandle";
$Cfg_WorldEditor_Misc_documentationLocal = "../../../Documentation/Official";
$Cfg_WorldEditor_Misc_documentationReference = "../../../Documentation/Torque";
$Cfg_WorldEditor_Misc_documentationURL = "http://www.garagegames.com/products/torque-3d/documentation/user";
$Cfg_WorldEditor_Misc_forumURL = "http://www.garagegames.com/products/torque-3d/forums";
$Cfg_WorldEditor_Misc_lockedHandle = "tlab/art/icons/default/LockedHandle";
$Cfg_WorldEditor_Misc_planeDim = "500";
$Cfg_WorldEditor_Misc_renderPlane = "0";
$Cfg_WorldEditor_Misc_renderPlaneHashes = "0";
$Cfg_WorldEditor_Misc_selectHandle = "tlab/art/icons/default/SelectHandle";
| |
using System;
using System.Collections;
using System.Diagnostics;
using System.Drawing;
using System.Drawing.Imaging;
using System.Drawing.Drawing2D;
using System.Reflection;
using System.Runtime.InteropServices;
/*using System.Windows.Forms;*/
using System.Xml;
using SharpVectors.Dom.Svg;
namespace SharpVectors.Renderer.Gdi
{
/// <summary>
/// Wraps a Graphics object since it's sealed
/// </summary>
public class GraphicsWrapper : IDisposable
{
#region Private Fields
private bool _isStatic;
private Graphics _graphics;
private Graphics _idMapGraphics;
private Bitmap _idMapImage;
#endregion
#region Constructors
private GraphicsWrapper(Image image, bool isStatic)
{
this._isStatic = isStatic;
if(!IsStatic)
{
_idMapImage = new Bitmap(image.Width, image.Height);
_idMapGraphics = Graphics.FromImage(_idMapImage);
_idMapGraphics.InterpolationMode = InterpolationMode.NearestNeighbor;
_idMapGraphics.SmoothingMode = SmoothingMode.None;
_idMapGraphics.CompositingQuality = CompositingQuality.Invalid;
}
_graphics = Graphics.FromImage(image);
}
private GraphicsWrapper(IntPtr hdc, bool isStatic)
{
this._isStatic = isStatic;
if(!IsStatic)
{
// This will get resized when the actual size is known
_idMapImage = new Bitmap(0, 0);
_idMapGraphics = Graphics.FromImage(_idMapImage);
_idMapGraphics.InterpolationMode = InterpolationMode.NearestNeighbor;
_idMapGraphics.SmoothingMode = SmoothingMode.None;
_idMapGraphics.CompositingQuality = CompositingQuality.Invalid;
}
_graphics = Graphics.FromHdc(hdc);
}
#endregion
public static GraphicsWrapper FromImage(Image image, bool isStatic)
{
return new GraphicsWrapper(image, isStatic);
}
public static GraphicsWrapper FromHdc(IntPtr hdc, bool isStatic)
{
return new GraphicsWrapper(hdc, isStatic);
}
#region Properties
public bool IsStatic
{
get{return _isStatic;}
set{
_isStatic = value;
_idMapGraphics.Dispose();
_idMapGraphics = null;
}
}
public Graphics Graphics
{
get{return _graphics;}
set{_graphics = value;}
}
public Graphics IdMapGraphics
{
get{return _graphics;}
}
public Bitmap IdMapRaster
{
get{return _idMapImage;}
}
#endregion
#region Graphics members
public void Clear(Color color)
{
_graphics.Clear(color);
if(_idMapGraphics != null) _idMapGraphics.Clear(Color.Empty);
}
public void Dispose()
{
_graphics.Dispose();
if(_idMapGraphics != null) _idMapGraphics.Dispose();
}
public GraphicsContainerWrapper BeginContainer()
{
GraphicsContainerWrapper container = new GraphicsContainerWrapper();
if(_idMapGraphics != null) container.idmapGraphicsContainer = _idMapGraphics.BeginContainer();
container.mainGraphicsContainer = _graphics.BeginContainer();
return container;
}
public void EndContainer(GraphicsContainerWrapper container)
{
if(_idMapGraphics != null) _idMapGraphics.EndContainer(container.idmapGraphicsContainer);
_graphics.EndContainer(container.mainGraphicsContainer);
}
public SmoothingMode SmoothingMode
{
get{return _graphics.SmoothingMode;}
set{_graphics.SmoothingMode = value;}
}
public Matrix Transform
{
get{return _graphics.Transform;}
set
{
if(_idMapGraphics != null) _idMapGraphics.Transform = value;
_graphics.Transform = value;
}
}
public void SetClip(GraphicsPath path)
{
_graphics.SetClip(path);
}
public void SetClip(RectangleF rect)
{
if(_idMapGraphics != null) _idMapGraphics.SetClip(rect);
_graphics.SetClip(rect);
}
public void SetClip(Region region, CombineMode combineMode)
{
if(_idMapGraphics != null) _idMapGraphics.SetClip(region, combineMode);
_graphics.SetClip(region, combineMode);
}
public void TranslateClip(float x, float y)
{
if(_idMapGraphics != null) _idMapGraphics.TranslateClip(x, y);
_graphics.TranslateClip(x, y);
}
public void ResetClip()
{
if(_idMapGraphics != null) _idMapGraphics.ResetClip();
_graphics.ResetClip();
}
public void FillPath(GraphicsNode grNode, Brush brush, GraphicsPath path )
{
if(_idMapGraphics != null)
{
Brush idBrush = new SolidBrush(grNode.UniqueColor);
if(grNode.Element is SvgTextContentElement)
{
_idMapGraphics.FillRectangle(idBrush, path.GetBounds());
}
else
{
_idMapGraphics.FillPath(idBrush, path);
}
}
_graphics.FillPath(brush, path);
}
public void DrawPath(GraphicsNode grNode, Pen pen, GraphicsPath path)
{
if(_idMapGraphics != null)
{
Pen idPen = new Pen(grNode.UniqueColor, pen.Width);
_idMapGraphics.DrawPath(idPen, path);
}
_graphics.DrawPath(pen, path);
}
public void TranslateTransform(float dx, float dy)
{
if(_idMapGraphics != null) _idMapGraphics.TranslateTransform(dx, dy);
_graphics.TranslateTransform(dx, dy);
}
public void ScaleTransform(float sx, float sy)
{
if(_idMapGraphics != null) _idMapGraphics.ScaleTransform(sx, sy);
_graphics.ScaleTransform(sx, sy);
}
public void RotateTransform(float angle)
{
if(_idMapGraphics != null) _idMapGraphics.RotateTransform(angle);
_graphics.RotateTransform(angle);
}
public void DrawImage(GraphicsNode grNode, Image image, Rectangle destRect, float srcX, float srcY, float srcWidth, float srcHeight, GraphicsUnit graphicsUnit, ImageAttributes imageAttributes)
{
if(_idMapGraphics != null)
{
// This handles pointer-events for visibleFill visibleStroke and visible
/*Brush idBrush = new SolidBrush(grNode.UniqueColor);
GraphicsPath gp = new GraphicsPath();
gp.AddRectangle(destRect);
_idMapGraphics.FillPath(idBrush, gp);*/
Color unique = grNode.UniqueColor;
float r = (float)unique.R / 255;
float g = (float)unique.G / 255;
float b = (float)unique.B / 255;
ColorMatrix colorMatrix = new ColorMatrix(
new float[][] { new float[] {0f, 0f, 0f, 0f, 0f},
new float[] {0f, 0f, 0f, 0f, 0f},
new float[] {0f, 0f, 0f, 0f, 0f},
new float[] {0f, 0f, 0f, 1f, 0f},
new float[] {r, g, b, 0f, 1f} });
ImageAttributes ia = new ImageAttributes();
ia.SetColorMatrix(colorMatrix,ColorMatrixFlag.Default, ColorAdjustType.Bitmap);
_idMapGraphics.DrawImage(image, destRect, srcX, srcY, srcWidth, srcHeight, graphicsUnit, ia);
}
_graphics.DrawImage(image, destRect, srcX, srcY, srcWidth, srcHeight, graphicsUnit, imageAttributes);
}
#endregion
}
/// <summary>
/// Wraps a GraphicsContainer because it is sealed.
/// This is a helper for GraphicsWrapper so that it can save
/// multiple container states. It holds the containers
/// for both the idMapGraphics and the main graphics
/// being rendered in the GraphicsWrapper.
/// </summary>
    public struct GraphicsContainerWrapper {
        // Saved container state for the hit-test id map Graphics.
        internal GraphicsContainer idmapGraphicsContainer;
        // Saved container state for the main Graphics being rendered.
        internal GraphicsContainer mainGraphicsContainer;
    }
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Xunit;
namespace System.Linq.Tests
{
public class RangeTests : EnumerableTests
{
[Fact]
public void Range_ProduceCorrectSequence()
{
var rangeSequence = Enumerable.Range(1, 100);
int expected = 0;
foreach (var val in rangeSequence)
{
expected++;
Assert.Equal(expected, val);
}
Assert.Equal(100, expected);
}
[Fact]
public void Range_ToArray_ProduceCorrectResult()
{
var array = Enumerable.Range(1, 100).ToArray();
Assert.Equal(array.Length, 100);
for (var i = 0; i < array.Length; i++)
Assert.Equal(i + 1, array[i]);
}
[Fact]
public void Range_ToList_ProduceCorrectResult()
{
var list = Enumerable.Range(1, 100).ToList();
Assert.Equal(list.Count, 100);
for (var i = 0; i < list.Count; i++)
Assert.Equal(i + 1, list[i]);
}
[Fact]
public void Range_ZeroCountLeadToEmptySequence()
{
var array = Enumerable.Range(1, 0).ToArray();
var array2 = Enumerable.Range(int.MinValue, 0).ToArray();
var array3 = Enumerable.Range(int.MaxValue, 0).ToArray();
Assert.Equal(array.Length, 0);
Assert.Equal(array2.Length, 0);
Assert.Equal(array3.Length, 0);
}
[Fact]
public void Range_ThrowExceptionOnNegativeCount()
{
Assert.Throws<ArgumentOutOfRangeException>("count", () => Enumerable.Range(1, -1));
Assert.Throws<ArgumentOutOfRangeException>("count", () => Enumerable.Range(1, int.MinValue));
}
[Fact]
public void Range_ThrowExceptionOnOverflow()
{
Assert.Throws<ArgumentOutOfRangeException>("count", () => Enumerable.Range(1000, int.MaxValue));
Assert.Throws<ArgumentOutOfRangeException>("count", () => Enumerable.Range(int.MaxValue, 1000));
Assert.Throws<ArgumentOutOfRangeException>("count", () => Enumerable.Range(Int32.MaxValue - 10, 20));
}
[Fact]
public void Range_NotEnumerateAfterEnd()
{
using (var rangeEnum = Enumerable.Range(1, 1).GetEnumerator())
{
Assert.True(rangeEnum.MoveNext());
Assert.False(rangeEnum.MoveNext());
Assert.False(rangeEnum.MoveNext());
}
}
[Fact]
public void Range_EnumerableAndEnumeratorAreSame()
{
var rangeEnumerable = Enumerable.Range(1, 1);
using (var rangeEnumerator = rangeEnumerable.GetEnumerator())
{
Assert.Same(rangeEnumerable, rangeEnumerator);
}
}
[Fact]
public void Range_GetEnumeratorReturnUniqueInstances()
{
var rangeEnumerable = Enumerable.Range(1, 1);
using (var enum1 = rangeEnumerable.GetEnumerator())
using (var enum2 = rangeEnumerable.GetEnumerator())
{
Assert.NotSame(enum1, enum2);
}
}
[Fact]
public void Range_ToInt32MaxValue()
{
int from = Int32.MaxValue - 3;
int count = 4;
var rangeEnumerable = Enumerable.Range(from, count);
Assert.Equal(count, rangeEnumerable.Count());
int[] expected = { Int32.MaxValue - 3, Int32.MaxValue - 2, Int32.MaxValue - 1, Int32.MaxValue };
Assert.Equal(expected, rangeEnumerable);
}
[Fact]
public void RepeatedCallsSameResults()
{
Assert.Equal(Enumerable.Range(-1, 2), Enumerable.Range(-1, 2));
Assert.Equal(Enumerable.Range(0, 0), Enumerable.Range(0, 0));
}
[Fact]
public void NegativeStart()
{
int start = -5;
int count = 1;
int[] expected = { -5 };
Assert.Equal(expected, Enumerable.Range(start, count));
}
[Fact]
public void ArbitraryStart()
{
int start = 12;
int count = 6;
int[] expected = { 12, 13, 14, 15, 16, 17 };
Assert.Equal(expected, Enumerable.Range(start, count));
}
[Fact]
public void Take()
{
Assert.Equal(Enumerable.Range(0, 10), Enumerable.Range(0, 20).Take(10));
}
[Fact]
public void TakeExcessive()
{
Assert.Equal(Enumerable.Range(0, 10), Enumerable.Range(0, 10).Take(int.MaxValue));
}
[Fact]
public void Skip()
{
Assert.Equal(Enumerable.Range(10, 10), Enumerable.Range(0, 20).Skip(10));
}
[Fact]
public void SkipExcessive()
{
Assert.Empty(Enumerable.Range(10, 10).Skip(20));
}
[Fact]
public void ElementAt()
{
Assert.Equal(4, Enumerable.Range(0, 10).ElementAt(4));
}
[Fact]
public void ElementAtExcessiveThrows()
{
Assert.Throws<ArgumentOutOfRangeException>("index", () => Enumerable.Range(0, 10).ElementAt(100));
}
[Fact]
public void ElementAtOrDefault()
{
Assert.Equal(4, Enumerable.Range(0, 10).ElementAtOrDefault(4));
}
[Fact]
public void ElementAtOrDefaultExcessiveIsDefault()
{
Assert.Equal(0, Enumerable.Range(52, 10).ElementAtOrDefault(100));
}
[Fact]
public void First()
{
Assert.Equal(57, Enumerable.Range(57, 1000000000).First());
}
[Fact]
public void FirstOrDefault()
{
Assert.Equal(-100, Enumerable.Range(-100, int.MaxValue).FirstOrDefault());
}
[Fact]
public void Last()
{
Assert.Equal(1000000056, Enumerable.Range(57, 1000000000).Last());
}
[Fact]
public void LastOrDefault()
{
Assert.Equal(int.MaxValue - 101, Enumerable.Range(-100, int.MaxValue).LastOrDefault());
}
}
}
| |
#region Licensing notice
// Copyright (C) 2012, Alexander Wieser-Kuciel <alexander.wieser@crystalbyte.de>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion
#region Using directives
using System;
using System.Runtime.InteropServices;
using Crystalbyte.Spectre.Interop;
using Crystalbyte.Spectre.Projections;
using Crystalbyte.Spectre.Scripting;
#endregion
namespace Crystalbyte.Spectre.UI {
public sealed class Frame : RefCountedCefTypeAdapter {
private readonly StringUtf16 _aboutBlank;
private Frame(IntPtr handle)
: base(typeof (CefFrame)) {
Handle = handle;
_aboutBlank = new StringUtf16("about:blank");
}
public bool IsFocused {
get {
var r = MarshalFromNative<CefFrame>();
var function = (CefFrameCapiDelegates.IsFocusedCallback)
Marshal.GetDelegateForFunctionPointer(r.IsFocused,
typeof (CefFrameCapiDelegates.IsFocusedCallback));
var value = function(Handle);
return Convert.ToBoolean(value);
}
}
public bool IsMain {
get {
var r = MarshalFromNative<CefFrame>();
var function = (CefFrameCapiDelegates.IsMainCallback)
Marshal.GetDelegateForFunctionPointer(r.IsMain,
typeof (CefFrameCapiDelegates.IsMainCallback));
var value = function(Handle);
return Convert.ToBoolean(value);
}
}
public bool IsValid {
get {
var r = MarshalFromNative<CefFrame>();
var function = (CefCommandLineCapiDelegates.IsValidCallback)
Marshal.GetDelegateForFunctionPointer(r.IsValid,
typeof (CefCommandLineCapiDelegates.IsValidCallback
));
var value = function(Handle);
return Convert.ToBoolean(value);
}
}
public long Id {
get {
var r = MarshalFromNative<CefFrame>();
var function = (CefBrowserCapiDelegates.GetIdentifierCallback)
Marshal.GetDelegateForFunctionPointer(r.GetIdentifier,
typeof (
CefBrowserCapiDelegates.GetIdentifierCallback));
return function(Handle);
}
}
public string Name {
get {
var r = MarshalFromNative<CefFrame>();
var function = (CefDomCapiDelegates.GetNameCallback)
Marshal.GetDelegateForFunctionPointer(r.GetName,
typeof (CefDomCapiDelegates.GetNameCallback));
var handle = function(Handle);
return StringUtf16.ReadStringAndFree(handle);
}
}
public Frame Parent {
get {
var r = MarshalFromNative<CefFrame>();
var function = (CefDomCapiDelegates.GetParentCallback)
Marshal.GetDelegateForFunctionPointer(r.GetParent,
typeof (CefDomCapiDelegates.GetParentCallback));
var handle = function(Handle);
return handle == IntPtr.Zero ? null : FromHandle(handle);
}
}
public string Url {
get {
var r = MarshalFromNative<CefFrame>();
var function = (CefDownloadItemCapiDelegates.GetUrlCallback)
Marshal.GetDelegateForFunctionPointer(r.GetUrl,
typeof (CefDownloadItemCapiDelegates.GetUrlCallback
));
var handle = function(Handle);
return handle == IntPtr.Zero ? string.Empty : StringUtf16.ReadStringAndFree(handle);
}
}
public Browser Browser {
get {
var r = MarshalFromNative<CefFrame>();
var function = (CefBrowserCapiDelegates.GetBrowserCallback)
Marshal.GetDelegateForFunctionPointer(r.GetBrowser,
typeof (CefBrowserCapiDelegates.GetBrowserCallback));
var handle = function(Handle);
return Browser.FromHandle(handle);
}
}
public ScriptingContext Context {
get {
var r = MarshalFromNative<CefFrame>();
var function = (CefFrameCapiDelegates.GetV8contextCallback)
Marshal.GetDelegateForFunctionPointer(r.GetV8context,
typeof (CefFrameCapiDelegates.GetV8contextCallback));
var handle = function(Handle);
return ScriptingContext.FromHandle(handle);
}
}
protected override void DisposeNative() {
if (_aboutBlank.Handle != IntPtr.Zero) {
_aboutBlank.Free();
}
base.DisposeNative();
}
public static Frame FromHandle(IntPtr handle) {
return new Frame(handle);
}
public void Navigate(string address) {
var u = new StringUtf16(address);
var r = MarshalFromNative<CefFrame>();
var action = (CefFrameCapiDelegates.LoadUrlCallback)
Marshal.GetDelegateForFunctionPointer(r.LoadUrl, typeof (CefFrameCapiDelegates.LoadUrlCallback));
action(Handle, u.Handle);
u.Free();
}
public void Display(string source) {
var u = new StringUtf16(source);
var reflection = MarshalFromNative<CefFrame>();
var action = (CefFrameCapiDelegates.LoadStringCallback)
Marshal.GetDelegateForFunctionPointer(reflection.LoadString,
typeof (CefFrameCapiDelegates.LoadStringCallback));
action(Handle, u.Handle, _aboutBlank.Handle);
u.Free();
}
public void Navigate(Uri address) {
Navigate(address.AbsoluteUri);
}
public void SelectAll() {
var r = MarshalFromNative<CefFrame>();
var action = (CefFrameCapiDelegates.SelectAllCallback)
Marshal.GetDelegateForFunctionPointer(r.SelectAll,
typeof (CefFrameCapiDelegates.SelectAllCallback));
action(Handle);
}
public void Copy() {
var r = MarshalFromNative<CefFrame>();
var action = (CefFrameCapiDelegates.CopyCallback2)
Marshal.GetDelegateForFunctionPointer(r.Copy, typeof (CefFrameCapiDelegates.CopyCallback2));
action(Handle);
}
public void Cut() {
var r = MarshalFromNative<CefFrame>();
var action = (CefFrameCapiDelegates.CutCallback)
Marshal.GetDelegateForFunctionPointer(r.Cut, typeof (CefFrameCapiDelegates.CutCallback));
action(Handle);
}
public void Undo() {
var r = MarshalFromNative<CefFrame>();
var action = (CefFrameCapiDelegates.UndoCallback)
Marshal.GetDelegateForFunctionPointer(r.Undo, typeof (CefFrameCapiDelegates.UndoCallback));
action(Handle);
}
public void Delete() {
var r = MarshalFromNative<CefFrame>();
var action = (CefFrameCapiDelegates.DelCallback)
Marshal.GetDelegateForFunctionPointer(r.Del, typeof (CefFrameCapiDelegates.DelCallback));
action(Handle);
}
public void Redo() {
var r = MarshalFromNative<CefFrame>();
var action = (CefFrameCapiDelegates.RedoCallback)
Marshal.GetDelegateForFunctionPointer(r.Redo, typeof (CefFrameCapiDelegates.RedoCallback));
action(Handle);
}
public void Paste() {
var r = MarshalFromNative<CefFrame>();
var action = (CefFrameCapiDelegates.PasteCallback)
Marshal.GetDelegateForFunctionPointer(r.Paste, typeof (CefFrameCapiDelegates.PasteCallback));
action(Handle);
}
public void ExecuteJavascript(string code) {
ExecuteJavascript(code, "about:blank", 0);
}
public void ExecuteJavascript(string code, string scriptUrl) {
ExecuteJavascript(code, scriptUrl, 0);
}
public void ExecuteJavascript(string code, string scriptUrl, int line) {
var s = new StringUtf16(scriptUrl);
var c = new StringUtf16(code);
var reflection = MarshalFromNative<CefFrame>();
var action = (CefFrameCapiDelegates.ExecuteJavaScriptCallback)
Marshal.GetDelegateForFunctionPointer(reflection.ExecuteJavaScript,
typeof (CefFrameCapiDelegates.ExecuteJavaScriptCallback));
action(Handle, c.Handle, s.Handle, line);
c.Free();
s.Free();
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Portions derived from React Native:
// Copyright (c) 2015-present, Facebook, Inc.
// Licensed under the MIT License.
using Newtonsoft.Json.Linq;
using ReactNative.Bridge;
using ReactNative.Common;
using ReactNative.Modules.Core;
using ReactNative.Modules.DevSupport;
using ReactNative.Tracing;
using System;
using System.IO;
using System.Reactive.Disposables;
using System.Threading;
using System.Threading.Tasks;
#if WINDOWS_UWP
using Windows.Storage;
using Windows.ApplicationModel.Core;
#else
using PCLStorage;
using System.Reflection;
using System.Windows;
#endif
namespace ReactNative.DevSupport
{
class DevSupportManager : IDevSupportManager, IDisposable
{
        // Sentinel cookie used for errors raised natively (not reported from JS).
        private const int NativeErrorCookie = -1;
        // File name under local storage for the cached dev bundle.
        private const string JSBundleFileName = "ReactNativeDevBundle.js";
        // Holds the active live-reload poller; assigning disposes the previous one.
        private readonly SerialDisposable _pollingDisposable = new SerialDisposable();
        private readonly IReactInstanceDevCommandsHandler _reactInstanceCommandsHandler;
        // True when the bundle should be fetched from the packager server.
        private readonly bool _shouldLoadFromPackagerServer;
        private readonly string _jsAppBundleName;
        private readonly DevInternalSettings _devSettings;
        private readonly DevServerHelper _devServerHelper;
        private bool _isDevSupportEnabled = true;
        private ReactContext _currentReactContext;
        // RedBox (error dialog) state: instance, dismiss action, open flag.
        private RedBoxDialog _redBoxDialog;
        private Action _dismissRedBoxDialog;
        private bool _redBoxDialogOpen;
        // Dev options dialog state: instance, dismiss action, open flag.
        private DevOptionDialog _devOptionsDialog;
        private Action _dismissDevOptionsDialog;
        private bool _devOptionsDialogOpen;
        /// <summary>
        /// Instantiates the dev support manager and applies the current settings
        /// (which may start the live-reload poller).
        /// </summary>
        /// <param name="reactInstanceCommandsHandler">Handler for instance-level dev commands (reload, lock).</param>
        /// <param name="shouldLoadFromPackagerServer">Whether to load the bundle from the packager server.</param>
        /// <param name="jsAppBundleName">Bundle name, or null when none is configured.</param>
        public DevSupportManager(
            IReactInstanceDevCommandsHandler reactInstanceCommandsHandler,
            bool shouldLoadFromPackagerServer,
            string jsAppBundleName)
        {
            _reactInstanceCommandsHandler = reactInstanceCommandsHandler;
            _shouldLoadFromPackagerServer = shouldLoadFromPackagerServer;
            _jsAppBundleName = jsAppBundleName;
            _devSettings = new DevInternalSettings(this);
            _devServerHelper = new DevServerHelper(_devSettings);
            ReloadSettings();
        }
public event Action BeforeShowDevOptionsDialog;
public IDeveloperSettings DevSettings
{
get
{
return _devSettings;
}
}
public string DownloadedJavaScriptBundleFile
{
get
{
return JSBundleFileName;
}
}
public bool IsEnabled
{
get
{
return _isDevSupportEnabled;
}
set
{
if (value != _isDevSupportEnabled)
{
_isDevSupportEnabled = value;
ReloadSettings();
}
}
}
public bool IsRemoteDebuggingEnabled
{
get
{
return _devSettings.IsRemoteDebuggingEnabled;
}
set
{
_devSettings.IsRemoteDebuggingEnabled = value;
}
}
public bool IsProgressDialogEnabled
{
get;
set;
} = true;
public string SourceMapUrl
{
get
{
if (_jsAppBundleName == null)
{
return "";
}
return _devServerHelper.GetSourceMapUrl(_jsAppBundleName);
}
}
public string SourceUrl
{
get
{
if (_jsAppBundleName == null)
{
return "";
}
return _devServerHelper.GetSourceUrl(_jsAppBundleName);
}
}
public string JavaScriptBundleUrlForRemoteDebugging
{
get
{
return _devServerHelper.GetJavaScriptBundleUrlForRemoteDebugging(_jsAppBundleName);
}
}
public void HandleException(Exception exception)
{
if (IsEnabled)
{
ShowNewNativeError(exception.Message, exception);
}
else
{
RnLog.Fatal(ReactConstants.RNW, exception, $"Exception caught in top handler");
}
}
        /// <summary>
        /// Checks whether a previously downloaded JavaScript bundle exists in
        /// local storage and is newer than the installed native application.
        /// </summary>
        /// <returns><c>true</c> if a cached bundle can be used; otherwise <c>false</c>.</returns>
        public bool HasUpToDateBundleInCache()
        {
            // Only relevant with dev support on and remote debugging off
            // (remote debugging runs the bundle inside the debugger instead).
            if (_isDevSupportEnabled && !IsRemoteDebuggingEnabled)
            {
#if WINDOWS_UWP
                // UWP: compare against the package install time.
                var lastNativeUpdateTime = Windows.ApplicationModel.Package.Current.InstalledDate.UtcDateTime;
                var localFolder = ApplicationData.Current.LocalFolder.Path;
#else
                return false; /* fix the issue with metro bundle https://github.com/facebook/metro/issues/375
                 should be removed once it will be fixed */
#pragma warning disable CS0162 // Unreachable code detected remove [remove this once issue above will be fixed]
                // Non-UWP: compare against the entry assembly's write time.
                var lastNativeUpdateTime = File.GetLastWriteTimeUtc(Assembly.GetEntryAssembly().Location);
#pragma warning restore CS0162 // Unreachable code detected
                var localFolder = FileSystem.Current.LocalStorage.Path;
#endif
                var jsBundleFileName = Path.Combine(localFolder, JSBundleFileName);
                if (File.Exists(jsBundleFileName))
                {
                    // The cache is fresh only if written after the native app.
                    return File.GetLastWriteTimeUtc(jsBundleFileName) > lastNativeUpdateTime;
                }
            }
            return false;
        }
public void ShowNewNativeError(string message, Exception exception)
{
if (exception is JavaScriptException javaScriptException && javaScriptException.JavaScriptStackTrace != null)
{
var stackTrace = StackTraceHelper.ConvertChakraStackTrace(javaScriptException.JavaScriptStackTrace);
ShowNewError(exception.Message, stackTrace, NativeErrorCookie);
}
else
{
ShowNewError(message, StackTraceHelper.ConvertNativeStackTrace(exception), NativeErrorCookie);
}
}
public void ShowNewJavaScriptError(string title, JArray details, int errorCookie)
{
ShowNewError(title, StackTraceHelper.ConvertJavaScriptStackTrace(details), errorCookie);
}
public void UpdateJavaScriptError(string message, JArray details, int errorCookie)
{
DispatcherHelpers.RunOnDispatcher(() =>
{
if (_redBoxDialog == null
|| !_redBoxDialogOpen
|| errorCookie != _redBoxDialog.ErrorCookie)
{
return;
}
_redBoxDialog.Message = message;
_redBoxDialog.StackTrace = StackTraceHelper.ConvertJavaScriptStackTrace(details);
});
}
public void HideRedboxDialog()
{
var dismissRedBoxDialog = _dismissRedBoxDialog;
if (_redBoxDialogOpen && dismissRedBoxDialog != null)
{
dismissRedBoxDialog();
}
}
        /// <summary>
        /// Shows the developer options dialog (reload, remote debugging, hot
        /// reloading, live reload, element inspector). No-op when a dialog is
        /// already showing or dev support is disabled.
        /// </summary>
        public void ShowDevOptionsDialog()
        {
            if (_devOptionsDialog != null || !IsEnabled)
            {
                return;
            }
            DispatcherHelpers.RunOnDispatcher(() =>
            {
                // Option labels reflect the current toggle state at build time.
                var options = new[]
                {
                    new DevOptionHandler(
                        "Reload JavaScript",
                        HandleReloadJavaScript),
                    new DevOptionHandler(
                        IsRemoteDebuggingEnabled
                            ? "Stop JS Remote Debugging"
                            : "Start JS Remote Debugging",
                        () =>
                        {
                            IsRemoteDebuggingEnabled = !IsRemoteDebuggingEnabled;
                            HandleReloadJavaScript();
                        }),
                    new DevOptionHandler(
                        _devSettings.IsHotModuleReplacementEnabled
                            ? "Disable Hot Reloading"
                            : "Enable Hot Reloading",
                        () =>
                        {
                            _devSettings.IsHotModuleReplacementEnabled = !_devSettings.IsHotModuleReplacementEnabled;
                            HandleReloadJavaScript();
                        }),
                    new DevOptionHandler(
                        _devSettings.IsReloadOnJavaScriptChangeEnabled
                            ? "Disable Live Reload"
                            : "Enable Live Reload",
                        () =>
                            _devSettings.IsReloadOnJavaScriptChangeEnabled =
                                !_devSettings.IsReloadOnJavaScriptChangeEnabled),
                    new DevOptionHandler(
                        _devSettings.IsElementInspectorEnabled
                            ? "Hide Inspector"
                            : "Show Inspector",
                        () =>
                        {
                            _devSettings.IsElementInspectorEnabled = !_devSettings.IsElementInspectorEnabled;
                            // Notify JS so the inspector overlay is toggled.
                            _currentReactContext?
                                .GetJavaScriptModule<RCTDeviceEventEmitter>()
                                .emit("toggleElementInspector", null);
                        }),
                };
                _devOptionsDialogOpen = true;
                _devOptionsDialog = new DevOptionDialog();
                // Reset the tracking state when the dialog closes for any reason.
                _devOptionsDialog.Closed += (_, __) =>
                {
                    _devOptionsDialogOpen = false;
                    _dismissDevOptionsDialog = null;
                    _devOptionsDialog = null;
                };
                foreach (var option in options)
                {
                    _devOptionsDialog.Add(option.Name, option.OnSelect);
                }
                // Close any RedBox first so the options dialog is visible.
                if (_redBoxDialog != null)
                {
                    _dismissRedBoxDialog();
                }
                BeforeShowDevOptionsDialog?.Invoke();
#if WINDOWS_UWP
                var asyncInfo = _devOptionsDialog.ShowAsync();
                _dismissDevOptionsDialog = asyncInfo.Cancel;
                foreach (var option in options)
                {
                    option.HideDialog = _dismissDevOptionsDialog;
                }
#else
                // WPF: parent to the main window when available; otherwise show
                // the dialog topmost, centered on the screen.
                if (Application.Current != null && Application.Current.MainWindow != null && Application.Current.MainWindow.IsLoaded)
                {
                    _devOptionsDialog.Owner = Application.Current.MainWindow;
                }
                else
                {
                    _devOptionsDialog.Topmost = true;
                    _devOptionsDialog.WindowStartupLocation = WindowStartupLocation.CenterScreen;
                }
                _dismissDevOptionsDialog = _devOptionsDialog.Close;
                _devOptionsDialog.Show();
                foreach (var option in options)
                {
                    option.HideDialog = _dismissDevOptionsDialog;
                }
#endif
            });
        }
private void HideDevOptionsDialog()
{
var dismissDevOptionsDialog = _dismissDevOptionsDialog;
if (_devOptionsDialogOpen && dismissDevOptionsDialog != null)
{
dismissDevOptionsDialog();
}
}
public void OnNewReactContextCreated(ReactContext context)
{
ResetCurrentContext(context);
}
public void OnReactContextDestroyed(ReactContext context)
{
if (context == _currentReactContext)
{
ResetCurrentContext(null);
}
}
public Task<bool> IsPackagerRunningAsync(CancellationToken token)
{
return _devServerHelper.IsPackagerRunningAsync(token);
}
public Task<ReactContext> CreateReactContextFromPackagerAsync(CancellationToken token)
{
DispatcherHelpers.AssertOnDispatcher();
HideRedboxDialog();
HideDevOptionsDialog();
if (IsRemoteDebuggingEnabled)
{
return ReloadJavaScriptInProxyModeAsync(token);
}
else if (_shouldLoadFromPackagerServer)
{
return ReloadJavaScriptFromServerAsync(token);
}
else
{
return _reactInstanceCommandsHandler.CreateReactContextFromBundleAsync(token);
}
}
public void ReloadSettings()
{
if (_isDevSupportEnabled)
{
if (_devSettings.IsReloadOnJavaScriptChangeEnabled)
{
_pollingDisposable.Disposable =
_devServerHelper.StartPollingOnChangeEndpoint(HandleReloadJavaScript);
}
else
{
// Disposes any existing poller
_pollingDisposable.Disposable = Disposable.Empty;
}
}
else
{
if (_redBoxDialog != null)
{
_dismissRedBoxDialog();
}
_pollingDisposable.Disposable = Disposable.Empty;
}
}
        /// <summary>
        /// Releases the change poller and the dev server helper.
        /// </summary>
        public void Dispose()
        {
            _pollingDisposable.Dispose();
            _devServerHelper.Dispose();
        }
        /// <summary>
        /// Reloads the JavaScript bundle. Intentionally <c>async void</c>: it is
        /// used as a fire-and-forget callback (dev menu option, change poller).
        /// </summary>
        public async void HandleReloadJavaScript()
        {
            RnLog.Info(ReactConstants.RNW, $"DevSupportManager: HandleReloadJavaScript - entry");
            // Serialize reloads against other instance-level commands.
            using (await _reactInstanceCommandsHandler.LockAsync())
            {
                RnLog.Info(ReactConstants.RNW, $"DevSupportManager: HandleReloadJavaScript - execute");
                await CreateReactContextFromPackagerAsync(CancellationToken.None);
                RnLog.Info(ReactConstants.RNW, $"DevSupportManager: HandleReloadJavaScript - done");
            }
        }
/// <summary>
/// Builds (but does not show) a progress dialog with the given message,
/// or returns <c>null</c> when progress dialogs are disabled.
/// </summary>
/// <param name="message">Message displayed in the dialog body.</param>
/// <returns>The configured dialog, or <c>null</c> if disabled.</returns>
private ProgressDialog CreateProgressDialog(string message)
{
    if (!IsProgressDialogEnabled)
    {
        return null;
    }

    var dialog = new ProgressDialog("Please wait...", message);
#if !WINDOWS_UWP
    var mainWindow = Application.Current != null ? Application.Current.MainWindow : null;
    if (mainWindow != null && mainWindow.IsLoaded)
    {
        dialog.Owner = mainWindow;
    }
    else
    {
        // No loaded main window to parent to; float the dialog centered on screen.
        dialog.Topmost = true;
        dialog.WindowStartupLocation = WindowStartupLocation.CenterScreen;
    }
#endif
    return dialog;
}
/// <summary>
/// Shows the given progress dialog and returns an action that dismisses it.
/// </summary>
/// <param name="progressDialog">The dialog to show. NOTE(review): this is
/// dereferenced unconditionally, yet CreateProgressDialog can return null
/// when progress dialogs are disabled — confirm callers never pass null.</param>
/// <returns>An action that closes the dialog, or <c>null</c> if it could not be shown.</returns>
private Action ShowProgressDialog(ProgressDialog progressDialog)
{
#if WINDOWS_UWP
    if (CoreApplication.GetCurrentView().CoreWindow == null)
    {
        // Main UI thread has no CoreWindow, so we can't parent a dialog box
        RnLog.Info(ReactConstants.RNW, $"ProgressDialog can't be shown due to the lack of a CoreWindow");
        return null;
    }
    var operation = progressDialog.ShowAsync();
    // Cancelling the ShowAsync operation dismisses the UWP content dialog.
    return operation.Cancel;
#else
    progressDialog.Show();
    return progressDialog.Close;
#endif
}
/// <summary>
/// Swaps in a new current React context (possibly null) and, when hot
/// module replacement is enabled, turns on the HMR client for it.
/// </summary>
/// <param name="context">The context to track, or <c>null</c> to clear.</param>
private void ResetCurrentContext(ReactContext context)
{
    // Nothing to do when the context is unchanged.
    if (_currentReactContext == context)
    {
        return;
    }

    _currentReactContext = context;

    if (_devSettings.IsHotModuleReplacementEnabled && context != null)
    {
        var sourceUri = new Uri(SourceUrl);
        var bundlePath = sourceUri.LocalPath.Substring(1); // strip initial slash in path
        context.GetJavaScriptModule<HMRClient>().enable(
            "windows",
            bundlePath,
            sourceUri.Host,
            sourceUri.Port);
    }
}
/// <summary>
/// Displays the RedBox error dialog with the given message and stack trace,
/// marshalling to the dispatcher thread. Re-entrant calls while a RedBox is
/// already open are ignored.
/// </summary>
/// <param name="message">Error message to display.</param>
/// <param name="stack">JavaScript stack frames to display.</param>
/// <param name="errorCookie">Identifier used to correlate later updates to this error.</param>
private void ShowNewError(string message, IStackFrame[] stack, int errorCookie)
{
    RnLog.Error(ReactConstants.RNW, $"Showing RedBox with message: {message}");
    DispatcherHelpers.RunOnDispatcher(() =>
    {
#if WINDOWS_UWP
        if (CoreApplication.GetCurrentView().CoreWindow == null)
        {
            // Main UI thread has no CoreWindow, so we can't parent a dialog box
            RnLog.Info(ReactConstants.RNW, $"RedBox can't be shown due to the lack of a CoreWindow");
            return;
        }
#endif
        // Lazily create the dialog; it is nulled out again on close below.
        if (_redBoxDialog == null)
        {
            _redBoxDialog = new RedBoxDialog(HandleReloadJavaScript);
        }

        // Only one RedBox at a time; later errors are dropped while open.
        if (_redBoxDialogOpen)
        {
            return;
        }

        _redBoxDialogOpen = true;
        _redBoxDialog.ErrorCookie = errorCookie;
        _redBoxDialog.Message = message;
        _redBoxDialog.StackTrace = stack;
        // Closing the dialog resets all RedBox state so the next error
        // creates a fresh dialog.
        _redBoxDialog.Closed += (_, __) =>
        {
            _redBoxDialogOpen = false;
            _dismissRedBoxDialog = null;
            _redBoxDialog = null;
        };
#if WINDOWS_UWP
        var asyncInfo = _redBoxDialog.ShowAsync();
        // Cancelling the ShowAsync operation dismisses the content dialog.
        _dismissRedBoxDialog = asyncInfo.Cancel;
#else
        if (Application.Current != null && Application.Current.MainWindow != null && Application.Current.MainWindow.IsLoaded)
        {
            _redBoxDialog.Owner = Application.Current.MainWindow;
        }
        else
        {
            // No loaded main window to parent to; float centered on screen.
            _redBoxDialog.Topmost = true;
            _redBoxDialog.WindowStartupLocation = WindowStartupLocation.CenterScreen;
        }
        // Assign the dismiss delegate before the modal ShowDialog call so it
        // is available while the dialog is up.
        _dismissRedBoxDialog = _redBoxDialog.Close;
        _redBoxDialog.ShowDialog();
#endif
    });
}
/// <summary>
/// Downloads the JavaScript bundle from the packager into a temporary file
/// and then moves it into local storage, replacing any cached copy. The
/// temporary file is deleted if the download or move fails partway.
/// </summary>
/// <param name="token">Token to cancel the download.</param>
private async Task DownloadBundleFromPackagerAsync(CancellationToken token)
{
    // Tracks whether a partially-written temp file must be cleaned up.
    var deleteTemporaryFile = false;
#if WINDOWS_UWP
    var temporaryFolder = ApplicationData.Current.TemporaryFolder.Path;
    var localFolder = ApplicationData.Current.LocalFolder.Path;
#else
    var temporaryFolder = Path.GetTempPath();
    var localFolder = FileSystem.Current.LocalStorage.Path;
#endif
    var temporaryFilePath = Path.Combine(temporaryFolder, JSBundleFileName);
    try
    {
        using (var stream = File.OpenWrite(temporaryFilePath))
        {
            // From this point on the temp file exists and must be removed on failure.
            deleteTemporaryFile = true;
            await _devServerHelper.DownloadBundleFromUrlAsync(_jsAppBundleName, stream, token).ConfigureAwait(false);
        }

        // Replace the cached bundle with the freshly downloaded one.
        var localFilePath = Path.Combine(localFolder, JSBundleFileName);
        if (File.Exists(localFilePath))
        {
            File.Delete(localFilePath);
        }
        File.Move(temporaryFilePath, localFilePath);
        // The temp file has been moved; nothing left to clean up.
        deleteTemporaryFile = false;
    }
    finally
    {
        if (deleteTemporaryFile)
        {
            File.Delete(temporaryFilePath);
        }
    }
}
/// <summary>
/// Creates a React context that runs JavaScript in the remote debugger via
/// a web socket proxy, showing a progress dialog while connecting.
/// Returns <c>null</c> if the user dismissed the progress dialog or the
/// connection failed (a native error dialog is shown in the failure cases).
/// </summary>
/// <param name="token">Token to cancel the reload.</param>
/// <returns>A task resolving to the new React context, or <c>null</c>.</returns>
private async Task<ReactContext> ReloadJavaScriptInProxyModeAsync(CancellationToken token)
{
    var webSocketExecutor = default(WebSocketJavaScriptExecutor);
    try
    {
        var progressDialog = CreateProgressDialog("Connecting to remote debugger.");
        var dismissed = await RunWithProgressAsync(
            async progressToken =>
            {
                await _devServerHelper.LaunchDevToolsAsync(progressToken).ConfigureAwait(false);
                webSocketExecutor = new WebSocketJavaScriptExecutor();
                await webSocketExecutor.ConnectAsync(_devServerHelper.WebsocketProxyUrl, progressToken).ConfigureAwait(false);
            },
            progressDialog,
            token);
        // Fix: honor a user dismissal of the progress dialog, matching the
        // behavior of ReloadJavaScriptFromServerAsync. Previously the result
        // was computed but never checked.
        if (dismissed)
        {
            return null;
        }
    }
    catch (OperationCanceledException)
        when (token.IsCancellationRequested)
    {
        // Propagate external cancellation with the caller's token.
        token.ThrowIfCancellationRequested();
    }
    catch (DebugServerException ex)
    {
        ShowNewNativeError(ex.Message, ex);
        return null;
    }
    catch (Exception ex)
    {
        ShowNewNativeError(
            "Unable to connect to remote debugger. Did you forget " +
            "to start the development server or connect your device?",
            ex);
        return null;
    }

    return await _reactInstanceCommandsHandler.CreateReactContextWithRemoteDebuggerAsync(() => webSocketExecutor, token);
}
/// <summary>
/// Downloads a fresh bundle from the packager (with a progress dialog) and
/// creates a React context from the cached copy. Returns <c>null</c> if the
/// user dismissed the progress dialog or the download failed (a native
/// error dialog is shown in the failure cases).
/// </summary>
/// <param name="token">Token to cancel the reload.</param>
/// <returns>A task resolving to the new React context, or <c>null</c>.</returns>
private async Task<ReactContext> ReloadJavaScriptFromServerAsync(CancellationToken token)
{
    try
    {
        var progressDialog = CreateProgressDialog("Fetching JavaScript bundle.");
        var dismissed = await RunWithProgressAsync(
            progressToken => DownloadBundleFromPackagerAsync(progressToken),
            progressDialog,
            token);
        // User closed the progress dialog: abandon the reload quietly.
        if (dismissed)
        {
            return null;
        }
    }
    catch (OperationCanceledException)
        when (token.IsCancellationRequested)
    {
        // Propagate external cancellation with the caller's token.
        token.ThrowIfCancellationRequested();
    }
    catch (DebugServerException ex)
    {
        ShowNewNativeError(ex.Message, ex);
        return null;
    }
    catch (Exception ex)
    {
        ShowNewNativeError(
            "Unable to download JS bundle. Did you forget to start " +
            "the development server or connect your device?",
            ex);
        return null;
    }

    return await _reactInstanceCommandsHandler.CreateReactContextFromCachedPackagerBundleAsync(token);
}
/// <summary>
/// Runs <paramref name="asyncAction"/> while a progress dialog is shown,
/// linking cancellation from both the caller's token and the dialog's own
/// cancel button. Must be called on the dispatcher thread.
/// </summary>
/// <param name="asyncAction">The work to run; receives a linked cancellation token.</param>
/// <param name="progressDialog">The dialog to display while the work runs.
/// NOTE(review): this parameter is dereferenced here and in
/// ShowProgressDialog, but CreateProgressDialog can return null when
/// progress dialogs are disabled — confirm that configuration is unreachable
/// on these code paths.</param>
/// <param name="token">The caller's cancellation token.</param>
/// <returns>
/// <c>true</c> if the user dismissed the dialog (work was cancelled);
/// <c>false</c> if the work ran to completion.
/// </returns>
private async Task<bool> RunWithProgressAsync(Func<CancellationToken, Task> asyncAction, ProgressDialog progressDialog, CancellationToken token)
{
    DispatcherHelpers.AssertOnDispatcher();
    var hideProgress = ShowProgressDialog(progressDialog);
    // cancellationDisposable is tripped by either the caller's token or the
    // dialog's cancel token, producing the single linked token passed to the action.
    using (var cancellationDisposable = new CancellationDisposable())
    using (token.Register(cancellationDisposable.Dispose))
    using (hideProgress != null ? (IDisposable)progressDialog.Token.Register(cancellationDisposable.Dispose) : Disposable.Empty)
    {
        try
        {
            await asyncAction(cancellationDisposable.Token);
        }
        catch (OperationCanceledException)
            when (progressDialog.Token.IsCancellationRequested)
        {
            // Cancelled via the dialog itself: report dismissal, don't throw.
            return true;
        }
        catch (OperationCanceledException)
        {
            // Cancelled externally: surface it on the caller's token.
            token.ThrowIfCancellationRequested();
            throw;
        }
        finally
        {
            hideProgress?.Invoke();
        }
    }
    return false;
}
/// <summary>
/// Pairs a dev-menu option label with the action to run when it is chosen.
/// Selecting the option first hides the hosting dialog (when a hide action
/// has been supplied), then runs the option's action.
/// </summary>
class DevOptionHandler
{
    private readonly Action _selectAction;

    public DevOptionHandler(string name, Action onSelect)
    {
        Name = name;
        _selectAction = onSelect;
    }

    /// <summary>Display label of the option.</summary>
    public string Name { get; }

    /// <summary>Action that dismisses the hosting dialog; may be unset.</summary>
    public Action HideDialog { get; set; }

    /// <summary>Hides the dialog (if a hide action is set) and runs the option.</summary>
    public void OnSelect()
    {
        var hide = HideDialog;
        if (hide != null)
        {
            hide();
        }

        _selectAction();
    }
}
}
}
| |
// Copyright (c) 2010-2014 SharpDX - Alexandre Mutel
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
namespace SharpDX.Direct3D10
{
public partial class EffectVariable
{
    /// <summary>
    /// Writes raw bytes into the variable from the stream's current position.
    /// </summary>
    /// <param name="data">Stream supplying the bytes to write.</param>
    /// <param name="count">Number of bytes to write.</param>
    /// <remarks>
    /// No conversion or type checking is performed; this is a very quick way to access array items.
    /// </remarks>
    /// <unmanaged>HRESULT ID3D10EffectVariable::SetRawValue([None] void* pData,[None] int Offset,[None] int Count)</unmanaged>
    public void SetRawValue(DataStream data, int count)
    {
        SetRawValue(data.PositionPointer, 0, count);
    }

    /// <summary>
    /// Reads raw bytes out of the variable into a new stream.
    /// </summary>
    /// <remarks>
    /// No conversion or type checking is performed; this is a very quick way to access array items.
    /// </remarks>
    /// <param name="count">Number of bytes to read.</param>
    /// <returns>A <see cref="SharpDX.DataStream"/> filled with the value.</returns>
    /// <unmanaged>HRESULT ID3D10EffectVariable::GetRawValue([None] void* pData,[None] int Offset,[None] int Count)</unmanaged>
    public DataStream GetRawValue(int count)
    {
        var stream = new DataStream(count, true, true);
        GetRawValue(stream.DataPointer, 0, count);
        return stream;
    }

    /// <summary>
    /// Specializes this variable as a scalar, or returns <c>null</c> when it does not hold scalar data.
    /// </summary>
    /// <returns>A <see cref="SharpDX.Direct3D10.EffectScalarVariable"/>, or <c>null</c> if the cast is invalid.</returns>
    /// <unmanaged>ID3D10EffectScalarVariable* ID3D10EffectVariable::AsScalar()</unmanaged>
    public SharpDX.Direct3D10.EffectScalarVariable AsScalar()
    {
        var variable = AsScalar_();
        return (variable != null && variable.IsValid) ? variable : null;
    }

    /// <summary>
    /// Specializes this variable as a vector, or returns <c>null</c> when it does not hold vector data.
    /// </summary>
    /// <returns>A <see cref="SharpDX.Direct3D10.EffectVectorVariable"/>, or <c>null</c> if the cast is invalid.</returns>
    /// <unmanaged>ID3D10EffectVectorVariable* ID3D10EffectVariable::AsVector()</unmanaged>
    public SharpDX.Direct3D10.EffectVectorVariable AsVector()
    {
        var variable = AsVector_();
        return (variable != null && variable.IsValid) ? variable : null;
    }

    /// <summary>
    /// Specializes this variable as a matrix, or returns <c>null</c> when it does not hold matrix data.
    /// </summary>
    /// <returns>A <see cref="SharpDX.Direct3D10.EffectMatrixVariable"/>, or <c>null</c> if the cast is invalid.</returns>
    /// <unmanaged>ID3D10EffectMatrixVariable* ID3D10EffectVariable::AsMatrix()</unmanaged>
    public SharpDX.Direct3D10.EffectMatrixVariable AsMatrix()
    {
        var variable = AsMatrix_();
        return (variable != null && variable.IsValid) ? variable : null;
    }

    /// <summary>
    /// Specializes this variable as a string, or returns <c>null</c> when it does not hold string data.
    /// </summary>
    /// <returns>A <see cref="SharpDX.Direct3D10.EffectStringVariable"/>, or <c>null</c> if the cast is invalid.</returns>
    /// <unmanaged>ID3D10EffectStringVariable* ID3D10EffectVariable::AsString()</unmanaged>
    public SharpDX.Direct3D10.EffectStringVariable AsString()
    {
        var variable = AsString_();
        return (variable != null && variable.IsValid) ? variable : null;
    }

    /// <summary>
    /// Specializes this variable as a shader resource, or returns <c>null</c> when it does not hold shader-resource data.
    /// </summary>
    /// <returns>A <see cref="SharpDX.Direct3D10.EffectShaderResourceVariable"/>, or <c>null</c> if the cast is invalid.</returns>
    /// <unmanaged>ID3D10EffectShaderResourceVariable* ID3D10EffectVariable::AsShaderResource()</unmanaged>
    public SharpDX.Direct3D10.EffectShaderResourceVariable AsShaderResource()
    {
        var variable = AsShaderResource_();
        return (variable != null && variable.IsValid) ? variable : null;
    }

    /// <summary>
    /// Specializes this variable as a render-target view, or returns <c>null</c> when it does not hold render-target-view data.
    /// </summary>
    /// <returns>A <see cref="SharpDX.Direct3D10.EffectRenderTargetViewVariable"/>, or <c>null</c> if the cast is invalid.</returns>
    /// <unmanaged>ID3D10EffectRenderTargetViewVariable* ID3D10EffectVariable::AsRenderTargetView()</unmanaged>
    public SharpDX.Direct3D10.EffectRenderTargetViewVariable AsRenderTargetView()
    {
        var variable = AsRenderTargetView_();
        return (variable != null && variable.IsValid) ? variable : null;
    }

    /// <summary>
    /// Specializes this variable as a depth-stencil view, or returns <c>null</c> when it does not hold depth-stencil-view data.
    /// </summary>
    /// <returns>A <see cref="SharpDX.Direct3D10.EffectDepthStencilViewVariable"/>, or <c>null</c> if the cast is invalid.</returns>
    /// <unmanaged>ID3D10EffectDepthStencilViewVariable* ID3D10EffectVariable::AsDepthStencilView()</unmanaged>
    public SharpDX.Direct3D10.EffectDepthStencilViewVariable AsDepthStencilView()
    {
        var variable = AsDepthStencilView_();
        return (variable != null && variable.IsValid) ? variable : null;
    }

    /// <summary>
    /// Specializes this variable as a constant buffer, or returns <c>null</c> when it does not hold constant-buffer data.
    /// </summary>
    /// <returns>A <see cref="SharpDX.Direct3D10.EffectConstantBuffer"/>, or <c>null</c> if the cast is invalid.</returns>
    /// <unmanaged>ID3D10EffectConstantBuffer* ID3D10EffectVariable::AsConstantBuffer()</unmanaged>
    public SharpDX.Direct3D10.EffectConstantBuffer AsConstantBuffer()
    {
        var variable = AsConstantBuffer_();
        return (variable != null && variable.IsValid) ? variable : null;
    }

    /// <summary>
    /// Specializes this variable as a shader, or returns <c>null</c> when it does not hold shader data.
    /// </summary>
    /// <returns>A <see cref="SharpDX.Direct3D10.EffectShaderVariable"/>, or <c>null</c> if the cast is invalid.</returns>
    /// <unmanaged>ID3D10EffectShaderVariable* ID3D10EffectVariable::AsShader()</unmanaged>
    public SharpDX.Direct3D10.EffectShaderVariable AsShader()
    {
        var variable = AsShader_();
        return (variable != null && variable.IsValid) ? variable : null;
    }

    /// <summary>
    /// Specializes this variable as an effect-blend variable, or returns <c>null</c> when it does not hold effect-blend data.
    /// </summary>
    /// <returns>A <see cref="SharpDX.Direct3D10.EffectBlendVariable"/>, or <c>null</c> if the cast is invalid.</returns>
    /// <unmanaged>ID3D10EffectBlendVariable* ID3D10EffectVariable::AsBlend()</unmanaged>
    public SharpDX.Direct3D10.EffectBlendVariable AsBlend()
    {
        var variable = AsBlend_();
        return (variable != null && variable.IsValid) ? variable : null;
    }

    /// <summary>
    /// Specializes this variable as a depth-stencil variable, or returns <c>null</c> when it does not hold depth-stencil data.
    /// </summary>
    /// <returns>A <see cref="SharpDX.Direct3D10.EffectDepthStencilVariable"/>, or <c>null</c> if the cast is invalid.</returns>
    /// <unmanaged>ID3D10EffectDepthStencilVariable* ID3D10EffectVariable::AsDepthStencil()</unmanaged>
    public SharpDX.Direct3D10.EffectDepthStencilVariable AsDepthStencil()
    {
        var variable = AsDepthStencil_();
        return (variable != null && variable.IsValid) ? variable : null;
    }

    /// <summary>
    /// Specializes this variable as a rasterizer variable, or returns <c>null</c> when it does not hold rasterizer data.
    /// </summary>
    /// <returns>A <see cref="SharpDX.Direct3D10.EffectRasterizerVariable"/>, or <c>null</c> if the cast is invalid.</returns>
    /// <unmanaged>ID3D10EffectRasterizerVariable* ID3D10EffectVariable::AsRasterizer()</unmanaged>
    public SharpDX.Direct3D10.EffectRasterizerVariable AsRasterizer()
    {
        var variable = AsRasterizer_();
        return (variable != null && variable.IsValid) ? variable : null;
    }

    /// <summary>
    /// Specializes this variable as a sampler variable, or returns <c>null</c> when it does not hold sampler data.
    /// </summary>
    /// <returns>A <see cref="SharpDX.Direct3D10.EffectSamplerVariable"/>, or <c>null</c> if the cast is invalid.</returns>
    /// <unmanaged>ID3D10EffectSamplerVariable* ID3D10EffectVariable::AsSampler()</unmanaged>
    public SharpDX.Direct3D10.EffectSamplerVariable AsSampler()
    {
        var variable = AsSampler_();
        return (variable != null && variable.IsValid) ? variable : null;
    }
}
}
| |
using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
using System.Configuration;
using System.Data;
using System.Threading;
using System.Xml;
using System.Xml.Serialization;
using System.Diagnostics;
using Netron.UI;
using Netron;
using QuickGraph;
using QuickGraph.Collections;
using QuickGraph.Concepts;
using QuickGraph.Concepts.Traversals;
using QuickGraph.Representations;
using QuickGraph.Algorithms.Graphviz;
using QuickGraph.Algorithms.RandomWalks;
using QuickGraph.Layout.Providers;
using QuickGraph.Layout.ConnectorChoosers;
using QuickGraph.Layout.Shapes;
using QuickGraph.Algorithms;
using QuickGraph.Algorithms.Search;
using QuickGraph.Serialization;
using QuickGraph.Providers;
using QuickGraph.Layout.Forms;
using QuickGraph.Layout.Connections;
namespace QuickGraph.Layout.GUI
{
/// <summary>
/// Summary description for Form1.
/// </summary>
public class MainForm : System.Windows.Forms.Form
{
// Per-vertex / per-edge annotation maps populated by the search and
// random-walk algorithm runs.
private VertexColorDictionary vertexColors = null;
private EdgeColorDictionary edgeColors = null;
private VertexIntDictionary vertexCounts = null;
private EdgeIntDictionary edgeCounts = null;
private EdgeDoubleDictionary edgeWeights = null;
// Netron drawing domain hosting the shapes and connections.
private Netron.NetronDomain doc = null;
// Designer-managed components and controls.
private System.ComponentModel.IContainer components;
private System.Windows.Forms.StatusBar statusBar;
private System.Windows.Forms.StatusBarPanel messageBarPanel;
private System.Windows.Forms.ToolBar toolBar;
private System.Windows.Forms.MainMenu mainMenu;
private System.Windows.Forms.MenuItem menuItem2;
private System.Windows.Forms.MenuItem depthFirstSearchAlgorithmItem;
private System.Windows.Forms.MenuItem menuItem5;
private System.Windows.Forms.MenuItem breadthFirstSearchItem;
private System.Windows.Forms.MenuItem edgeDepthFirstSearchItem;
private System.Windows.Forms.MenuItem menuItem3;
private System.Windows.Forms.MenuItem menuItem4;
private System.Windows.Forms.ImageList imageList1;
private System.Windows.Forms.MenuItem menuItem6;
private DockingManagerExtender.DockingManagerExtender dockingManagerExtender1;
// Graph canvas plus its thumbnail overview.
private QuickGraph.Layout.Forms.QuickNetronPanel netronPanel;
private Netron.UI.NetronOverview netronOverview1;
private System.Windows.Forms.MenuItem menuItem1;
private System.Windows.Forms.MenuItem menuItem7;
private System.Windows.Forms.MenuItem menuItem8;
private System.Windows.Forms.StatusBarPanel errorBarPanel;
/// <summary>
/// Builds the form: runs designer initialization, loads the Netron domain
/// configuration named by the "NetronConfig" app setting, wires the panel,
/// overview and graph together, and starts with an empty graph.
/// </summary>
public MainForm()
{
    //
    // Required for Windows Form Designer support
    //
    InitializeComponent();
    // Netron domain configured from app.config ("NetronConfig" key).
    this.doc = new NetronDomain();
    this.doc.ConfigurationFileName = ConfigurationSettings.AppSettings["NetronConfig"];
    this.doc.LoadConfiguration();
    //			this.netronPalette.Document = this.doc;
    // The overview control mirrors the main panel's contents.
    this.netronOverview1.Panel = this.netronPanel;
    //			this.netronProperties.Panel = this.netronPanel;
    this.netronPanel.Domain = this.doc;
    // Directed graph with serializable vertices/edges, parallel edges allowed.
    this.netronPanel.Graph =
        new BidirectionalGraph(
            new TypedVertexProvider(typeof(SerializableVertex)),
            new TypedEdgeProvider(typeof(SerializableEdge)),
            true
            );
    this.netronPanel.EntitySelected +=new StatusEventHandler(netronPanel_SelectEvent);
    this.netronPanel.BackColor = Color.LightBlue;
    // Start from a clean, initialized panel.
    OnNew(this,new EventArgs());
}
/// <summary>
/// Disposes the designer components when disposing managed state, then
/// defers to the base form cleanup.
/// </summary>
/// <param name="disposing">True when called from Dispose rather than a finalizer.</param>
protected override void Dispose( bool disposing )
{
    if (disposing && components != null)
    {
        components.Dispose();
    }
    base.Dispose( disposing );
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
// NOTE: generated by the Windows Forms designer; property assignment order
// is significant and must round-trip through the designer unchanged.
private void InitializeComponent()
{
    this.components = new System.ComponentModel.Container();
    System.Resources.ResourceManager resources = new System.Resources.ResourceManager(typeof(MainForm));
    this.mainMenu = new System.Windows.Forms.MainMenu();
    this.menuItem3 = new System.Windows.Forms.MenuItem();
    this.menuItem4 = new System.Windows.Forms.MenuItem();
    this.menuItem6 = new System.Windows.Forms.MenuItem();
    this.menuItem2 = new System.Windows.Forms.MenuItem();
    this.menuItem5 = new System.Windows.Forms.MenuItem();
    this.depthFirstSearchAlgorithmItem = new System.Windows.Forms.MenuItem();
    this.edgeDepthFirstSearchItem = new System.Windows.Forms.MenuItem();
    this.breadthFirstSearchItem = new System.Windows.Forms.MenuItem();
    this.menuItem1 = new System.Windows.Forms.MenuItem();
    this.menuItem7 = new System.Windows.Forms.MenuItem();
    this.menuItem8 = new System.Windows.Forms.MenuItem();
    this.statusBar = new System.Windows.Forms.StatusBar();
    this.messageBarPanel = new System.Windows.Forms.StatusBarPanel();
    this.errorBarPanel = new System.Windows.Forms.StatusBarPanel();
    this.toolBar = new System.Windows.Forms.ToolBar();
    this.imageList1 = new System.Windows.Forms.ImageList(this.components);
    this.dockingManagerExtender1 = new DockingManagerExtender.DockingManagerExtender(this.components);
    this.netronPanel = new QuickGraph.Layout.Forms.QuickNetronPanel(this.components);
    this.netronOverview1 = new Netron.UI.NetronOverview();
    ((System.ComponentModel.ISupportInitialize)(this.messageBarPanel)).BeginInit();
    ((System.ComponentModel.ISupportInitialize)(this.errorBarPanel)).BeginInit();
    this.netronPanel.SuspendLayout();
    this.SuspendLayout();
    //
    // mainMenu
    //
    this.mainMenu.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
        this.menuItem3,
        this.menuItem2});
    //
    // menuItem3 (File menu)
    //
    this.menuItem3.Index = 0;
    this.menuItem3.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
        this.menuItem4,
        this.menuItem6});
    this.menuItem3.Text = "File";
    //
    // menuItem4
    //
    this.menuItem4.Index = 0;
    this.menuItem4.Text = "Load GraphML";
    this.menuItem4.Click += new System.EventHandler(this.menuItem4_Click);
    //
    // menuItem6
    //
    this.menuItem6.Index = 1;
    this.menuItem6.Text = "Load GXL";
    //
    // menuItem2 (Algorithms menu)
    //
    this.menuItem2.Index = 1;
    this.menuItem2.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
        this.menuItem5,
        this.menuItem1});
    this.menuItem2.Text = "Algorithms";
    //
    // menuItem5
    //
    this.menuItem5.Index = 0;
    this.menuItem5.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
        this.depthFirstSearchAlgorithmItem,
        this.edgeDepthFirstSearchItem,
        this.breadthFirstSearchItem});
    this.menuItem5.Text = "Search";
    //
    // depthFirstSearchAlgorithmItem
    //
    this.depthFirstSearchAlgorithmItem.Index = 0;
    this.depthFirstSearchAlgorithmItem.Text = "DepthFirstSearchAlgorithm";
    this.depthFirstSearchAlgorithmItem.Click += new System.EventHandler(this.depthFirstSearchAlgorithmItem_Click);
    //
    // edgeDepthFirstSearchItem
    //
    this.edgeDepthFirstSearchItem.Index = 1;
    this.edgeDepthFirstSearchItem.Text = "EdgeDepthFirstSearchAlgorithm";
    this.edgeDepthFirstSearchItem.Click += new System.EventHandler(this.edgeDepthFirstSearchItem_Click);
    //
    // breadthFirstSearchItem
    //
    this.breadthFirstSearchItem.Index = 2;
    this.breadthFirstSearchItem.Text = "BreadthFirstSearchAlgorithm";
    this.breadthFirstSearchItem.Click += new System.EventHandler(this.breadthFirstSearchItem_Click);
    //
    // menuItem1
    //
    this.menuItem1.Index = 1;
    this.menuItem1.MenuItems.AddRange(new System.Windows.Forms.MenuItem[] {
        this.menuItem7,
        this.menuItem8});
    this.menuItem1.Text = "Walks";
    //
    // menuItem7
    //
    this.menuItem7.Index = 0;
    this.menuItem7.Text = "Uniform walk";
    this.menuItem7.Click += new System.EventHandler(this.menuItem7_Click);
    //
    // menuItem8
    //
    this.menuItem8.Index = 1;
    this.menuItem8.Text = "Weighted walk";
    this.menuItem8.Click += new System.EventHandler(this.menuItem8_Click);
    //
    // statusBar
    //
    this.statusBar.Location = new System.Drawing.Point(0, 403);
    this.statusBar.Name = "statusBar";
    this.statusBar.Panels.AddRange(new System.Windows.Forms.StatusBarPanel[] {
        this.messageBarPanel,
        this.errorBarPanel});
    this.statusBar.Size = new System.Drawing.Size(584, 22);
    this.statusBar.TabIndex = 0;
    this.statusBar.Text = "Status Bar";
    //
    // messageBarPanel
    //
    this.messageBarPanel.Text = "Message";
    //
    // errorBarPanel
    //
    this.errorBarPanel.Alignment = System.Windows.Forms.HorizontalAlignment.Right;
    this.errorBarPanel.Text = "Error";
    this.errorBarPanel.Width = 20;
    //
    // toolBar
    //
    this.toolBar.ButtonSize = new System.Drawing.Size(16, 16);
    this.toolBar.DropDownArrows = true;
    this.toolBar.ImageList = this.imageList1;
    this.toolBar.Location = new System.Drawing.Point(0, 0);
    this.toolBar.Name = "toolBar";
    this.toolBar.ShowToolTips = true;
    this.toolBar.Size = new System.Drawing.Size(584, 22);
    this.toolBar.TabIndex = 1;
    //
    // imageList1
    //
    this.imageList1.ColorDepth = System.Windows.Forms.ColorDepth.Depth32Bit;
    this.imageList1.ImageSize = new System.Drawing.Size(32, 32);
    this.imageList1.TransparentColor = System.Drawing.Color.Transparent;
    //
    // dockingManagerExtender1
    //
    this.dockingManagerExtender1.AutomaticStatePersistence = false;
    this.dockingManagerExtender1.ContainerControl = this;
    this.dockingManagerExtender1.InnerControl = null;
    this.dockingManagerExtender1.OuterControl = null;
    this.dockingManagerExtender1.PlainTabBorder = false;
    this.dockingManagerExtender1.VisualStyle = Crownwood.Magic.Common.VisualStyle.IDE;
    //
    // netronPanel
    //
    this.dockingManagerExtender1.SetADockingEnable(this.netronPanel, false);
    this.netronPanel.BackColor = System.Drawing.Color.White;
    this.netronPanel.BorderStyle = System.Windows.Forms.BorderStyle.Fixed3D;
    this.dockingManagerExtender1.SetCloseButton(this.netronPanel, false);
    this.dockingManagerExtender1.SetCloseOnHide(this.netronPanel, false);
    this.netronPanel.Controls.Add(this.netronOverview1);
    this.netronPanel.DataUpdateInterval = 50;
    this.netronPanel.Dock = System.Windows.Forms.DockStyle.Fill;
    this.dockingManagerExtender1.SetDockingStyle(this.netronPanel, System.Windows.Forms.DockStyle.Left);
    this.netronPanel.Domain = null;
    this.dockingManagerExtender1.SetFullTitle(this.netronPanel, "netronPanel");
    this.netronPanel.Graph = null;
    this.dockingManagerExtender1.SetIcon(this.netronPanel, null);
    this.netronPanel.Location = new System.Drawing.Point(0, 22);
    this.netronPanel.Name = "netronPanel";
    this.netronPanel.Size = new System.Drawing.Size(584, 381);
    this.dockingManagerExtender1.SetTabbedMode(this.netronPanel, true);
    this.netronPanel.TabIndex = 4;
    this.dockingManagerExtender1.SetTitle(this.netronPanel, "netronPanel");
    this.netronPanel.Zoom = 1F;
    //
    // netronOverview1
    //
    this.dockingManagerExtender1.SetADockingEnable(this.netronOverview1, true);
    this.dockingManagerExtender1.SetCloseButton(this.netronOverview1, true);
    this.dockingManagerExtender1.SetCloseOnHide(this.netronOverview1, false);
    this.dockingManagerExtender1.SetDockingStyle(this.netronOverview1, System.Windows.Forms.DockStyle.Left);
    this.dockingManagerExtender1.SetFullTitle(this.netronOverview1, "NetronOverview");
    this.dockingManagerExtender1.SetIcon(this.netronOverview1, null);
    this.netronOverview1.Location = new System.Drawing.Point(24, 16);
    this.netronOverview1.Name = "netronOverview1";
    this.netronOverview1.Panel = null;
    this.netronOverview1.Size = new System.Drawing.Size(224, 248);
    this.dockingManagerExtender1.SetTabbedMode(this.netronOverview1, true);
    this.netronOverview1.TabIndex = 0;
    this.dockingManagerExtender1.SetTitle(this.netronOverview1, "NetronOverview");
    this.netronOverview1.Zoom = 0.2F;
    //
    // MainForm
    //
    this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
    this.ClientSize = new System.Drawing.Size(584, 425);
    this.Controls.Add(this.netronPanel);
    this.Controls.Add(this.toolBar);
    this.Controls.Add(this.statusBar);
    this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
    this.Menu = this.mainMenu;
    this.Name = "MainForm";
    this.Text = "QuickGraph - Netron - Test Application";
    ((System.ComponentModel.ISupportInitialize)(this.messageBarPanel)).EndInit();
    ((System.ComponentModel.ISupportInitialize)(this.errorBarPanel)).EndInit();
    this.netronPanel.ResumeLayout(false);
    this.ResumeLayout(false);
}
#endregion
/// <summary>
/// Resets the Netron panel of the target form to an empty, initialized state.
/// </summary>
/// <param name="sender">Normally the <c>MainForm</c> that raised the event.</param>
/// <param name="e">Unused event arguments.</param>
protected void OnNew(Object sender, EventArgs e)
{
    // The constructor raises this with "this" as the sender, but a hard cast
    // would throw InvalidCastException for any other sender (e.g. a menu
    // item). Fall back to this form when the sender is not a MainForm.
    MainForm mf = sender as MainForm;
    if (mf == null)
    {
        mf = this;
    }
    mf.netronPanel.Initialize();
    Refresh();
}
/// <summary>
/// The main entry point for the application: starts the Windows Forms
/// message loop with a new <c>MainForm</c>.
/// </summary>
[STAThread]
static void Main()
{
    MainForm mainForm = new MainForm();
    Application.Run(mainForm);
}
/// <summary>
/// Restores the default colors of every shape vertex and connection edge
/// currently displayed in the Netron panel.
/// </summary>
/// <exception cref="InvalidOperationException">No graph has been generated yet.</exception>
private void ResetVertexAndEdgeColors()
{
    // Fix: throw the specific InvalidOperationException rather than the base
    // Exception type (still caught by any existing catch (Exception)).
    if (this.netronPanel.Graph==null)
        throw new InvalidOperationException("Generate a graph first");
    foreach(BasicShape s in this.netronPanel.Populator.ShapeVertices.Keys)
    {
        s.ResetColors();
    }
    foreach(Connection c in this.netronPanel.Populator.ConnectionEdges.Keys)
    {
        c.StrokeColor = Color.Black;
    }
}
/// <summary>
/// Runs a depth-first search over the current graph on a background thread,
/// animating vertex and edge colors through the layout tracer.
/// </summary>
/// <param name="sender">Menu item that was clicked (unused).</param>
/// <param name="e">Unused event arguments.</param>
/// <exception cref="InvalidOperationException">No graph has been generated yet.</exception>
private void depthFirstSearchAlgorithmItem_Click(object sender, System.EventArgs e)
{
    // Fix: specific exception type instead of the base Exception class.
    if (this.netronPanel.Graph==null)
        throw new InvalidOperationException("Generate a graph first");
    // clear colors
    ResetVertexAndEdgeColors();
    // create algorithm: all edges start white, vertex colors tracked by the DFS
    this.edgeColors=new EdgeColorDictionary();
    foreach(IEdge edge in this.netronPanel.Graph.Edges)
        this.edgeColors[edge]=GraphColor.White;
    this.vertexColors = new VertexColorDictionary();
    DepthFirstSearchAlgorithm dfs = new DepthFirstSearchAlgorithm(
        this.netronPanel.Graph,
        this.vertexColors);
    // create tracer
    LayoutAlgorithmTraverVisitor tracer = new LayoutAlgorithmTraverVisitor(this.netronPanel.Populator);
    // link to algo
    dfs.RegisterTreeEdgeBuilderHandlers(tracer);
    dfs.RegisterVertexColorizerHandlers(tracer);
    dfs.TreeEdge +=new EdgeEventHandler(dfs_TreeEdge);
    dfs.BackEdge +=new EdgeEventHandler(dfs_BackEdge);
    dfs.ForwardOrCrossEdge +=new EdgeEventHandler(dfs_ForwardOrCrossEdge);
    // add handler to tracers
    tracer.UpdateVertex +=new ShapeVertexEventHandler(tracer_UpdateVertex);
    tracer.UpdateEdge +=new ConnectionEdgeEventHandler(tracer_UpdateEdge);
    // run the algorithm on a worker thread so the UI stays responsive
    Thread thread = new Thread(new ThreadStart(dfs.Compute));
    thread.Start();
}
/// <summary>
/// Runs an edge depth-first search over the current graph on a worker
/// thread. Only edges carry traversal colors for this algorithm.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when no graph has been generated or the populator is missing.
/// </exception>
private void edgeDepthFirstSearchItem_Click(object sender, System.EventArgs e)
{
    if (this.netronPanel.Graph == null)
        // specific exception type instead of bare System.Exception
        throw new InvalidOperationException("Generate a graph first");
    if (this.netronPanel.Populator == null)
        throw new InvalidOperationException("Populator should not be null.");
    ResetVertexAndEdgeColors();
    // vertex colors are unused by this algorithm; only edges are colorized
    this.vertexColors = null;
    this.edgeColors = new EdgeColorDictionary();
    EdgeDepthFirstSearchAlgorithm edfs =
        new EdgeDepthFirstSearchAlgorithm(this.netronPanel.Graph, this.edgeColors);
    // the tracer maps algorithm events back onto the Netron shapes
    LayoutAlgorithmTraverVisitor tracer = new LayoutAlgorithmTraverVisitor(this.netronPanel.Populator);
    edfs.RegisterTreeEdgeBuilderHandlers(tracer);
    edfs.RegisterEdgeColorizerHandlers(tracer);
    tracer.UpdateVertex += new ShapeVertexEventHandler(tracer_UpdateVertex);
    tracer.UpdateEdge += new ConnectionEdgeEventHandler(tracer_UpdateEdge);
    // compute on a worker thread so the UI stays responsive
    Thread thread = new Thread(new ThreadStart(edfs.Compute));
    thread.Start();
}
/// <summary>
/// DFS tree-edge callback: marks the discovered edge gray in the
/// edge-color map (ignored when the map is absent or untracked).
/// </summary>
private void dfs_TreeEdge(object sender, EdgeEventArgs e)
{
    if (this.edgeColors != null && this.edgeColors.Contains(e.Edge))
    {
        this.edgeColors[e.Edge] = GraphColor.Gray;
    }
}
/// <summary>
/// DFS back-edge callback: marks the edge black in the edge-color map
/// (ignored when the map is absent or untracked).
/// </summary>
private void dfs_BackEdge(object sender, EdgeEventArgs e)
{
    if (this.edgeColors != null && this.edgeColors.Contains(e.Edge))
    {
        this.edgeColors[e.Edge] = GraphColor.Black;
    }
}
/// <summary>
/// DFS forward/cross-edge callback: marks the edge black in the
/// edge-color map (ignored when the map is absent or untracked).
/// </summary>
private void dfs_ForwardOrCrossEdge(object sender, EdgeEventArgs e)
{
    if (this.edgeColors != null && this.edgeColors.Contains(e.Edge))
    {
        this.edgeColors[e.Edge] = GraphColor.Black;
    }
}
/// <summary>
/// Runs a breadth-first search over the current graph on a worker thread,
/// colorizing vertices and edges through the layout tracer.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when no graph has been generated or the populator is missing.
/// </exception>
private void breadthFirstSearchItem_Click(object sender, System.EventArgs e)
{
    if (this.netronPanel.Graph == null)
        // specific exception type instead of bare System.Exception
        throw new InvalidOperationException("Generate a graph first");
    if (this.netronPanel.Populator == null)
        throw new InvalidOperationException("Populator should not be null.");
    ResetVertexAndEdgeColors();
    // all edges start white; the handlers below darken them when visited
    this.edgeColors = new EdgeColorDictionary();
    foreach (IEdge edge in this.netronPanel.Graph.Edges)
        this.edgeColors[edge] = GraphColor.White;
    this.vertexColors = new VertexColorDictionary();
    BreadthFirstSearchAlgorithm bfs = new BreadthFirstSearchAlgorithm(
        this.netronPanel.Graph,
        new VertexBuffer(),
        this.vertexColors);
    // the tracer maps algorithm events back onto the Netron shapes
    LayoutAlgorithmTraverVisitor tracer = new LayoutAlgorithmTraverVisitor(this.netronPanel.Populator);
    bfs.RegisterTreeEdgeBuilderHandlers(tracer);
    bfs.RegisterVertexColorizerHandlers(tracer);
    // reuse the DFS edge handlers: tree edges gray, others black
    bfs.TreeEdge += new EdgeEventHandler(dfs_TreeEdge);
    bfs.NonTreeEdge += new EdgeEventHandler(dfs_BackEdge);
    bfs.BlackTarget += new EdgeEventHandler(dfs_ForwardOrCrossEdge);
    tracer.UpdateVertex += new ShapeVertexEventHandler(tracer_UpdateVertex);
    tracer.UpdateEdge += new ConnectionEdgeEventHandler(tracer_UpdateEdge);
    // BFS.Compute needs a start vertex, so wrap the call for the thread
    VertexMethodCaller vm =
        new VertexMethodCaller(
            new ComputeVertexDelegate(bfs.Compute),
            Traversal.FirstVertex(this.netronPanel.Graph)
            );
    // compute on a worker thread so the UI stays responsive
    Thread thread = new Thread(new ThreadStart(vm.Run));
    thread.Start();
}
/// <summary>
/// Applies the traversal color of a vertex to the title bar of its shape:
/// White→White, Gray→LightGray, Black→Red. Does nothing when the vertex
/// color map is absent or does not track the vertex.
/// </summary>
private void tracer_UpdateVertex(object sender, ShapeVertexEventArgs args)
{
    if (this.vertexColors == null || !this.vertexColors.Contains(args.Vertex))
        return;
    PropertyGridShape shape = (PropertyGridShape)args.Shape;
    GraphColor color = this.vertexColors[args.Vertex];
    if (color == GraphColor.White)
        shape.TitleBackColor = Color.White;
    else if (color == GraphColor.Gray)
        shape.TitleBackColor = Color.LightGray;
    else if (color == GraphColor.Black)
        shape.TitleBackColor = Color.Red;
}
/// <summary>
/// Applies the traversal color of an edge to its connection's stroke:
/// White→Green, Gray→Black, Black→Red. Does nothing when the edge color
/// map is absent or does not track the edge.
/// </summary>
private void tracer_UpdateEdge(object sender, ConnectionEdgeEventArgs args)
{
    if (this.edgeColors == null || !this.edgeColors.Contains(args.Edge))
        return;
    Connection conn = (Connection)args.Conn;
    GraphColor color = this.edgeColors[args.Edge];
    if (color == GraphColor.White)
        conn.StrokeColor = Color.Green;
    else if (color == GraphColor.Gray)
        conn.StrokeColor = Color.Black;
    else if (color == GraphColor.Black)
        conn.StrokeColor = Color.Red;
}
/// <summary>
/// Selection handler for the Netron panel: expands a selected
/// PropertyGridShape and brings it to the front; collapses it again on
/// deselection. Other entity types are ignored.
/// </summary>
private void netronPanel_SelectEvent(object sender, Netron.StatusEventArgs e)
{
    PropertyGridShape shape = e.Entity as PropertyGridShape;
    if (shape == null)
        return;
    if (e.Status == EnumStatusType.Selected)
    {
        // expand the rows and raise the shape in the z-order
        shape.CollapseRows = false;
        this.netronPanel.MoveToFront(shape);
    }
    else if (e.Status == EnumStatusType.Deselected)
    {
        shape.CollapseRows = true;
    }
}
/// <summary>
/// Prompts the user for a GraphML file, deserializes it into a
/// BidirectionalGraph and populates the Netron panel, transferring the
/// vertex/edge metadata ("name", "icon", extra entries) onto the shapes.
/// </summary>
private void menuItem4_Click(object sender, System.EventArgs e)
{
    // dispose the dialog deterministically (it is a Component)
    using (OpenFileDialog dlg = new OpenFileDialog())
    {
        dlg.Multiselect = false;
        dlg.DefaultExt = ".xml";
        dlg.Title = "Load GraphML file";
        if (dlg.ShowDialog() != DialogResult.OK)
            return;
        this.netronPanel.Clear();
        // configure the serializer with the concrete types to instantiate
        GraphMLGraphSerializer ser = new GraphMLGraphSerializer(".");
        ser.GraphType = typeof(BidirectionalGraph);
        ser.VertexProviderType = typeof(SerializableVertexProvider);
        ser.EdgeProviderType = typeof(SerializableEdgeProvider);
        XmlTextReader reader = null;
        try
        {
            // validation consumes the reader, so the file is opened twice
            reader = new XmlTextReader(dlg.FileName);
            GraphMLGraphSerializer.Validate(reader);
            reader.Close();
            reader = new XmlTextReader(dlg.FileName);
            this.netronPanel.Graph = (BidirectionalGraph)ser.Deserialize(reader);
        }
        catch (Exception ex)
        {
            // log and propagate; bare `throw;` preserves the stack trace
            Debug.Write(ex.ToString());
            Debug.Flush();
            throw;
        }
        finally
        {
            // BUGFIX: the reader was previously never closed, leaking the
            // file handle; Close is safe to call more than once
            if (reader != null)
                reader.Close();
        }
        this.netronPanel.Populator.PopulatePanel(this.netronPanel.Graphics);
        // copy vertex metadata onto the generated shapes
        foreach (SerializableVertex v in this.netronPanel.Graph.Vertices)
        {
            PropertyGridShape shape = (PropertyGridShape)this.netronPanel.Populator.VertexShapes[v];
            if (v.Entries.ContainsKey("name"))
                shape.Title = v.Entries["name"];
            else
                shape.Title = v.ID.ToString();
            if (v.Entries.ContainsKey("icon"))
            {
                try
                {
                    shape.Icon = new Icon(v.Entries["icon"]);
                }
                catch (Exception)
                {
                    // best effort: a missing or invalid icon file must not
                    // abort loading the whole graph
                }
            }
            // the remaining entries become property rows on the shape
            foreach (DictionaryEntry de in v.Entries)
            {
                if (de.Key.ToString() == "name" || de.Key.ToString() == "icon")
                    continue;
                shape.Rows.Add(new PropertyEntry(de.Key.ToString(), de.Value.ToString()));
            }
        }
        // label the connections that carry a name
        foreach (SerializableEdge edge in this.netronPanel.Graph.Edges)
        {
            if (edge.Entries.ContainsKey("name"))
            {
                SplineConnection conn = (SplineConnection)this.netronPanel.Populator.EdgeConnections[edge];
                conn.Label = edge.Entries["name"];
            }
        }
        // second pass so the titles/labels set above are laid out and drawn
        this.netronPanel.Populator.PopulatePanel(this.netronPanel.Graphics);
    }
}
/// <summary>
/// Starts an unweighted random walk over the current graph on a worker
/// thread; the walk handlers update visit counters and recolor shapes.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when no graph has been generated or the populator is missing.
/// </exception>
private void menuItem7_Click(object sender, System.EventArgs e)
{
    if (this.netronPanel.Graph == null)
        // specific exception type instead of bare System.Exception
        throw new InvalidOperationException("Generate a graph first");
    if (this.netronPanel.Populator == null)
        throw new InvalidOperationException("Populator should not be null.");
    ResetVertexAndEdgeColors();
    // zero the per-vertex / per-edge visit counters
    this.vertexCounts = new VertexIntDictionary();
    this.edgeCounts = new EdgeIntDictionary();
    foreach (IVertex vertex in this.netronPanel.Graph.Vertices)
        this.vertexCounts[vertex] = 0;
    foreach (IEdge edge in this.netronPanel.Graph.Edges)
        this.edgeCounts[edge] = 0;
    RandomWalkAlgorithm walker = new RandomWalkAlgorithm(
        this.netronPanel.Graph
        );
    walker.TreeEdge += new EdgeEventHandler(walker_TreeEdge);
    // the tracer mirrors the walk onto the Netron shapes
    LayoutAlgorithmTraverVisitor tracer = new LayoutAlgorithmTraverVisitor(this.netronPanel.Populator);
    walker.TreeEdge += new EdgeEventHandler(tracer.TreeEdge);
    // walk on a worker thread so the UI stays responsive
    Thread thread = new Thread(new ThreadStart(walker.Generate));
    thread.Start();
}
/// <summary>
/// Random-walk tree-edge callback: increments the visit counters and
/// recolors the source shape white and the target shape light green,
/// showing the visit count in the target's title.
/// NOTE(review): runs on the walker thread and touches UI objects —
/// confirm cross-thread updates are acceptable here.
/// </summary>
private void walker_TreeEdge(object sender, EdgeEventArgs e)
{
    this.vertexCounts[e.Edge.Target]++;
    this.edgeCounts[e.Edge]++;
    PropertyGridShape sourceShape =
        (PropertyGridShape)this.netronPanel.Populator.VertexShapes[e.Edge.Source];
    sourceShape.TitleBackColor = Color.White;
    sourceShape.Invalidate();
    PropertyGridShape targetShape =
        (PropertyGridShape)this.netronPanel.Populator.VertexShapes[e.Edge.Target];
    targetShape.TitleBackColor = Color.LightGreen;
    targetShape.Title = this.vertexCounts[e.Edge.Target].ToString();
    targetShape.Invalidate();
}
/// <summary>
/// Weighted random-walk callback: increments the visit counters,
/// multiplies the traversed edge's weight by 0.9 and recolors the
/// source (white) and target (light green) shapes, then repaints the
/// whole panel.
/// NOTE(review): runs on the walker thread and touches UI objects —
/// confirm cross-thread updates are acceptable here.
/// </summary>
private void walker_WeightedTreeEdge(object sender, EdgeEventArgs e)
{
    this.vertexCounts[e.Edge.Target]++;
    this.edgeCounts[e.Edge]++;
    // decay the weight of the edge that was just traversed
    this.edgeWeights[e.Edge] *= 0.9;
    PropertyGridShape sourceShape =
        (PropertyGridShape)this.netronPanel.Populator.VertexShapes[e.Edge.Source];
    sourceShape.TitleBackColor = Color.White;
    PropertyGridShape targetShape =
        (PropertyGridShape)this.netronPanel.Populator.VertexShapes[e.Edge.Target];
    targetShape.TitleBackColor = Color.LightGreen;
    targetShape.Title = this.vertexCounts[e.Edge.Target].ToString();
    // unlike walker_TreeEdge, invalidate the panel rather than each shape
    this.netronPanel.Invalidate();
}
/// <summary>
/// Starts a weighted random walk over the current graph on a worker
/// thread; each traversed edge's weight is decayed by the tree-edge handler.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when no graph has been generated or the populator is missing.
/// </exception>
private void menuItem8_Click(object sender, System.EventArgs e)
{
    if (this.netronPanel.Graph == null)
        // specific exception type instead of bare System.Exception
        throw new InvalidOperationException("Generate a graph first");
    if (this.netronPanel.Populator == null)
        throw new InvalidOperationException("Populator should not be null.");
    ResetVertexAndEdgeColors();
    // zero the per-vertex / per-edge visit counters
    this.vertexCounts = new VertexIntDictionary();
    this.edgeCounts = new EdgeIntDictionary();
    foreach (IVertex vertex in this.netronPanel.Graph.Vertices)
        this.vertexCounts[vertex] = 0;
    foreach (IEdge edge in this.netronPanel.Graph.Edges)
        this.edgeCounts[edge] = 0;
    // every edge starts with the same weight
    this.edgeWeights = new EdgeDoubleDictionary();
    foreach (IEdge edge in this.netronPanel.Graph.Edges)
        edgeWeights[edge] = 1;
    WeightedMarkovEdgeChain chain = new WeightedMarkovEdgeChain(edgeWeights);
    // BUGFIX: the chain was previously created but never handed to the
    // algorithm, so the weights had no effect on the walk; pass it to the
    // constructor so the weighted chain is actually used
    RandomWalkAlgorithm walker = new RandomWalkAlgorithm(
        this.netronPanel.Graph,
        chain
        );
    walker.TreeEdge += new EdgeEventHandler(walker_WeightedTreeEdge);
    // the tracer mirrors the walk onto the Netron shapes
    LayoutAlgorithmTraverVisitor tracer = new LayoutAlgorithmTraverVisitor(this.netronPanel.Populator);
    walker.TreeEdge += new EdgeEventHandler(tracer.TreeEdge);
    // walk on a worker thread so the UI stays responsive
    Thread thread = new Thread(new ThreadStart(walker.Generate));
    thread.Start();
}
}
}
| |
/***************************************************************************
Copyright (c) Microsoft Corporation. All rights reserved.
THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
***************************************************************************/
using Microsoft.Samples.VisualStudio.CodeSweep.Properties;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace Microsoft.Samples.VisualStudio.CodeSweep
{
/// <summary>
/// General utility methods.
/// </summary>
public static class Utilities
{
    /// <summary>
    /// Concatenates a collection of strings.
    /// </summary>
    /// <param name="inputs">The strings to concatenate.</param>
    /// <param name="separator">The separator text that will be placed in between the individual strings.</param>
    /// <returns>The joined string; empty when <paramref name="inputs"/> is empty.</returns>
    public static string Concatenate(IEnumerable<string> inputs, string separator)
    {
        StringBuilder result = new StringBuilder();
        foreach (string input in inputs)
        {
            // the separator goes before every element except the first
            if (result.Length > 0)
            {
                result.Append(separator);
            }
            result.Append(input);
        }
        return result.ToString();
    }
    /// <summary>
    /// "Escapes" all instances of the specified character by inserting backslashes before
    /// them. In addition, backslashes are transformed to double-backslashes.
    /// </summary>
    /// <param name="text">The text to escape; must not be null.</param>
    /// <param name="toEscape">The character to escape.</param>
    /// <returns>The escaped text.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="text"/> is null.</exception>
    public static string EscapeChar(string text, char toEscape)
    {
        if (text == null)
        {
            throw new ArgumentNullException("text");
        }
        StringBuilder result = new StringBuilder();
        int spanStart = 0;
        // both the target character and the backslash itself need escaping
        char[] chars = new char[] { toEscape, '\\' };
        for (int spanStop = text.IndexOfAny(chars, spanStart); spanStop >= 0; spanStop = text.IndexOfAny(chars, spanStart))
        {
            // copy the untouched span, then emit the backslash + original char
            result.Append(text.Substring(spanStart, spanStop - spanStart));
            result.Append("\\");
            result.Append(text[spanStop]);
            spanStart = spanStop + 1;
        }
        // remainder after the last escapable character
        result.Append(text.Substring(spanStart));
        return result.ToString();
    }
    /// <summary>
    /// Splits a string into several fields.
    /// </summary>
    /// <param name="text">The text to split; must not be null.</param>
    /// <param name="separator">The field separator character.</param>
    /// <returns>
    /// The list of unescaped fields. Note that an empty trailing field
    /// (text ending in a separator) is not included in the result.
    /// </returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="text"/> is null.</exception>
    /// <remarks>
    /// Instances of <c>separator</c> alone are treated as field separators. Escaped instances
    /// of <c>separator</c> (prefixed by backslashes) are unescaped, as are double-backslashes.
    /// </remarks>
    public static IList<string> ParseEscaped(string text, char separator)
    {
        if (text == null)
        {
            // consistent with EscapeChar: fail fast instead of raising a
            // NullReferenceException from text.IndexOfAny below
            throw new ArgumentNullException("text");
        }
        List<string> result = new List<string>();
        StringBuilder current = new StringBuilder();
        char[] chars = new char[] { separator, '\\' };
        int spanStart = 0;
        for (int spanStop = text.IndexOfAny(chars, spanStart); spanStop >= 0; spanStop = text.IndexOfAny(chars, spanStart))
        {
            current.Append(text.Substring(spanStart, spanStop - spanStart));
            if (text[spanStop] == separator)
            {
                // This is a separator on its own, since it would already have been dealt with
                // if it had been preceeded by an escape operator.
                result.Add(current.ToString());
                current.Length = 0;
            }
            else
            {
                // We found an instance of the escape operator, '\'
                if (spanStop + 1 < text.Length)
                {
                    if (text[spanStop + 1] == separator)
                    {
                        // An escaped separator is transformed into a non-escaped separator.
                        current.Append(separator);
                        ++spanStop;
                    }
                    else if (text[spanStop + 1] == '\\')
                    {
                        // A double-escape is transformed into the escape operator.
                        current.Append('\\');
                        ++spanStop;
                    }
                }
            }
            spanStart = spanStop + 1;
        }
        if (spanStart < text.Length)
        {
            current.Append(text.Substring(spanStart));
        }
        if (current.Length > 0)
        {
            result.Add(current.ToString());
        }
        return result;
    }
    /// <summary>
    /// Transforms a relative path to an absolute one based on a specified base folder.
    /// </summary>
    /// <param name="relativePath">The relative (non-rooted) path to resolve.</param>
    /// <param name="baseFolderForDerelativization">The rooted folder the path is relative to.</param>
    /// <returns>The resolved absolute path.</returns>
    /// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
    /// <exception cref="ArgumentException">
    /// Thrown when <paramref name="relativePath"/> is rooted, when the base folder is not
    /// rooted, or when ".." segments walk above the root.
    /// </exception>
    public static string AbsolutePathFromRelative(string relativePath, string baseFolderForDerelativization)
    {
        if (relativePath == null)
        {
            throw new ArgumentNullException("relativePath");
        }
        if (baseFolderForDerelativization == null)
        {
            throw new ArgumentNullException("baseFolderForDerelativization");
        }
        if (Path.IsPathRooted(relativePath))
        {
            throw new ArgumentException(Resources.PathNotRelative, "relativePath");
        }
        if (!Path.IsPathRooted(baseFolderForDerelativization))
        {
            throw new ArgumentException(Resources.BaseFolderMustBeRooted, "baseFolderForDerelativization");
        }
        StringBuilder result = new StringBuilder(baseFolderForDerelativization);
        // normalize the base so it always ends with a separator
        if (result[result.Length - 1] != Path.DirectorySeparatorChar)
        {
            result.Append(Path.DirectorySeparatorChar);
        }
        int spanStart = 0;
        while (spanStart < relativePath.Length)
        {
            int spanStop = relativePath.IndexOf(Path.DirectorySeparatorChar, spanStart);
            if (spanStop == -1)
            {
                spanStop = relativePath.Length;
            }
            string span = relativePath.Substring(spanStart, spanStop - spanStart);
            if (span == "..")
            {
                // The result string should end with a directory separator at this point. We
                // want to search for the one previous to that, which is why we subtract 2.
                int previousSeparator;
                if (result.Length < 2 || (previousSeparator = result.ToString().LastIndexOf(Path.DirectorySeparatorChar, result.Length - 2)) == -1)
                {
                    throw new ArgumentException(Resources.BackTooFar);
                }
                result.Remove(previousSeparator + 1, result.Length - previousSeparator - 1);
            }
            else if (span != ".")
            {
                // Ignore "." because it means the current directory
                result.Append(span);
                if (spanStop < relativePath.Length)
                {
                    result.Append(Path.DirectorySeparatorChar);
                }
            }
            spanStart = spanStop + 1;
        }
        return result.ToString();
    }
    /// <summary>
    /// Enumerates over a collection of rooted file paths, creating a new collection containing the relative versions.
    /// </summary>
    /// <remarks>
    /// If any of the paths cannot be relativized (because it does not have the same root as
    /// the base path), the absolute version is added to the collection that's returned.
    /// </remarks>
    public static List<string> RelativizePathsIfPossible(IEnumerable<string> absolutePaths, string basePath)
    {
        List<string> relativePaths = new List<string>();
        foreach (string absolutePath in absolutePaths)
        {
            if (CanRelativize(absolutePath, basePath))
            {
                relativePaths.Add(RelativePathFromAbsolute(absolutePath, basePath));
            }
            else
            {
                relativePaths.Add(absolutePath);
            }
        }
        return relativePaths;
    }
    /// <summary>
    /// Determines whether <paramref name="absolutePath"/> can be expressed relative to
    /// <paramref name="basePath"/>, i.e. whether both rooted paths share the same root.
    /// </summary>
    private static bool CanRelativize(string absolutePath, string basePath)
    {
        if (absolutePath == null)
        {
            // BUGFIX: previously reported the wrong parameter name
            // ("pathToRelativize", which belongs to RelativePathFromAbsolute)
            throw new ArgumentNullException("absolutePath");
        }
        if (basePath == null)
        {
            throw new ArgumentNullException("basePath");
        }
        if (!Path.IsPathRooted(absolutePath) || !Path.IsPathRooted(basePath))
        {
            throw new ArgumentException(Resources.BothMustBeRooted);
        }
        return string.Compare(Path.GetPathRoot(absolutePath), Path.GetPathRoot(basePath), StringComparison.OrdinalIgnoreCase) == 0;
    }
    /// <summary>
    /// Transforms an absolute path to a relative one based on a specified base folder.
    /// </summary>
    /// <param name="pathToRelativize">The rooted path to express relatively.</param>
    /// <param name="basePath">The rooted folder the result will be relative to.</param>
    /// <returns>The relative path, using ".." segments where necessary.</returns>
    /// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
    /// <exception cref="ArgumentException">
    /// Thrown when either path is not rooted or the two paths have different roots.
    /// </exception>
    public static string RelativePathFromAbsolute(string pathToRelativize, string basePath)
    {
        if (pathToRelativize == null)
        {
            throw new ArgumentNullException("pathToRelativize");
        }
        if (basePath == null)
        {
            throw new ArgumentNullException("basePath");
        }
        if (!Path.IsPathRooted(pathToRelativize) || !Path.IsPathRooted(basePath))
        {
            throw new ArgumentException(Resources.BothMustBeRooted);
        }
        if (string.Compare(Path.GetPathRoot(pathToRelativize), Path.GetPathRoot(basePath), StringComparison.OrdinalIgnoreCase) != 0)
        {
            throw new ArgumentException(Resources.BothMustHaveSameRoot);
        }
        // remove the ending "\" to simplify the algorithm below
        basePath = basePath.TrimEnd(Path.DirectorySeparatorChar);
        string commonBase = FindCommonBasePath(pathToRelativize, basePath, true);
        if (commonBase.Length == basePath.Length)
        {
            // the base folder fully contains the path: just strip the prefix
            string result = pathToRelativize.Substring(commonBase.Length);
            if (result[0] == Path.DirectorySeparatorChar)
            {
                result = result.Substring(1, result.Length - 1);
            }
            return result;
        }
        else
        {
            // back out of the non-shared part of the base, then descend
            int backOutCount = CountInstances(basePath.Substring(commonBase.Length), Path.DirectorySeparatorChar);
            string result = Duplicate(".." + Path.DirectorySeparatorChar, backOutCount) + pathToRelativize.Substring(commonBase.Length + 1);
            return result;
        }
    }
    /// <summary>
    /// Duplicates a specified string a specified number of times.
    /// </summary>
    /// <param name="text">The string to repeat; must not be null.</param>
    /// <param name="count">How many copies to produce; 0 yields an empty string.</param>
    public static string Duplicate(string text, int count)
    {
        if (text == null)
        {
            throw new ArgumentNullException("text");
        }
        StringBuilder result = new StringBuilder(text.Length * count);
        for (int i = 0; i < count; ++i)
        {
            result.Append(text);
        }
        return result.ToString();
    }
    /// <summary>
    /// Returns the number of instances of a given character in a string.
    /// </summary>
    /// <param name="text">The text to scan; must not be null.</param>
    /// <param name="toFind">The character to count.</param>
    public static int CountInstances(string text, char toFind)
    {
        if (text == null)
        {
            throw new ArgumentNullException("text");
        }
        int result = 0;
        foreach (char c in text)
        {
            if (c == toFind)
            {
                ++result;
            }
        }
        return result;
    }
    /// <summary>
    /// Returns the longest string <c>first</c> and <c>second</c> have in common beginning at index 0.
    /// </summary>
    /// <param name="first">The first path; must not be null.</param>
    /// <param name="second">The second path; must not be null.</param>
    /// <param name="ignoreCase">Whether path segments are compared case-insensitively.</param>
    /// <returns>
    /// The shared prefix made of whole directory-separator-delimited segments;
    /// empty when the paths have nothing in common.
    /// </returns>
    public static string FindCommonBasePath(string first, string second, bool ignoreCase)
    {
        if (first == null)
        {
            throw new ArgumentNullException("first");
        }
        if (second == null)
        {
            throw new ArgumentNullException("second");
        }
        // compare whole segments, not characters, so "C:\foo" and "C:\foobar"
        // do not share "C:\foo"
        string[] parts1 = first.Split(new char[] { Path.DirectorySeparatorChar });
        string[] parts2 = second.Split(new char[] { Path.DirectorySeparatorChar });
        int length = 0;
        for (; length < parts1.Length && length < parts2.Length; ++length)
        {
            if (ignoreCase)
            {
                if (string.Compare(parts1[length], parts2[length], StringComparison.OrdinalIgnoreCase) != 0)
                {
                    break;
                }
            }
            else
            {
                if (string.Compare(parts1[length], parts2[length], StringComparison.Ordinal) != 0)
                {
                    break;
                }
            }
        }
        if (length == 0)
        {
            // nothing in common
            return string.Empty;
        }
        return string.Join(char.ToString(Path.DirectorySeparatorChar), parts1, startIndex: 0, count: length);
    }
    /// <summary>
    /// Determines whether two collections contain the same elements with the same
    /// multiplicities, irrespective of their order.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when either collection is null.</exception>
    public static bool UnorderedCollectionsAreEqual<T>(ICollection<T> first, ICollection<T> second)
    {
        if (first == null)
        {
            throw new ArgumentNullException("first");
        }
        if (second == null)
        {
            throw new ArgumentNullException("second");
        }
        if (first.Count != second.Count)
        {
            return false;
        }
        // BUGFIX: the previous containment-only check treated e.g. [a,a,b] and
        // [a,b,b] as equal; remove each matched element so duplicates are
        // counted correctly (List.Remove also handles null items).
        List<T> remaining = new List<T>(second);
        foreach (T item in first)
        {
            if (!remaining.Remove(item))
            {
                return false;
            }
        }
        return true;
    }
    /// <summary>
    /// Determines whether two lists contain equal elements in the same order.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when either list is null.</exception>
    public static bool OrderedCollectionsAreEqual<T>(IList<T> first, IList<T> second)
    {
        if (first == null)
        {
            throw new ArgumentNullException("first");
        }
        if (second == null)
        {
            throw new ArgumentNullException("second");
        }
        if (first.Count != second.Count)
        {
            return false;
        }
        // BUGFIX: the previous `second.IndexOf(first[i]) != i` check rejected
        // equal lists containing duplicates (IndexOf always finds the first
        // occurrence); compare position by position instead.
        for (int i = 0; i < first.Count; ++i)
        {
            if (!object.Equals(first[i], second[i]))
            {
                return false;
            }
        }
        return true;
    }
    /// <summary>
    /// Replaces a leading Program Files folder path with the "$(ProgramFiles)"
    /// variable token; paths outside Program Files are returned unchanged.
    /// </summary>
    public static string EncodeProgramFilesVar(string path)
    {
        if (string.IsNullOrEmpty(path))
        {
            return path;
        }
        string programFiles = Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles);
        if (path.StartsWith(programFiles, StringComparison.OrdinalIgnoreCase))
        {
            return "$(ProgramFiles)" + path.Substring(programFiles.Length);
        }
        else
        {
            return path;
        }
    }
    // TCP port used by the remoting channel to the scanner host process.
    public const int RemotingChannel = 9000;
    /// <summary>
    /// Builds the remoting URI for the scanner host of the given process.
    /// </summary>
    /// <param name="procId">The process id embedded in the URI.</param>
    /// <param name="includeLocalHostPrefix">
    /// When true, returns the full "tcp://localhost:port/..." form; otherwise
    /// only the object URI ("ScannerHost-{id}").
    /// </param>
    public static string GetRemotingUri(int procId, bool includeLocalHostPrefix)
    {
        if (includeLocalHostPrefix)
        {
            return string.Format("tcp://localhost:{0}/ScannerHost-{1}", RemotingChannel, procId);
        }
        else
        {
            return string.Format("ScannerHost-{0}", procId);
        }
    }
}
}
| |
namespace Examples
{
using System;
using System.Linq;
using NSubstitute;
using Sitecore.Configuration;
using Xunit;
[Trait("Category", "RequireLicense")]
public class GettingStarted
{
#region Content
/// <summary>
/// Creates a fake in-memory database holding a single "Home" item whose
/// Title field is 'Welcome!' (xUnit is used as the testing framework).
/// </summary>
[Fact]
public void HowToCreateSimpleItem()
{
    using (var db = new Sitecore.FakeDb.Db
    {
        new Sitecore.FakeDb.DbItem("Home") {{"Title", "Welcome!"}}
    })
    {
        var home = db.GetItem("/sitecore/content/home");
        Assert.Equal("Welcome!", home["Title"]);
    }
}
/// <summary>
/// Setting ParentID places the item under /sitecore/system instead of the
/// default content root.
/// </summary>
[Fact]
public void HowToCreateItemUnderSystem()
{
    using (var db = new Sitecore.FakeDb.Db
    {
        new Sitecore.FakeDb.DbItem("Home") {ParentID = Sitecore.ItemIDs.SystemRoot}
    })
    {
        var home = db.GetItem("/sitecore/system/home");
        Assert.Equal("home", home.Key);
    }
}
/// <summary>
/// Nesting DbItems produces a parent/child item hierarchy.
/// </summary>
[Fact]
public void HowToCreateItemHierarchy()
{
    using (var db = new Sitecore.FakeDb.Db
    {
        new Sitecore.FakeDb.DbItem("Articles")
        {
            new Sitecore.FakeDb.DbItem("Getting Started"),
            new Sitecore.FakeDb.DbItem("Troubleshooting")
        }
    })
    {
        var articles = db.GetItem("/sitecore/content/Articles");
        Assert.NotNull(articles.Children["Getting Started"]);
        Assert.NotNull(articles.Children["Troubleshooting"]);
    }
}
/// <summary>
/// A DbField initialized with several language codes yields per-language
/// field values on the same item.
/// </summary>
[Fact]
public void HowToCreateMultilingualItem()
{
    using (var db = new Sitecore.FakeDb.Db
    {
        new Sitecore.FakeDb.DbItem("home")
        {
            new Sitecore.FakeDb.DbField("Title") {{"en", "Hello!"}, {"da", "Hej!"}}
        }
    })
    {
        var englishHome = db.GetItem("/sitecore/content/home", "en");
        Assert.Equal("Hello!", englishHome["Title"]);
        var danishHome = db.GetItem("/sitecore/content/home", "da");
        Assert.Equal("Hej!", danishHome["Title"]);
    }
}
/// <summary>
/// An item can be bound to an explicitly defined template via TemplateID;
/// the item then exposes the template's fields.
/// </summary>
[Fact]
public void HowToCreateItemWithSpecificTemplate()
{
    var templateId = Sitecore.Data.ID.NewID;
    using (var db = new Sitecore.FakeDb.Db
    {
        new Sitecore.FakeDb.DbTemplate("products", templateId) {"Name"},
        new Sitecore.FakeDb.DbItem("Apple") {TemplateID = templateId}
    })
    {
        var apple = db.GetItem("/sitecore/content/apple");
        Assert.Equal(templateId, apple.TemplateID);
        Assert.NotNull(apple.Fields["Name"]);
    }
}
/// <summary>
/// A DbField initialized with (language, version, value) tuples produces
/// multiple numbered versions of the same field.
/// </summary>
[Fact]
public void HowToCreateVersionedItem()
{
    using (var db = new Sitecore.FakeDb.Db
    {
        new Sitecore.FakeDb.DbItem("home")
        {
            new Sitecore.FakeDb.DbField("Title")
            {
                {"en", 1, "Hello!"},
                {"en", 2, "Welcome!"}
            }
        }
    })
    {
        var firstVersion = db.GetItem("/sitecore/content/home", "en", 1);
        Assert.Equal("Hello!", firstVersion["Title"]);
        var secondVersion = db.GetItem("/sitecore/content/home", "en", 2);
        Assert.Equal("Welcome!", secondVersion["Title"]);
    }
}
/// <summary>
/// A template field may carry a standard value; items created from the
/// template inherit it (here the "$name" token resolves to the item name).
/// </summary>
[Fact]
public void HowToCreateTemplateWithStandardValues()
{
    var templateId = new Sitecore.Data.TemplateID(Sitecore.Data.ID.NewID);
    using (var db = new Sitecore.FakeDb.Db
    {
        // template with field Title whose standard value is $name
        new Sitecore.FakeDb.DbTemplate("Sample", templateId) {{"Title", "$name"}}
    })
    {
        // add an item based on the template to the content root
        var contentRoot = db.GetItem(Sitecore.ItemIDs.ContentRoot);
        var home = contentRoot.Add("Home", templateId);
        // the $name standard value resolves to the item's name
        Assert.Equal("Home", home["Title"]);
    }
}
/// <summary>
/// Templates can inherit from base templates via BaseIDs; the inheritance
/// is visible through TemplateManager and InheritsFrom.
/// </summary>
[Fact]
public void HowToCreateTemplateHierarchy()
{
    var baseOneId = Sitecore.Data.ID.NewID;
    var baseTwoId = Sitecore.Data.ID.NewID;
    var mainTemplateId = Sitecore.Data.ID.NewID;
    using (var db = new Sitecore.FakeDb.Db
    {
        new Sitecore.FakeDb.DbTemplate("base one", baseOneId),
        new Sitecore.FakeDb.DbTemplate("base two", baseTwoId),
        new Sitecore.FakeDb.DbTemplate("Main", mainTemplateId)
        {
            BaseIDs = new[] {baseOneId, baseTwoId}
        }
    })
    {
        var template =
            Sitecore.Data.Managers.TemplateManager.GetTemplate(mainTemplateId, db.Database);
        Assert.Contains(baseOneId, template.BaseIDs);
        Assert.Contains(baseTwoId, template.BaseIDs);
        Assert.True(template.InheritsFrom(baseOneId));
        Assert.True(template.InheritsFrom(baseTwoId));
    }
}
/// <summary>
/// A link field can be populated with its raw XML value and then read back
/// through the typed LinkField wrapper.
/// </summary>
[Fact]
public void HowToCreateLinkFieldUsingRawValue()
{
    using (var db = new Sitecore.FakeDb.Db
    {
        new Sitecore.FakeDb.DbItem("home")
        {
            {"link", "<link linktype=\"external\" url=\"http://google.com\" />"}
        }
    })
    {
        var home = db.GetItem("/sitecore/content/home");
        var linkField = (Sitecore.Data.Fields.LinkField)home.Fields["link"];
        Assert.Equal("external", linkField.LinkType);
        Assert.Equal("http://google.com", linkField.Url);
    }
}
/// <summary>
/// Alternatively, DbLinkField builds the same link field from typed
/// properties instead of a raw XML string.
/// </summary>
[Fact]
public void HowToCreateLinkFieldUsingDbLinkField()
{
    using (var db = new Sitecore.FakeDb.Db
    {
        new Sitecore.FakeDb.DbItem("home")
        {
            new Sitecore.FakeDb.DbLinkField("link")
            {
                LinkType = "external",
                Url = "http://google.com"
            }
        }
    })
    {
        var home = db.GetItem("/sitecore/content/home");
        var linkField = (Sitecore.Data.Fields.LinkField)home.Fields["link"];
        Assert.Equal("external", linkField.LinkType);
        Assert.Equal("http://google.com", linkField.Url);
    }
}
#endregion
#region Security
/// <summary>
/// The authentication provider can be replaced with an NSubstitute mock for
/// the duration of an AuthenticationSwitcher scope.
/// </summary>
[Fact]
public void HowToMockAuthenticationProvider()
{
    // create and configure an authentication provider mock
    var provider =
        Substitute.For<Sitecore.Security.Authentication.AuthenticationProvider>();
    provider.Login("John", true).Returns(true);
    // substitute the real provider with the mock inside the scope
    using (new Sitecore.Security.Authentication.AuthenticationSwitcher(provider))
    {
        // the configured credentials succeed...
        Assert.True(
            Sitecore.Security.Authentication.AuthenticationManager.Login("John", true));
        // ...any other credentials fail
        Assert.False(
            Sitecore.Security.Authentication.AuthenticationManager.Login("Robber", true));
    }
}
/// <summary>
/// A mocked ASP.NET role provider can be activated through the
/// RoleProviderSwitcher scope.
/// </summary>
[Obsolete]
[Fact]
public void HowToMockRoleProvider()
{
    // the roles the mocked provider will report
    var roles = new[] { @"sitecore/Authors", @"sitecore/Editors" };
    var provider = Substitute.For<System.Web.Security.RoleProvider>();
    provider.GetAllRoles().Returns(roles);
    // switch the role provider so the mocked version is used
    using (new Sitecore.FakeDb.Security.Web.RoleProviderSwitcher(provider))
    {
        var allRoles = System.Web.Security.Roles.GetAllRoles();
        Assert.Contains(@"sitecore/Authors", allRoles);
        Assert.Contains(@"sitecore/Editors", allRoles);
    }
}
/// <summary>
/// A mocked ASP.NET membership provider can be activated through the
/// MembershipSwitcher scope.
/// </summary>
[Obsolete]
[Fact]
public void HowToMockMembershipProvider()
{
    // fake membership user the provider will return
    var fakeUser = new Sitecore.FakeDb.Security.Accounts.FakeMembershipUser();
    // create and configure the membership provider mock
    var provider = Substitute.For<System.Web.Security.MembershipProvider>();
    provider.GetUser(@"extranet\John", true).Returns(fakeUser);
    // switch the membership provider so the mocked version is used
    using (new Sitecore.FakeDb.Security.Web.MembershipSwitcher(provider))
    {
        // the user now resolves through the mock
        var exists = Sitecore.Security.Accounts.User.Exists(@"extranet\John");
        Assert.True(exists);
    }
}
/// <summary>
/// Item security can be asserted after business logic has modified the
/// access rules — here, Read is denied for the Editors role.
/// </summary>
[Obsolete]
[Fact]
public void HowToUnitTestItemSecurityWithFakeProvider()
{
    using (var db = new Sitecore.FakeDb.Db
    {
        new Sitecore.FakeDb.DbItem("home")
    })
    {
        var home = db.GetItem("/sitecore/content/home");
        // your business logic would change the item security, e.g. deny
        // Read for Editors
        var editors = Sitecore.Security.Accounts.Role.FromName(@"sitecore\Editors");
        var readRight = Sitecore.Security.AccessControl.AccessRight.ItemRead;
        var propagation = Sitecore.Security.AccessControl.PropagationType.Entity;
        var deny = Sitecore.Security.AccessControl.AccessPermission.Deny;
        var rules = new Sitecore.Security.AccessControl.AccessRuleCollection
        {
            Sitecore.Security.AccessControl.AccessRule.Create
                (editors, readRight, propagation, deny)
        };
        Sitecore.Security.AccessControl.AuthorizationManager.SetAccessRules(home, rules);
        // the denied account can no longer read the item
        Assert.False(home.Security.CanRead(editors));
    }
}
/// <summary>
/// UserSwitcher swaps Sitecore.Context.User for the lifetime of the scope.
/// </summary>
[Fact]
public void HowToSwitchContextUser()
{
    using (new Sitecore.Security.Accounts.UserSwitcher(@"extranet\John", true))
    {
        Assert.Equal(@"extranet\John", Sitecore.Context.User.Name);
    }
}
/// <summary>
/// Item access rules can be configured inline; an unreadable item resolves
/// to null from GetItem.
/// </summary>
[Fact]
public void HowToConfigureItemAccess()
{
    using (var db = new Sitecore.FakeDb.Db
    {
        // deny read access on the item
        new Sitecore.FakeDb.DbItem("home") {Access = {CanRead = false}}
    })
    {
        var home = db.GetItem("/sitecore/content/home");
        // read is denied, so the item cannot be retrieved
        Assert.Null(home);
    }
}
/// <summary>
/// A Sitecore user can be mocked with NSubstitute and its
/// IsAdministrator property stubbed.
/// </summary>
[Fact]
public void HowToSetUserIsAdministrator()
{
    var john = Substitute.For<Sitecore.Security.Accounts.User>(@"extranet\John", true);
    john.IsAdministrator.Returns(true);
    Assert.True(john.IsAdministrator);
}
/// <summary>
/// A user's profile can be mocked as well by stubbing the Profile property
/// with a UserProfile substitute.
/// </summary>
[Fact]
public void HowToMockUserProfile()
{
    var john = Substitute.For<Sitecore.Security.Accounts.User>(@"extranet\John", true);
    john.Profile.Returns(Substitute.For<Sitecore.Security.UserProfile>());
    john.Profile.ClientLanguage.Returns("da");
    john.Profile.Email.Returns("john@mail.com");
    Assert.Equal("da", john.Profile.ClientLanguage);
    Assert.Equal("john@mail.com", john.Profile.Email);
}
#endregion
#region Pipelines
/// <summary>
/// A mocked pipeline processor registered in the Pipeline Watcher receives
/// the Process call when the pipeline runs.
/// </summary>
[Fact]
public void HowToUnitTestPipelineCallWithMockedProcessor()
{
    var args = new Sitecore.Pipelines.PipelineArgs();
    using (var db = new Sitecore.FakeDb.Db())
    {
        // register a processor mock in the Pipeline Watcher
        var processor = Substitute.For<Sitecore.FakeDb.Pipelines.IPipelineProcessor>();
        db.PipelineWatcher.Register("mypipeline", processor);
        // run the pipeline
        Sitecore.Pipelines.CorePipeline.Run("mypipeline", args);
        // the mock must have received Process with the same args instance
        processor.Received().Process(args);
    }
}
/// <summary>
/// A mocked processor can also produce a result: here it writes into the
/// args custom data when invoked, and the test asserts the outcome.
/// </summary>
[Fact]
public void HowToUnitTestAdvancedPipelineCallWithMockedProcessor()
{
    var args = new Sitecore.Pipelines.PipelineArgs();
    using (var db = new Sitecore.FakeDb.Db())
    {
        // the mocked processor writes a result into the args when called
        var processor = Substitute.For<Sitecore.FakeDb.Pipelines.IPipelineProcessor>();
        processor
            .When(p => p.Process(args))
            .Do(ci => ci.Arg<Sitecore.Pipelines.PipelineArgs>().CustomData["Result"] = "Ok");
        db.PipelineWatcher.Register("mypipeline", processor);
        // run the pipeline
        Sitecore.Pipelines.CorePipeline.Run("mypipeline", args);
        // and check the result the processor produced
        Assert.Equal("Ok", args.CustomData["Result"]);
    }
}
/// <summary>
/// Imagine a product repository that can create a product by name. The
/// repository implementation is 'thin' and does nothing but call the
/// corresponding pipeline with the proper arguments. This example shows how
/// to unit test such a pipeline call (note that the pipeline does not need
/// to be defined in a config file):
/// </summary>
[Fact]
public void HowToUnitTestPipelineCall()
{
    using (var db = new Sitecore.FakeDb.Db())
    {
        // expect a "createProduct" call whose args carry the product name
        db.PipelineWatcher
            .Expects("createProduct", a => a.CustomData["ProductName"].Equals("MyProduct"));
        // exercise the repository
        var repository = new ProductRepository();
        repository.CreateProduct("MyProduct");
        // verify the expected pipeline call happened
        db.PipelineWatcher.EnsureExpectations();
    }
}
private partial class ProductRepository
{
    public void CreateProduct(string name)
    {
        // Thin wrapper: hand the product name to the "createProduct" pipeline.
        var pipelineArgs = new Sitecore.Pipelines.PipelineArgs();
        pipelineArgs.CustomData["ProductName"] = name;
        Sitecore.Pipelines.CorePipeline.Run("createProduct", pipelineArgs);
    }
}
/// <summary>
/// How to configure the pipeline behaviour.
/// The code sample above checks that the pipeline is called with proper arguments.
/// The next scenario would be to validate the pipeline call results.
/// In the code below we configure pipeline processor behaviour to return an expected
/// product only if the product id is set to "1".
/// </summary>
[Fact]
public void HowToConfigurePipelineBehaviour()
{
using (Sitecore.FakeDb.Db db = new Sitecore.FakeDb.Db())
{
// create a product to get from the repository
object expectedProduct = new object();
string productId = "1";
// configure Pipeline Watcher to expect a pipeline call where the args Custom Data
// contains ProductId equals "1". Once the args received the pipeline result is set
// to the Product Custom Data property
db.PipelineWatcher
.WhenCall("findProductById")
.WithArgs(a => a.CustomData["ProductId"].Equals(productId))
.Then(a => a.CustomData["Product"] = expectedProduct);
// create a repository and call get product method
ProductRepository repository = new ProductRepository();
var actualProduct = repository.GetProductById(productId);
// assert the received product is the same as the expected one
Xunit.Assert.Equal(expectedProduct, actualProduct);
}
}
private partial class ProductRepository
{
    public object GetProductById(string id)
    {
        // Thin wrapper: ask the "findProductById" pipeline to resolve the product.
        var pipelineArgs = new Sitecore.Pipelines.PipelineArgs();
        pipelineArgs.CustomData["ProductId"] = id;
        Sitecore.Pipelines.CorePipeline.Run("findProductById", pipelineArgs);
        return pipelineArgs.CustomData["Product"];
    }
}
#endregion
#region Configuration
[Fact]
public void HowToConfigureSettings()
{
    using (var db = new Sitecore.FakeDb.Db())
    {
        // Define the setting on the Db instance...
        db.Configuration.Settings["MySetting"] = "1234";

        // ...and read it back through the regular Sitecore API.
        string value = Sitecore.Configuration.Settings.GetSetting("MySetting");
        Xunit.Assert.Equal("1234", value);
    }
}
/// <summary>
/// By default Sitecore sets `singleInstance="true"` for all databases so that each
/// of the three default databases behaves as a singleton. This approach has a list
/// of pros and cons; it is important to be aware of potential issues that may
/// appear.
///
/// Single instance allows one to resolve a database in any place of code using
/// the Sitecore Factory. The same content is available no matter how many times the
/// database has been resolved. The next code creates item Home using the simplified
/// FakeDb API and then reads the item from a database resolved from the Factory:
/// </summary>
[Fact]
public void HowToGetItemFromSitecoreDatabase()
{
using (new Sitecore.FakeDb.Db
{
new Sitecore.FakeDb.DbItem("Home")
})
{
Sitecore.Data.Database database =
Sitecore.Configuration.Factory.GetDatabase("master");
Xunit.Assert.NotNull(database.GetItem("/sitecore/content/home"));
}
}
#endregion
#region Miscellaneous
[Fact]
public void HowToSwitchContextSite()
{
    // Create a fake Site Context and configure the required parameters.
    // Please note that there is no registration in the App.config file required.
    var fakeSite = new Sitecore.FakeDb.Sites.FakeSiteContext(
        new Sitecore.Collections.StringDictionary
        {
            { "name", "website" },
            { "database", "web" }
        });

    // Switch the context site for the duration of the using block.
    using (new Sitecore.FakeDb.Sites.FakeSiteContextSwitcher(fakeSite))
    {
        Xunit.Assert.Equal("website", Sitecore.Context.Site.Name);
        Xunit.Assert.Equal("web", Sitecore.Context.Site.Database.Name);
        // Fully qualified for consistency with the rest of this file
        // (previously the only unqualified 'Factory' reference).
        Xunit.Assert.Equal("website", Sitecore.Configuration.Factory.GetSite("website").Name);
    }
}
[Fact]
public void HowToWorkWithLinkDatabase()
{
    // arrange your database and items
    Sitecore.Data.ID sourceId = Sitecore.Data.ID.NewID;
    Sitecore.Data.ID aliasId = Sitecore.Data.ID.NewID;
    Sitecore.Data.ID linkedItemId = Sitecore.Data.ID.NewID;
    using (Sitecore.FakeDb.Db db = new Sitecore.FakeDb.Db
    {
        new Sitecore.FakeDb.DbItem("source", sourceId),
        new Sitecore.FakeDb.DbItem("clone"),
        new Sitecore.FakeDb.DbItem("alias", aliasId, Sitecore.TemplateIDs.Alias)
        {
            new Sitecore.FakeDb.DbField("Linked item", linkedItemId)
        }
    })
    {
        // arrange desired LinkDatabase behavior
        var behavior = Substitute.For<Sitecore.Links.LinkDatabase>();
        Sitecore.Data.Items.Item source = db.GetItem("/sitecore/content/source");
        Sitecore.Data.Items.Item alias = db.GetItem("/sitecore/content/alias");
        Sitecore.Data.Items.Item clone = db.GetItem("/sitecore/content/clone");
        string sourcePath = source.Paths.FullPath;
        behavior.GetReferrers(source).Returns(new[]
        {
            new Sitecore.Links.ItemLink(alias, linkedItemId, source, sourcePath),
            new Sitecore.Links.ItemLink(clone, Sitecore.FieldIDs.Source, source, sourcePath)
        });

        // link database is clean
        Xunit.Assert.Empty(Sitecore.Globals.LinkDatabase.GetReferrers(source));

        using (new Sitecore.FakeDb.Links.LinkDatabaseSwitcher(behavior))
        {
            Sitecore.Links.ItemLink[] referrers =
                Sitecore.Globals.LinkDatabase.GetReferrers(source);

            // xUnit convention: the expected value is the first argument;
            // Length avoids a needless LINQ enumeration of the array.
            const int expected = 2;
            Xunit.Assert.Equal(expected, referrers.Length);
            Xunit.Assert.Single(referrers.Where(r => r.SourceItemID == clone.ID
                && r.TargetItemID == source.ID));
            Xunit.Assert.Single(referrers.Where(r => r.SourceItemID == alias.ID
                && r.TargetItemID == source.ID));
        }

        // link database is clean again
        Xunit.Assert.Empty(Sitecore.Globals.LinkDatabase.GetReferrers(source));
    }
}
[Obsolete]
[Fact]
public void HowToWorkWithQueryApi()
{
    const string Query = "/sitecore/content/*[@@key = 'home']";
    const string expected = "home";
    using (new Sitecore.FakeDb.Db
    {
        new Sitecore.FakeDb.DbItem("home")
    })
    {
        Sitecore.Data.Items.Item[] result =
            Sitecore.Data.Query.Query.SelectItems(Query);

        Xunit.Assert.Single(result);
        // xUnit convention: expected value first, actual second.
        Xunit.Assert.Equal(expected, result[0].Key);
    }
}
[Fact]
public void HowToWorkWithFastQueryApi()
{
    const string Query = "fast:/sitecore/content/*[@@key = 'home']";
    const string expected = "home";
    using (Sitecore.FakeDb.Db db = new Sitecore.FakeDb.Db
    {
        new Sitecore.FakeDb.DbItem("home")
    })
    {
        Sitecore.Data.Items.Item homeItem = db.Database.SelectSingleItem(Query);
        // xUnit convention: expected value first, actual second.
        Xunit.Assert.Equal(expected, homeItem.Key);
    }
}
[Fact]
public void HowToMockContentSearchLogic()
{
var index = Substitute.For<Sitecore.ContentSearch.ISearchIndex>();
// don't forget to clean up: the index registered here stays in the static
// ContentSearchManager configuration after the test finishes.
Sitecore.ContentSearch
.ContentSearchManager.SearchConfiguration.Indexes["my_index"] = index;
using (Sitecore.FakeDb.Db db = new Sitecore.FakeDb.Db
{
new Sitecore.FakeDb.DbItem("home")
})
{
// configure a search result item behavior.
var searchResultItem =
Substitute.For<Sitecore.ContentSearch.SearchTypes.SearchResultItem>();
var expectedItem = db.GetItem("/sitecore/content/home");
searchResultItem.GetItem().Returns(expectedItem);
// configure a search index behavior.
index.CreateSearchContext()
.GetQueryable<Sitecore.ContentSearch.SearchTypes.SearchResultItem>()
.Returns((new[] { searchResultItem }).AsQueryable());
// get the item from the search index and check the expectations.
Sitecore.Data.Items.Item actualItem =
index.CreateSearchContext()
.GetQueryable<Sitecore.ContentSearch.SearchTypes.SearchResultItem>()
.Single()
.GetItem();
Xunit.Assert.Equal(expectedItem, actualItem);
}
}
[Fact]
public void HowToMockIdTable()
{
    // arrange
    Sitecore.Data.ID id = Sitecore.Data.ID.NewID;
    Sitecore.Data.ID parentId = Sitecore.Data.ID.NewID;
    const string data = "{ }";
    var provider = Substitute.For<Sitecore.Data.IDTables.IDTableProvider>();
    using (new Sitecore.FakeDb.Data.IDTables.IDTableProviderSwitcher(provider))
    {
        // act: add an entry through the regular IDTable API.
        var entry = Sitecore.Data.IDTables.IDTable.Add("my_pref", "my_key", id, parentId, data);

        // assert: every field round-trips.
        Xunit.Assert.Equal("my_pref", entry.Prefix);
        Xunit.Assert.Equal("my_key", entry.Key);
        Xunit.Assert.Equal(id, entry.ID);
        Xunit.Assert.Equal(parentId, entry.ParentID);
        Xunit.Assert.Equal(data, entry.CustomData);
    }
}
#endregion
#region Blobs
[Fact(Skip = "Temporary disabled. TBI in #214.")]
public void HowToSetAndGetBlobStream()
{
// arrange
var stream = new System.IO.MemoryStream();
using (Sitecore.FakeDb.Db db = new Sitecore.FakeDb.Db
{
new Sitecore.FakeDb.DbItem("home")
{
new Sitecore.FakeDb.DbField("field")
}
})
{
Sitecore.Data.Items.Item item = db.GetItem("/sitecore/content/home");
Sitecore.Data.Fields.Field field = item.Fields["field"];
using (new Sitecore.Data.Items.EditContext(item))
{
// act: the blob must be written inside an edit context
field.SetBlobStream(stream);
}
// assert: the stream read back from the field holds the same bytes
// that were written (empty here, since the stream was never written to)
Xunit.Assert.Equal(stream.ToArray(),
((System.IO.MemoryStream)field.GetBlobStream()).ToArray());
}
}
#endregion
#region Translate
[Fact]
public void HowToCheckTranslateTextIsCalled()
{
    const string Phrase = "Welcome!";

    // The 'FakeDb.AutoTranslate' setting can be enabled either statically in
    // the 'App.config' file or, as here, dynamically in an individual test.
    using (var db = new Sitecore.FakeDb.Db())
    {
        db.Configuration.Settings.AutoTranslate = true;

        string translatedPhrase = Sitecore.Globalization.Translate.Text(Phrase);

        // AutoTranslate appends a '*' to the original phrase.
        Xunit.Assert.Equal("Welcome!*", translatedPhrase);
    }
}
/// <summary>
/// FakeDb supports a simple localization mechanism. Calling Translate.Text() or
/// Translate.TextByLanguage() returns a 'translated' version of the original text:
/// the configured prefix (here the language name) is added to the initial phrase.
/// </summary>
[Fact]
public void HowToUnitTestLocalization()
{
    const string Phrase = "Welcome!";

    // init languages
    Sitecore.Globalization.Language en = Sitecore.Globalization.Language.Parse("en");
    Sitecore.Globalization.Language da = Sitecore.Globalization.Language.Parse("da");

    using (var db = new Sitecore.FakeDb.Db())
    {
        db.Configuration.Settings.AutoTranslate = true;
        db.Configuration.Settings.AutoTranslatePrefix = "{lang}:";

        // translate the same phrase into two languages
        string enTranslation = Sitecore.Globalization.Translate.TextByLanguage(Phrase, en);
        string daTranslation = Sitecore.Globalization.Translate.TextByLanguage(Phrase, da);

        Xunit.Assert.Equal("en:Welcome!", enTranslation);
        Xunit.Assert.Equal("da:Welcome!", daTranslation);
    }
}
#endregion
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
namespace OpenLiveWriter.CoreServices
{
public sealed class ArrayHelper
{
    /// <summary>
    /// Static utility holder; cannot be instantiated or subclassed.
    /// </summary>
    private ArrayHelper()
    {
    }

    /// <summary>
    /// Removes nulls from an array of objects, preserving the order of the
    /// remaining elements. The input array is compacted in place; when it
    /// contains no nulls the same instance is returned, otherwise a new,
    /// truncated array is returned.
    /// </summary>
    public static object[] Compact(object[] arr)
    {
        int i = 0;
        int j = 0;
        for (; i < arr.Length; i++)
        {
            // shift survivors left over the gaps left by nulls
            if (i != j)
                arr[j] = arr[i];
            if (arr[i] != null)
                j++;
        }
        if (i == j)
            return arr;
        else
        {
            Array a = (Array)arr;
            Truncate(ref a, j);
            return (object[])a;
        }
    }

    /// <summary>
    /// Truncates the first dimension of any array. No-op when the array
    /// already has the requested length; otherwise replaces <paramref name="arr"/>
    /// with a new array holding the first <paramref name="len"/> elements.
    /// </summary>
    public static void Truncate(ref Array arr, int len)
    {
        if (arr.Length == len)
            return;
        Array arr1 = Array.CreateInstance(arr.GetType().GetElementType(), len);
        Array.Copy(arr, arr1, len);
        arr = arr1;
    }

    /// <summary>
    /// Return the intersection of n arrays.
    /// </summary>
    /// <param name="arrays">
    /// 0..n arrays (or an array of arrays). All arrays should have
    /// the same underlying type. Duplicate elements within an
    /// input array is allowed, but the return value is guaranteed not
    /// to contain duplicates.
    /// </param>
    /// <returns>
    /// An array of the elements which are present in all arrays.
    /// The order of the elements is not guaranteed.
    /// </returns>
    public static Array Intersection(Array[] arrays)
    {
        // For strongly typed jagged arrays (e.g. string[][]) this yields the
        // scalar element type. If the runtime type is literally Array[], the
        // inner GetElementType() is null; fall back to object so the
        // CreateInstance/Narrow calls below don't throw.
        Type underlyingType = arrays.GetType().GetElementType().GetElementType()
            ?? typeof(object);
        if (arrays.Length == 0)
            return Array.CreateInstance(underlyingType, 0);
        if (arrays.Length == 1)
            return arrays[0];
        // using Hashtable as a multiset: element -> number of arrays containing it
        Hashtable outerTable = new Hashtable(arrays[0].Length * 2 + 1);
        bool isFirstTrip = true;
        foreach (Array array in arrays)
        {
            Hashtable innerTable = new Hashtable(array.Length);
            foreach (object o in array)
            {
                // prevent per-array duplicates from being counted more than once
                if (innerTable.ContainsKey(o))
                    continue;
                innerTable[o] = "";
                if (!isFirstTrip && !outerTable.ContainsKey(o))
                {
                    // if this is not the first array we're looking at, don't even
                    // bother counting items that aren't already in the table--we
                    // know they won't be a part of the final set
                    continue;
                }
                else if (isFirstTrip)
                {
                    outerTable[o] = 0;
                }
                outerTable[o] = (int)outerTable[o] + 1;
            }
            isFirstTrip = false;
        }
        // an element is in the intersection iff every array counted it once
        Array intersection = new object[outerTable.Count];
        int pos = 0;
        foreach (DictionaryEntry entry in outerTable)
        {
            if ((int)entry.Value == arrays.Length)
            {
                intersection.SetValue(entry.Key, pos++);
            }
        }
        Truncate(ref intersection, pos);
        if (intersection.Length > 0)
            return Narrow(intersection, underlyingType);
        else
            return intersection;
    }

    /// <summary>
    /// Return the union of n arrays.
    /// </summary>
    /// <param name="arrays">
    /// 0..n arrays (or an array of arrays). All arrays should have
    /// the same underlying type. Duplicate elements within an
    /// input array is allowed, but the return value is guaranteed not
    /// to contain duplicates.
    /// </param>
    /// <returns>
    /// An array of the elements which are present in any array.
    /// The order of the elements is not guaranteed.
    /// </returns>
    public static T[] Union<T>(params T[][] arrays)
    {
        if (arrays.Length == 0)
            return new T[0];
        if (arrays.Length == 1)
            return arrays[0];
        // HashSet<T> de-duplicates without boxing and, unlike Hashtable keys,
        // tolerates null elements
        HashSet<T> set = new HashSet<T>();
        foreach (T[] array in arrays)
        {
            foreach (T item in array)
            {
                set.Add(item);
            }
        }
        T[] union = new T[set.Count];
        set.CopyTo(union);
        return union;
    }

    /// <summary>
    /// Copies <paramref name="array"/> into a new array whose element type is
    /// <paramref name="type"/> (e.g. object[] of boxed ints to int[]).
    /// </summary>
    public static Array Narrow(Array array, Type type)
    {
        Array newArray = Array.CreateInstance(type, array.LongLength);
        Array.Copy(array, newArray, array.LongLength);
        return newArray;
    }

    public delegate object ArrayMapperDelegate(object input);

    /// <summary>
    /// Map the elements of an array onto a new array of element type
    /// <paramref name="newArrayType"/> via the provided function.
    /// </summary>
    public static Array Map(Array array, Type newArrayType, ArrayMapperDelegate mapper)
    {
        Array newArray = Array.CreateInstance(newArrayType, array.LongLength);
        for (long i = 0; i < newArray.LongLength; i++)
        {
            newArray.SetValue(mapper(array.GetValue(i)), i);
        }
        return newArray;
    }

    /// <summary>
    /// Copies an ICollection into a new array of the given element type.
    /// </summary>
    public static Array CollectionToArray(ICollection collection, Type elementType)
    {
        Array newArray = Array.CreateInstance(elementType, collection.Count);
        collection.CopyTo(newArray, 0);
        return newArray;
    }

    /// <summary>
    /// Copies an IEnumerable into a new array of the given element type.
    /// The caller-supplied <paramref name="count"/> must match the number of
    /// elements the enumerable yields.
    /// </summary>
    public static Array EnumerableToArray(IEnumerable enumerable, int count, Type elementType)
    {
        Array newArray = Array.CreateInstance(elementType, count);
        int i = 0;
        foreach (object o in enumerable)
            newArray.SetValue(o, i++);
        return newArray;
    }

    /// <summary>
    /// Compares two arrays for element-wise equality. Both arrays must be the
    /// same runtime type and length; elements are compared with Equals.
    /// </summary>
    /// <param name="a">Array a.</param>
    /// <param name="b">Array b.</param>
    /// <returns>true if the arrays are equal; otherwise, false.</returns>
    public static bool Compare(object[] a, object[] b)
    {
        if (a == b)
            return true;
        else if (a == null || b == null)
            return false;
        else if (a.Length != b.Length)
            return false;
        else if (a.GetType() != b.GetType())
            return false;
        else
        {
            for (int i = 0; i < a.Length; i++)
            {
                // value equality (null-safe) rather than the reference-only
                // comparison `a[i] != b[i]` performs on object operands: the
                // documented contract is element equality
                if (!object.Equals(a[i], b[i]))
                    return false;
            }
            return true;
        }
    }

    /// <summary>
    /// Compares two byte arrays for element-wise equality.
    /// </summary>
    public static bool CompareBytes(byte[] a, byte[] b)
    {
        if (a == b)
            return true;
        else if (a == null || b == null)
            return false;
        else if (a.Length != b.Length)
            return false;
        else
        {
            for (int i = 0; i < a.Length; i++)
                if (a[i] != b[i])
                    return false;
            return true;
        }
    }

    /// <summary>
    /// Swaps the elements at the two given indices in place.
    /// </summary>
    /// <exception cref="IndexOutOfRangeException">Either index is out of bounds.</exception>
    public static void Swap(Array array, int indexOne, int indexTwo)
    {
        int length = array.Length;
        if (length <= indexOne || length <= indexTwo || 0 > indexOne || 0 > indexTwo)
            throw new IndexOutOfRangeException();
        object tmp = array.GetValue(indexOne);
        array.SetValue(array.GetValue(indexTwo), indexOne);
        array.SetValue(tmp, indexTwo);
    }

    /// <summary>
    /// Searches an array for an item using an external comparer to determine if the items are equal.
    /// </summary>
    /// <param name="array">the array to search for the matching item.</param>
    /// <param name="searchState">parameter passed as state to the hit-tester delegate</param>
    /// <param name="hitTester">the delegate for testing whether the current index is a hit.</param>
    /// <returns>the index of the first hit, or -1 if no element matches.</returns>
    public static int SearchForIndexOf<T, K>(K[] array, T searchState, ArraySearchHitTester<T, K> hitTester)
    {
        for (int i = 0; i < array.Length; i++)
        {
            if (hitTester(searchState, array[i]))
                return i;
        }
        return -1;
    }

    public delegate bool ArraySearchHitTester<L, M>(L searchState, M arrayItem);

    /// <summary>
    /// Stable sort, in-place, not too slow if the list is already mostly sorted.
    /// </summary>
    public static void InsertionSort<T>(List<T> list, Comparison<T> comparison)
    {
        for (int i = 1; i < list.Count; i++)
        {
            T item = list[i];
            int j = i - 1;
            // TODO: Use binary search instead of linear probe
            while (j >= 0 && comparison(list[j], item) > 0)
                --j;
            if (j + 1 != i)
            {
                list.RemoveAt(i);
                list.Insert(j + 1, item);
            }
        }
    }

    /// <summary>
    /// Returns a new array containing the elements of <paramref name="a"/>
    /// followed by the elements of <paramref name="b"/>.
    /// </summary>
    public static TElement[] Concat<TElement>(TElement[] a, TElement[] b)
    {
        TElement[] result = new TElement[a.Length + b.Length];
        Array.Copy(a, result, a.Length);
        Array.Copy(b, 0, result, a.Length, b.Length);
        return result;
    }

    /// <summary>
    /// Determines whether any element of an array satisfies a condition.
    /// </summary>
    /// <param name="array">The array to test.</param>
    /// <param name="predicate">A function to test each element for a condition.</param>
    /// <returns>true if any element of the array satisfies the condition, and false otherwise.</returns>
    public static bool Any<TElement>(TElement[] array, Predicate<TElement> predicate)
    {
        foreach (TElement element in array)
        {
            if (predicate(element))
            {
                return true;
            }
        }
        return false;
    }
}
}
| |
using System;
namespace Versioning
{
public class InvoiceRecord : Sage_Container, ILinkRecord
{
    /* Autogenerated by sage_wrapper_generator.pl */
    // Exactly one of these version-specific wrappers is non-null; it is
    // selected by m_version in the constructor and used by every dispatch
    // switch below.
    SageDataObject110.InvoiceRecord ir11;
    SageDataObject120.InvoiceRecord ir12;
    SageDataObject130.InvoiceRecord ir13;
    SageDataObject140.InvoiceRecord ir14;
    SageDataObject150.InvoiceRecord ir15;
    SageDataObject160.InvoiceRecord ir16;
    SageDataObject170.InvoiceRecord ir17;

    /// <summary>
    /// Wraps a version-specific SageDataObject InvoiceRecord behind this
    /// version-independent facade.
    /// </summary>
    /// <param name="inner">The version-specific InvoiceRecord instance to wrap.</param>
    /// <param name="version">Sage SDO major version (11..17).</param>
    /// <exception cref="InvalidOperationException">Unsupported version.</exception>
    public InvoiceRecord(object inner, int version)
        : base(version) {
        switch (m_version) {
            case 11:
                ir11 = (SageDataObject110.InvoiceRecord)inner;
                m_fields = new Fields(ir11.Fields, m_version);
                return;
            case 12:
                ir12 = (SageDataObject120.InvoiceRecord)inner;
                m_fields = new Fields(ir12.Fields, m_version);
                return;
            case 13:
                ir13 = (SageDataObject130.InvoiceRecord)inner;
                m_fields = new Fields(ir13.Fields, m_version);
                return;
            case 14:
                ir14 = (SageDataObject140.InvoiceRecord)inner;
                m_fields = new Fields(ir14.Fields, m_version);
                return;
            case 15:
                ir15 = (SageDataObject150.InvoiceRecord)inner;
                m_fields = new Fields(ir15.Fields, m_version);
                return;
            case 16:
                ir16 = (SageDataObject160.InvoiceRecord)inner;
                m_fields = new Fields(ir16.Fields, m_version);
                return;
            case 17:
                ir17 = (SageDataObject170.InvoiceRecord)inner;
                m_fields = new Fields(ir17.Fields, m_version);
                return;
            default: throw new InvalidOperationException("ver");
        }
    }

    /* Autogenerated with record_generator.pl */
    // NOTE(review): these constants appear unused within this class;
    // presumably emitted by the generator for consistency -- confirm before
    // removing.
    const string ACCOUNT_REF = "ACCOUNT_REF";
    const string INVOICERECORD = "InvoiceRecord";

    // INVOICE_NUMBER / STR_INVOICE_TYPE_CODE and the indexer presumably come
    // from the Sage_Container base class (not visible in this file).
    public int Invoice_Number {
        get { return (int)this[INVOICE_NUMBER]; }
        set { this[INVOICE_NUMBER] = value; }
    }

    public InvoiceType Invoice_Type {
        get { return (InvoiceType)this[STR_INVOICE_TYPE_CODE]; }
        set { this[STR_INVOICE_TYPE_CODE] = value; }
    }

    /// <summary>True when this record's type is a product credit note.</summary>
    public bool Is_Product_Credit {
        get { return this.Invoice_Type == Versioning.InvoiceType.sdoProductCredit; }
    }

    /// <summary>True when this record's type is a product invoice.</summary>
    public bool Is_Product_Invoice {
        get { return this.Invoice_Type == Versioning.InvoiceType.sdoProductInvoice; }
    }

    /// <summary>Dispatches AddNew to the wrapper for the active version.</summary>
    public bool AddNew() {
        switch (m_version) {
            case 11: return ir11.AddNew();
            case 12: return ir12.AddNew();
            case 13: return ir13.AddNew();
            case 14: return ir14.AddNew();
            case 15: return ir15.AddNew();
            case 16: return ir16.AddNew();
            case 17: return ir17.AddNew();
            default: throw new InvalidOperationException("ver");
        }
    }

    /// <summary>Dispatches Update to the wrapper for the active version.</summary>
    public bool Update() {
        switch (m_version) {
            case 11: return ir11.Update();
            case 12: return ir12.Update();
            case 13: return ir13.Update();
            case 14: return ir14.Update();
            case 15: return ir15.Update();
            case 16: return ir16.Update();
            case 17: return ir17.Update();
            default: throw new InvalidOperationException("ver");
        }
    }

    /// <summary>Dispatches Edit to the wrapper for the active version.</summary>
    public bool Edit() {
        switch (m_version) {
            case 11: return ir11.Edit();
            case 12: return ir12.Edit();
            case 13: return ir13.Edit();
            case 14: return ir14.Edit();
            case 15: return ir15.Edit();
            case 16: return ir16.Edit();
            case 17: return ir17.Edit();
            default: throw new InvalidOperationException("ver");
        }
    }

    /// <summary>Dispatches Find to the wrapper for the active version.</summary>
    public bool Find(bool partial) {
        switch (m_version) {
            case 11: return ir11.Find(partial);
            case 12: return ir12.Find(partial);
            case 13: return ir13.Find(partial);
            case 14: return ir14.Find(partial);
            case 15: return ir15.Find(partial);
            case 16: return ir16.Find(partial);
            case 17: return ir17.Find(partial);
            default: throw new InvalidOperationException("ver");
        }
    }

    /// <summary>Dispatches Move to the wrapper for the active version.</summary>
    public void Move(int iRecord) {
        switch (m_version) {
            case 11: ir11.Move(iRecord); break;
            case 12: ir12.Move(iRecord); break;
            case 13: ir13.Move(iRecord); break;
            case 14: ir14.Move(iRecord); break;
            case 15: ir15.Move(iRecord); break;
            case 16: ir16.Move(iRecord); break;
            case 17: ir17.Move(iRecord); break;
            default: throw new InvalidOperationException("ver");
        }
    }

    /// <summary>Dispatches MoveFirst to the wrapper for the active version.</summary>
    public bool MoveFirst() {
        switch (m_version) {
            case 11: return ir11.MoveFirst();
            case 12: return ir12.MoveFirst();
            case 13: return ir13.MoveFirst();
            case 14: return ir14.MoveFirst();
            case 15: return ir15.MoveFirst();
            case 16: return ir16.MoveFirst();
            case 17: return ir17.MoveFirst();
            default: throw new InvalidOperationException("ver");
        }
    }

    /// <summary>Dispatches MoveNext to the wrapper for the active version.</summary>
    public bool MoveNext() {
        switch (m_version) {
            case 11: return ir11.MoveNext();
            case 12: return ir12.MoveNext();
            case 13: return ir13.MoveNext();
            case 14: return ir14.MoveNext();
            case 15: return ir15.MoveNext();
            case 16: return ir16.MoveNext();
            case 17: return ir17.MoveNext();
            default: throw new InvalidOperationException("ver");
        }
    }

    /// <summary>Dispatches MoveLast to the wrapper for the active version.</summary>
    public bool MoveLast() {
        switch (m_version) {
            case 11: return ir11.MoveLast();
            case 12: return ir12.MoveLast();
            case 13: return ir13.MoveLast();
            case 14: return ir14.MoveLast();
            case 15: return ir15.MoveLast();
            case 16: return ir16.MoveLast();
            case 17: return ir17.MoveLast();
            default: throw new InvalidOperationException("ver");
        }
    }

    /// <summary>Dispatches MovePrev to the wrapper for the active version.</summary>
    public bool MovePrev() {
        switch (m_version) {
            case 11: return ir11.MovePrev();
            case 12: return ir12.MovePrev();
            case 13: return ir13.MovePrev();
            case 14: return ir14.MovePrev();
            case 15: return ir15.MovePrev();
            case 16: return ir16.MovePrev();
            case 17: return ir17.MovePrev();
            default: throw new InvalidOperationException("ver");
        }
    }

    /// <summary>Dispatches CanRemove to the wrapper for the active version.</summary>
    public bool CanRemove() {
        switch (m_version) {
            case 11: return ir11.CanRemove();
            case 12: return ir12.CanRemove();
            case 13: return ir13.CanRemove();
            case 14: return ir14.CanRemove();
            case 15: return ir15.CanRemove();
            case 16: return ir16.CanRemove();
            case 17: return ir17.CanRemove();
            default: throw new InvalidOperationException("ver");
        }
    }

    /// <summary>Dispatches Remove to the wrapper for the active version.</summary>
    public bool Remove() {
        switch (m_version) {
            case 11: return ir11.Remove();
            case 12: return ir12.Remove();
            case 13: return ir13.Remove();
            case 14: return ir14.Remove();
            case 15: return ir15.Remove();
            case 16: return ir16.Remove();
            case 17: return ir17.Remove();
            default: throw new InvalidOperationException("ver");
        }
    }

    object ILink.Link {
        get { return Link; }
    }

    /// <summary>
    /// The linked item of the active version's record, wrapped in the
    /// version-independent InvoiceItem facade.
    /// </summary>
    public InvoiceItem Link {
        get {
            object inner;
            switch (m_version) {
                case 11: inner = ir11.Link; break;
                case 12: inner = ir12.Link; break;
                case 13: inner = ir13.Link; break;
                case 14: inner = ir14.Link; break;
                case 15: inner = ir15.Link; break;
                case 16: inner = ir16.Link; break;
                case 17: inner = ir17.Link; break;
                default: throw new InvalidOperationException("ver");
            }
            return new InvoiceItem(inner, m_version);
        }
        set {
            switch (m_version) {
                case 11: ir11.Link = value; break;
                case 12: ir12.Link = value; break;
                case 13: ir13.Link = value; break;
                case 14: ir14.Link = value; break;
                case 15: ir15.Link = value; break;
                case 16: ir16.Link = value; break;
                case 17: ir17.Link = value; break;
                // previously this setter silently ignored an unsupported
                // version; throw for consistency with every other dispatch
                // in this class (unreachable in practice: the constructor
                // already rejects versions outside 11..17)
                default: throw new InvalidOperationException("ver");
            }
        }
    }

    /// <summary>Count of the active version's record.</summary>
    public int Count {
        get {
            switch (m_version) {
                case 11: return ir11.Count;
                case 12: return ir12.Count;
                case 13: return ir13.Count;
                case 14: return ir14.Count;
                case 15: return ir15.Count;
                case 16: return ir16.Count;
                case 17: return ir17.Count;
                default: throw new InvalidOperationException("ver");
            }
        }
        set {
            switch (m_version) {
                case 11: ir11.Count = value; break;
                case 12: ir12.Count = value; break;
                case 13: ir13.Count = value; break;
                case 14: ir14.Count = value; break;
                case 15: ir15.Count = value; break;
                case 16: ir16.Count = value; break;
                case 17: ir17.Count = value; break;
                default: throw new InvalidOperationException("ver");
            }
        }
    }

    /// <summary>The ledger type implied by this record's invoice type.</summary>
    public LedgerType? Ledger_Type {
        get { return Invoice_Type.Ledger_Type(); }
    }
}
}
| |
//---------------------------------------------------------------------
// <copyright file="XmlConstants.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// <summary>
// XmlConstants for metadata
// </summary>
//
// @owner pratikp
//---------------------------------------------------------------------
#if ASTORIA_CLIENT
namespace System.Data.Services.Client
#else
namespace System.Data.Services
#endif
{
/// <summary>
/// Class that contains all the constants for various schemas.
/// </summary>
internal static class XmlConstants
{
#region CLR / Reflection constants.
/// <summary>"InitializeService" method name for service initialize.</summary>
internal const string ClrServiceInitializationMethodName = "InitializeService";
#endregion CLR / Reflection constants.
#region HTTP constants.
/// <summary>id of the corresponding body</summary>
internal const string HttpContentID = "Content-ID";
/// <summary>byte-length of the corresponding body</summary>
internal const string HttpContentLength = "Content-Length";
/// <summary>mime-type of the corresponding body</summary>
internal const string HttpContentType = "Content-Type";
/// <summary>content disposition of the response (a hint how to handle the response)</summary>
internal const string HttpContentDisposition = "Content-Disposition";
/// <summary>'DataServiceVersion' - HTTP header name for data service version.</summary>
internal const string HttpDataServiceVersion = "DataServiceVersion";
/// <summary>'MaxDataServiceVersion' - HTTP header name for maximum understood data service version.</summary>
internal const string HttpMaxDataServiceVersion = "MaxDataServiceVersion";
/// <summary>'no-cache' - HTTP value for Cache-Control header.</summary>
internal const string HttpCacheControlNoCache = "no-cache";
/// <summary>'charset' - HTTP parameter name.</summary>
internal const string HttpCharsetParameter = "charset";
/// <summary>HTTP method name for GET requests.</summary>
internal const string HttpMethodGet = "GET";
/// <summary>HTTP method name for POST requests.</summary>
internal const string HttpMethodPost = "POST";
/// <summary> Http Put Method name - basically used for updating resource.</summary>
internal const string HttpMethodPut = "PUT";
/// <summary>HTTP method name for delete requests.</summary>
internal const string HttpMethodDelete = "DELETE";
/// <summary>HTTP method name for </summary>
internal const string HttpMethodMerge = "MERGE";
/// <summary>HTTP query string parameter value for expand.</summary>
internal const string HttpQueryStringExpand = "$expand";
/// <summary>HTTP query string parameter value for filtering.</summary>
internal const string HttpQueryStringFilter = "$filter";
/// <summary>HTTP query string parameter value for ordering.</summary>
internal const string HttpQueryStringOrderBy = "$orderby";
/// <summary>HTTP query string parameter value for skipping elements.</summary>
internal const string HttpQueryStringSkip = "$skip";
/// <summary>HTTP query string parameter value for limiting the number of elements.</summary>
internal const string HttpQueryStringTop = "$top";
/// <summary>HTTP query string parameter value for counting query result set</summary>
internal const string HttpQueryStringInlineCount = "$inlinecount";
/// <summary>HTTP query string parameter value for skipping results based on paging.</summary>
internal const string HttpQueryStringSkipToken = "$skiptoken";
/// <summary>Property prefix for the skip token property in expanded results for a skip token</summary>
internal const string SkipTokenPropertyPrefix = "SkipTokenProperty";
/// <summary>HTTP query string parameter value for counting query result set</summary>
internal const string HttpQueryStringValueCount = "$count";
/// <summary>HTTP query string parameter value for projection.</summary>
internal const string HttpQueryStringSelect = "$select";
/// <summary>'q' - HTTP q-value parameter name.</summary>
internal const string HttpQValueParameter = "q";
/// <summary>'X-HTTP-Method' - HTTP header name for requests that want to tunnel a method through POST.</summary>
internal const string HttpXMethod = "X-HTTP-Method";
/// <summary>HTTP name for Accept header</summary>
internal const string HttpRequestAccept = "Accept";
/// <summary>HTTP name for If-Match header</summary>
internal const string HttpRequestAcceptCharset = "Accept-Charset";
/// <summary>HTTP name for If-Match header</summary>
internal const string HttpRequestIfMatch = "If-Match";
/// <summary>HTTP name for If-None-Match header</summary>
internal const string HttpRequestIfNoneMatch = "If-None-Match";
/// <summary>multi-part keyword in content-type to identify batch separator</summary>
internal const string HttpMultipartBoundary = "boundary";
#if ASTORIA_CLIENT
/// <summary>multi-part mixed batch separator</summary>
internal const string HttpMultipartBoundaryBatch = "batch";
/// <summary>multi-part mixed changeset separator</summary>
internal const string HttpMultipartBoundaryChangeSet = "changeset";
#endif
/// <summary>'Allow' - HTTP response header for allowed verbs.</summary>
internal const string HttpResponseAllow = "Allow";
/// <summary>'no-cache' - HTTP value for Cache-Control header.</summary>
internal const string HttpResponseCacheControl = "Cache-Control";
/// <summary>HTTP name for ETag header</summary>
internal const string HttpResponseETag = "ETag";
/// <summary>HTTP name for location header</summary>
internal const string HttpResponseLocation = "Location";
/// <summary>HTTP name for Status-Code header</summary>
internal const string HttpResponseStatusCode = "Status-Code";
/// <summary>multi-part mixed batch separator for response stream</summary>
internal const string HttpMultipartBoundaryBatchResponse = "batchresponse";
/// <summary>multi-part mixed changeset separator</summary>
internal const string HttpMultipartBoundaryChangesetResponse = "changesetresponse";
/// <summary>Content-Transfer-Encoding header for batch requests.</summary>
internal const string HttpContentTransferEncoding = "Content-Transfer-Encoding";
/// <summary>Http Version in batching requests and response.</summary>
internal const string HttpVersionInBatching = "HTTP/1.1";
/// <summary>To checks if the resource exists or not.</summary>
internal const string HttpAnyETag = "*";
/// <summary>Weak etags in HTTP must start with W/.
/// Look in http://www.ietf.org/rfc/rfc2616.txt?number=2616 section 14.19 for more information.</summary>
internal const string HttpWeakETagPrefix = "W/\"";
/// <summary>The character set the client wants the response to be in.</summary>
internal const string HttpAcceptCharset = "Accept-Charset";
/// <summary>The name of the Cookie HTTP header</summary>
internal const string HttpCookie = "Cookie";
/// <summary>The Slug header name. Used by ATOM to hint the server on which MR is being POSTed.</summary>
internal const string HttpSlug = "Slug";
#endregion HTTP constants.
#region MIME constants.
/// <summary>MIME type for requesting any media type.</summary>
internal const string MimeAny = "*/*";
/// <summary>MIME type for ATOM bodies (http://www.iana.org/assignments/media-types/application/).</summary>
internal const string MimeApplicationAtom = "application/atom+xml";
/// <summary>MIME type for ATOM Service Documents (http://tools.ietf.org/html/rfc5023#section-8).</summary>
internal const string MimeApplicationAtomService = "application/atomsvc+xml";
/// <summary>MIME type for JSON bodies (http://www.iana.org/assignments/media-types/application/).</summary>
internal const string MimeApplicationJson = "application/json";
/// <summary>MIME type general binary bodies (http://www.iana.org/assignments/media-types/application/).</summary>
internal const string MimeApplicationOctetStream = "application/octet-stream";
/// <summary>MIME type for batch requests - this mime type must be specified in CUD changesets or GET batch requests.</summary>
internal const string MimeApplicationHttp = "application/http";
/// <summary>'application' - MIME type for application types.</summary>
internal const string MimeApplicationType = "application";
/// <summary>MIME type for XML bodies.</summary>
internal const string MimeApplicationXml = "application/xml";
/// <summary>'json' - constant for MIME JSON subtypes.</summary>
internal const string MimeJsonSubType = "json";
/// <summary>"application/xml", MIME type for metadata requests.</summary>
internal const string MimeMetadata = MimeApplicationXml;
/// <summary>MIME type for changeset multipart/mixed</summary>
internal const string MimeMultiPartMixed = "multipart/mixed";
/// <summary>MIME type for plain text bodies.</summary>
internal const string MimeTextPlain = "text/plain";
/// <summary>'text' - MIME type for text subtypes.</summary>
internal const string MimeTextType = "text";
/// <summary>MIME type for XML bodies (deprecated).</summary>
internal const string MimeTextXml = "text/xml";
/// <summary>'xml' - constant for MIME xml subtypes.</summary>
internal const string MimeXmlSubType = "xml";
/// <summary>Content-Transfer-Encoding value for batch requests.</summary>
internal const string BatchRequestContentTransferEncoding = "binary";
#if ASTORIA_CLIENT
/// <summary>Link referring to a collection i.e. feed.</summary>
internal const string LinkMimeTypeFeed = "application/atom+xml;type=feed";
/// <summary>Link referring to an entity i.e. entry.</summary>
internal const string LinkMimeTypeEntry = "application/atom+xml;type=entry";
/// <summary>text for the utf8 encoding</summary>
internal const string Utf8Encoding = "UTF-8";
/// <summary>Default encoding used for writing textual media link entries</summary>
internal const string MimeTypeUtf8Encoding = ";charset=" + Utf8Encoding;
#endif
#endregion MIME constants.
#region URI constants.
/// <summary>A prefix that turns an absolute-path URI into an absolute-URI.</summary>
internal const string UriHttpAbsolutePrefix = "http://host";
/// <summary>A segment name in a URI that indicates metadata is being requested.</summary>
internal const string UriMetadataSegment = "$metadata";
/// <summary>A segment name in a URI that indicates a plain primitive value is being requested.</summary>
internal const string UriValueSegment = "$value";
/// <summary>A segment name in a URI that indicates batch processing is being requested.</summary>
internal const string UriBatchSegment = "$batch";
/// <summary>A segment name in a URI that indicates that this is a link operation.</summary>
internal const string UriLinkSegment = "$links";
/// <summary>A segment name in a URI that indicates that this is a count operation.</summary>
internal const string UriCountSegment = "$count";
/// <summary>A const value for the query parameter $inlinecount to set counting mode to inline</summary>
internal const string UriRowCountAllOption = "allpages";
/// <summary>A const value for the query parameter $inlinecount to set counting mode to none</summary>
internal const string UriRowCountOffOption = "none";
#endregion URI constants.
#region WCF constants.
/// <summary>"Binary" - WCF element name for binary content in XML-wrapping streams.</summary>
internal const string WcfBinaryElementName = "Binary";
#endregion WCF constants.
#region ATOM constants
/// <summary>XML element name to mark content element in Atom.</summary>
internal const string AtomContentElementName = "content";
/// <summary>XML element name to mark entry element in Atom.</summary>
internal const string AtomEntryElementName = "entry";
/// <summary>XML element name to mark feed element in Atom.</summary>
internal const string AtomFeedElementName = "feed";
#if ASTORIA_CLIENT
/// <summary>'author' - XML element name for ATOM 'author' element for entries.</summary>
internal const string AtomAuthorElementName = "author";
/// <summary>'contributor' - XML element name for ATOM 'author' element for entries.</summary>
internal const string AtomContributorElementName = "contributor";
/// <summary>'category' - XML element name for ATOM 'category' element for entries.</summary>
internal const string AtomCategoryElementName = "category";
/// <summary>'scheme' - XML attribute name for ATOM 'scheme' attribute for categories.</summary>
internal const string AtomCategorySchemeAttributeName = "scheme";
/// <summary>'term' - XML attribute name for ATOM 'term' attribute for categories.</summary>
internal const string AtomCategoryTermAttributeName = "term";
/// <summary>XML element name to mark id element in Atom.</summary>
internal const string AtomIdElementName = "id";
/// <summary>XML element name to mark link element in Atom.</summary>
internal const string AtomLinkElementName = "link";
/// <summary>XML element name to mark link relation attribute in Atom.</summary>
internal const string AtomLinkRelationAttributeName = "rel";
/// <summary>Atom attribute that indicates the actual location for an entry's content.</summary>
internal const string AtomContentSrcAttributeName = "src";
/// <summary>XML element string for "next" links: [atom:link rel="next"]</summary>
internal const string AtomLinkNextAttributeString = "next";
#endif
/// <summary>Type of content for syndication property which can be one of Plaintext, Html or XHtml</summary>
internal const string MetadataAttributeEpmContentKind = "FC_ContentKind";
/// <summary>Whether to keep the property value in the content section</summary>
internal const string MetadataAttributeEpmKeepInContent = "FC_KeepInContent";
/// <summary>TargetNamespace prefix for non-syndication mapping</summary>
internal const string MetadataAttributeEpmNsPrefix = "FC_NsPrefix";
/// <summary>TargetNamespace URI for non-syndication mapping</summary>
internal const string MetadataAttributeEpmNsUri = "FC_NsUri";
/// <summary>Target element or attribute name</summary>
internal const string MetadataAttributeEpmTargetPath = "FC_TargetPath";
/// <summary>Source property name</summary>
internal const string MetadataAttributeEpmSourcePath = "FC_SourcePath";
/// <summary>author/email</summary>
internal const string SyndAuthorEmail = "SyndicationAuthorEmail";
/// <summary>author/name</summary>
internal const string SyndAuthorName = "SyndicationAuthorName";
/// <summary>author/uri</summary>
internal const string SyndAuthorUri = "SyndicationAuthorUri";
/// <summary>published</summary>
internal const string SyndPublished = "SyndicationPublished";
/// <summary>rights</summary>
internal const string SyndRights = "SyndicationRights";
/// <summary>summary</summary>
internal const string SyndSummary = "SyndicationSummary";
/// <summary>title</summary>
internal const string SyndTitle = "SyndicationTitle";
/// <summary>'updated' - XML element name for ATOM 'updated' element for entries.</summary>
internal const string AtomUpdatedElementName = "updated";
/// <summary>contributor/email</summary>
internal const string SyndContributorEmail = "SyndicationContributorEmail";
/// <summary>contributor/name</summary>
internal const string SyndContributorName = "SyndicationContributorName";
/// <summary>contributor/uri</summary>
internal const string SyndContributorUri = "SyndicationContributorUri";
/// <summary>updated</summary>
internal const string SyndUpdated = "SyndicationUpdated";
/// <summary>Plaintext</summary>
internal const string SyndContentKindPlaintext = "text";
/// <summary>HTML</summary>
internal const string SyndContentKindHtml = "html";
/// <summary>XHTML</summary>
internal const string SyndContentKindXHtml = "xhtml";
/// <summary>XML element name to mark href attribute element in Atom.</summary>
internal const string AtomHRefAttributeName = "href";
/// <summary>XML element name to mark summary element in Atom.</summary>
internal const string AtomSummaryElementName = "summary";
/// <summary>XML element name to mark author/name or contributor/name element in Atom.</summary>
internal const string AtomNameElementName = "name";
/// <summary>XML element name to mark author/email or contributor/email element in Atom.</summary>
internal const string AtomEmailElementName = "email";
/// <summary>XML element name to mark author/uri or contributor/uri element in Atom.</summary>
internal const string AtomUriElementName = "uri";
/// <summary>XML element name to mark published element in Atom.</summary>
internal const string AtomPublishedElementName = "published";
/// <summary>XML element name to mark rights element in Atom.</summary>
internal const string AtomRightsElementName = "rights";
/// <summary>XML element name to mark 'collection' element in APP.</summary>
internal const string AtomPublishingCollectionElementName = "collection";
/// <summary>XML element name to mark 'service' element in APP.</summary>
internal const string AtomPublishingServiceElementName = "service";
/// <summary>XML value for a default workspace in APP.</summary>
internal const string AtomPublishingWorkspaceDefaultValue = "Default";
/// <summary>XML element name to mark 'workspace' element in APP.</summary>
internal const string AtomPublishingWorkspaceElementName = "workspace";
/// <summary>XML element name to mark title element in Atom.</summary>
internal const string AtomTitleElementName = "title";
/// <summary>XML attribute name to mark the type attribute in Atom.</summary>
internal const string AtomTypeAttributeName = "type";
/// <summary> Atom link relation attribute value for self links.</summary>
internal const string AtomSelfRelationAttributeValue = "self";
/// <summary> Atom link relation attribute value for edit links.</summary>
internal const string AtomEditRelationAttributeValue = "edit";
/// <summary> Atom link relation attribute value for edit-media links.</summary>
internal const string AtomEditMediaRelationAttributeValue = "edit-media";
/// <summary> Atom attribute which indicates the null value for the element.</summary>
internal const string AtomNullAttributeName = "null";
/// <summary> Atom attribute which indicates the etag value for the declaring entry element.</summary>
internal const string AtomETagAttributeName = "etag";
/// <summary>'Inline' - wrapping element for inlined entry/feed content.</summary>
internal const string AtomInlineElementName = "inline";
/// <summary>Element containing property values when 'content' is used for media link entries</summary>
internal const string AtomPropertiesElementName = "properties";
/// <summary>'count' element</summary>
internal const string RowCountElement = "count";
#endregion ATOM constants
#region XML constants.
/// <summary>'element', the XML element name for items in enumerations.</summary>
internal const string XmlCollectionItemElementName = "element";
/// <summary>XML element name for an error.</summary>
internal const string XmlErrorElementName = "error";
/// <summary>XML element name for an error code.</summary>
internal const string XmlErrorCodeElementName = "code";
/// <summary>XML element name for the inner error details.</summary>
internal const string XmlErrorInnerElementName = "innererror";
/// <summary>XML element name for an internal exception.</summary>
internal const string XmlErrorInternalExceptionElementName = "internalexception";
/// <summary>XML element name for an exception type.</summary>
internal const string XmlErrorTypeElementName = "type";
/// <summary>XML element name for an exception stack trace.</summary>
internal const string XmlErrorStackTraceElementName = "stacktrace";
/// <summary>XML element name for an error message.</summary>
internal const string XmlErrorMessageElementName = "message";
/// <summary>'false' literal, as used in XML.</summary>
internal const string XmlFalseLiteral = "false";
/// <summary>'true' literal, as used in XML.</summary>
internal const string XmlTrueLiteral = "true";
/// <summary>'INF' literal, as used in XML for infinity.</summary>
internal const string XmlInfinityLiteral = "INF";
/// <summary>'NaN' literal, as used in XML for not-a-number values.</summary>
internal const string XmlNaNLiteral = "NaN";
/// <summary>XML attribute value to indicate the base URI for a document or element.</summary>
internal const string XmlBaseAttributeName = "base";
/// <summary>'lang' XML attribute name for annotation language.</summary>
internal const string XmlLangAttributeName = "lang";
/// <summary>XML attribute name for whitespace parsing control.</summary>
internal const string XmlSpaceAttributeName = "space";
/// <summary>XML attribute value to indicate whitespace should be preserved.</summary>
internal const string XmlSpacePreserveValue = "preserve";
/// <summary>XML attribute name to pass to the XMLReader.GetValue API to get the xml:base attribute value.</summary>
internal const string XmlBaseAttributeNameWithPrefix = "xml:base";
#endregion XML constants.
#region XML namespaces.
/// <summary> Schema Namespace For Edm.</summary>
internal const string EdmV1Namespace = "http://schemas.microsoft.com/ado/2006/04/edm";
/// <summary> Schema Namespace For Edm 1.1.</summary>
internal const string EdmV1dot1Namespace = "http://schemas.microsoft.com/ado/2007/05/edm";
/// <summary> Schema Namespace For Edm 1.2.</summary>
internal const string EdmV1dot2Namespace = "http://schemas.microsoft.com/ado/2008/01/edm";
/// <summary> Schema Namespace For Edm 2.0.</summary>
internal const string EdmV2Namespace = "http://schemas.microsoft.com/ado/2008/09/edm";
/// <summary>XML namespace for data services.</summary>
internal const string DataWebNamespace = "http://schemas.microsoft.com/ado/2007/08/dataservices";
/// <summary>XML namespace for data service annotations.</summary>
internal const string DataWebMetadataNamespace = "http://schemas.microsoft.com/ado/2007/08/dataservices/metadata";
/// <summary>XML namespace for data service links.</summary>
internal const string DataWebRelatedNamespace = "http://schemas.microsoft.com/ado/2007/08/dataservices/related/";
/// <summary>ATOM Scheme Namespace For DataWeb.</summary>
internal const string DataWebSchemeNamespace = "http://schemas.microsoft.com/ado/2007/08/dataservices/scheme";
/// <summary>Schema Namespace for Atom Publishing Protocol.</summary>
internal const string AppNamespace = "http://www.w3.org/2007/app";
/// <summary> Schema Namespace For Atom.</summary>
internal const string AtomNamespace = "http://www.w3.org/2005/Atom";
/// <summary> Schema Namespace prefix For xmlns.</summary>
internal const string XmlnsNamespacePrefix = "xmlns";
/// <summary> Schema Namespace prefix For xml.</summary>
internal const string XmlNamespacePrefix = "xml";
/// <summary> Schema Namespace Prefix For DataWeb.</summary>
internal const string DataWebNamespacePrefix = "d";
/// <summary>'adsm' - namespace prefix for DataWebMetadataNamespace.</summary>
internal const string DataWebMetadataNamespacePrefix = "m";
/// <summary>'http://www.w3.org/2000/xmlns/' - namespace for namespace declarations.</summary>
internal const string XmlNamespacesNamespace = "http://www.w3.org/2000/xmlns/";
/// <summary> Edmx namespace in metadata document.</summary>
internal const string EdmxNamespace = "http://schemas.microsoft.com/ado/2007/06/edmx";
/// <summary> Prefix for Edmx Namespace in metadata document.</summary>
internal const string EdmxNamespacePrefix = "edmx";
#endregion XML namespaces.
#region CDM Schema Xml NodeNames
#region Constant node names in the CDM schema xml
/// <summary> Association Element Name in csdl.</summary>
internal const string Association = "Association";
/// <summary> AssociationSet Element Name in csdl.</summary>
internal const string AssociationSet = "AssociationSet";
/// <summary> ComplexType Element Name in csdl.</summary>
internal const string ComplexType = "ComplexType";
/// <summary> Dependent Element Name in csdl.</summary>
internal const string Dependent = "Dependent";
/// <summary>Format string to describe a collection of a given type.</summary>
internal const string EdmCollectionTypeFormat = "Collection({0})";
/// <summary>EntitySet attribute name in CSDL documents.</summary>
internal const string EdmEntitySetAttributeName = "EntitySet";
/// <summary>FunctionImport element name in CSDL documents.</summary>
internal const string EdmFunctionImportElementName = "FunctionImport";
/// <summary>Mode attribute name in CSDL documents.</summary>
internal const string EdmModeAttributeName = "Mode";
/// <summary>Mode attribute value for 'in' direction in CSDL documents.</summary>
internal const string EdmModeInValue = "In";
/// <summary>Parameter element name in CSDL documents.</summary>
internal const string EdmParameterElementName = "Parameter";
/// <summary>ReturnType attribute name in CSDL documents.</summary>
internal const string EdmReturnTypeAttributeName = "ReturnType";
/// <summary> End Element Name in csdl.</summary>
internal const string End = "End";
/// <summary> EntityType Element Name in csdl.</summary>
internal const string EntityType = "EntityType";
/// <summary> EntityContainer Element Name in csdl.</summary>
internal const string EntityContainer = "EntityContainer";
/// <summary> Key Element Name in csdl.</summary>
internal const string Key = "Key";
/// <summary> NavigationProperty Element Name in csdl.</summary>
internal const string NavigationProperty = "NavigationProperty";
/// <summary> OnDelete Element Name in csdl.</summary>
internal const string OnDelete = "OnDelete";
/// <summary> Principal Element Name in csdl.</summary>
internal const string Principal = "Principal";
/// <summary> Property Element Name in csdl.</summary>
internal const string Property = "Property";
/// <summary> PropertyRef Element Name in csdl.</summary>
internal const string PropertyRef = "PropertyRef";
/// <summary> ReferentialConstraint Element Name in csdl.</summary>
internal const string ReferentialConstraint = "ReferentialConstraint";
/// <summary> Role Element Name in csdl.</summary>
internal const string Role = "Role";
/// <summary> Schema Element Name in csdl.</summary>
internal const string Schema = "Schema";
/// <summary> Edmx Element Name in the metadata document.</summary>
internal const string EdmxElement = "Edmx";
/// <summary> Edmx DataServices Element Name in the metadata document.</summary>
internal const string EdmxDataServicesElement = "DataServices";
/// <summary>Version attribute for the root Edmx Element in the metadata document.</summary>
internal const string EdmxVersion = "Version";
/// <summary>Value of the version attribute in the root edmx element in metadata document.</summary>
internal const string EdmxVersionValue = "1.0";
#endregion //Constant node names in the CDM schema xml
#region const attribute names in the CDM schema XML
/// <summary> Action attribute Name in csdl.</summary>
internal const string Action = "Action";
/// <summary> BaseType attribute Name in csdl.</summary>
internal const string BaseType = "BaseType";
/// <summary> EntitySet attribute and Element Name in csdl.</summary>
internal const string EntitySet = "EntitySet";
/// <summary> FromRole attribute Name in csdl.</summary>
internal const string FromRole = "FromRole";
/// <summary>Abstract attribute Name in csdl.</summary>
internal const string Abstract = "Abstract";
/// <summary>Multiplicity attribute Name in csdl.</summary>
internal const string Multiplicity = "Multiplicity";
/// <summary>Name attribute Name in csdl.</summary>
internal const string Name = "Name";
/// <summary>Namespace attribute Element Name in csdl.</summary>
internal const string Namespace = "Namespace";
/// <summary>ToRole attribute Name in csdl.</summary>
internal const string ToRole = "ToRole";
/// <summary>Type attribute Name in csdl.</summary>
internal const string Type = "Type";
/// <summary>Relationship attribute Name in csdl.</summary>
internal const string Relationship = "Relationship";
#endregion //const attribute names in the CDM schema XML
#region values for multiplicity in Edm
/// <summary>Value for Many multiplicity in csdl.</summary>
internal const string Many = "*";
/// <summary>Value for One multiplicity in csdl.</summary>
internal const string One = "1";
/// <summary>Value for ZeroOrOne multiplicity in csdl.</summary>
internal const string ZeroOrOne = "0..1";
#endregion
#region Edm Facets Names and Values
/// <summary>Nullable Facet Name in csdl.</summary>
internal const string Nullable = "Nullable";
/// <summary>Name of the concurrency attribute.</summary>
internal const string ConcurrencyAttribute = "ConcurrencyMode";
/// <summary>Value of the concurrency attribute.</summary>
internal const string ConcurrencyFixedValue = "Fixed";
#endregion
#endregion // CDM Schema Xml NodeNames
#region DataWeb Elements and Attributes.
/// <summary>'MimeType' - attribute name for property MIME type attributes.</summary>
internal const string DataWebMimeTypeAttributeName = "MimeType";
/// <summary>'OpenType' - attribute name to indicate a type is an OpenType property.</summary>
internal const string DataWebOpenTypeAttributeName = "OpenType";
/// <summary>'HasStream' - attribute name to indicate a type has a default stream property.</summary>
internal const string DataWebAccessHasStreamAttribute = "HasStream";
/// <summary>'true' - attribute value to indicate a type has a default stream property.</summary>
internal const string DataWebAccessDefaultStreamPropertyValue = "true";
/// <summary>Attribute to indicate whether this is a default entity container or not.</summary>
internal const string IsDefaultEntityContainerAttribute = "IsDefaultEntityContainer";
/// <summary>Attribute name in the csdl to indicate whether the service operation must be called using POST or GET verb.</summary>
internal const string ServiceOperationHttpMethodName = "HttpMethod";
/// <summary>uri element name for link bind/unbind operations</summary>
internal const string UriElementName = "uri";
/// <summary>next element name for link paging</summary>
internal const string NextElementName = "next";
/// <summary>XML element name for writing out collection of links.</summary>
internal const string LinkCollectionElementName = "links";
#endregion DataWeb Elements and Attributes.
#region JSON Format constants
/// <summary>JSON property name for an error.</summary>
internal const string JsonError = "error";
/// <summary>JSON property name for an error code.</summary>
internal const string JsonErrorCode = "code";
/// <summary>JSON property name for the inner error details.</summary>
internal const string JsonErrorInner = "innererror";
/// <summary>JSON property name for an internal exception.</summary>
internal const string JsonErrorInternalException = "internalexception";
/// <summary>JSON property name for an error message.</summary>
internal const string JsonErrorMessage = "message";
/// <summary>JSON property name for an exception stack trace.</summary>
internal const string JsonErrorStackTrace = "stacktrace";
/// <summary>JSON property name for an exception type.</summary>
internal const string JsonErrorType = "type";
/// <summary>JSON property name for an error message value.</summary>
internal const string JsonErrorValue = "value";
/// <summary>metadata element name in json payload.</summary>
internal const string JsonMetadataString = "__metadata";
/// <summary>uri element name in json payload.</summary>
internal const string JsonUriString = "uri";
/// <summary>type element name in json payload.</summary>
internal const string JsonTypeString = "type";
/// <summary>edit_media element name in json payload.</summary>
internal const string JsonEditMediaString = "edit_media";
/// <summary>media_src element name in json payload.</summary>
internal const string JsonMediaSrcString = "media_src";
/// <summary>content_type element name in json payload.</summary>
internal const string JsonContentTypeString = "content_type";
/// <summary>media_etag element name in json payload.</summary>
internal const string JsonMediaETagString = "media_etag";
/// <summary>deferred element name in json payload.</summary>
internal const string JsonDeferredString = "__deferred";
/// <summary>etag element name in json payload.</summary>
internal const string JsonETagString = "etag";
/// <summary>row count element name in json payload</summary>
internal const string JsonRowCountString = "__count";
/// <summary>next page link element name in json payload</summary>
internal const string JsonNextString = "__next";
#endregion //JSON Format constants
#region Edm Primitive Type Names
/// <summary>namespace for edm primitive types.</summary>
internal const string EdmNamespace = "Edm";
/// <summary>edm binary primitive type name</summary>
internal const string EdmBinaryTypeName = "Edm.Binary";
/// <summary>edm boolean primitive type name</summary>
internal const string EdmBooleanTypeName = "Edm.Boolean";
/// <summary>edm byte primitive type name</summary>
internal const string EdmByteTypeName = "Edm.Byte";
/// <summary>edm datetime primitive type name</summary>
internal const string EdmDateTimeTypeName = "Edm.DateTime";
/// <summary>edm decimal primitive type name</summary>
internal const string EdmDecimalTypeName = "Edm.Decimal";
/// <summary>edm double primitive type name</summary>
internal const string EdmDoubleTypeName = "Edm.Double";
/// <summary>edm guid primitive type name</summary>
internal const string EdmGuidTypeName = "Edm.Guid";
/// <summary>edm single primitive type name</summary>
internal const string EdmSingleTypeName = "Edm.Single";
/// <summary>edm sbyte primitive type name</summary>
internal const string EdmSByteTypeName = "Edm.SByte";
/// <summary>edm int16 primitive type name</summary>
internal const string EdmInt16TypeName = "Edm.Int16";
/// <summary>edm int32 primitive type name</summary>
internal const string EdmInt32TypeName = "Edm.Int32";
/// <summary>edm int64 primitive type name</summary>
internal const string EdmInt64TypeName = "Edm.Int64";
/// <summary>edm string primitive type name</summary>
internal const string EdmStringTypeName = "Edm.String";
#endregion
#region Astoria Constants
/// <summary>'1.0' - the version 1.0 text for a data service.</summary>
internal const string DataServiceVersion1Dot0 = "1.0";
/// <summary>'2.0' - the version 2.0 text for a data service.</summary>
internal const string DataServiceVersion2Dot0 = "2.0";
/// <summary>'2.0;' - the text for the current server version text.</summary>
internal const string DataServiceVersionCurrent = DataServiceVersion2Dot0 + ";";
/// <summary>1 - the current major version for a data service.</summary>
internal const int DataServiceVersionCurrentMajor = 1;
/// <summary>0 - the current minor version for a data service.</summary>
internal const int DataServiceVersionCurrentMinor = 0;
/// <summary>'binary' constant prefixed to binary literals.</summary>
internal const string LiteralPrefixBinary = "binary";
/// <summary>'datetime' constant prefixed to datetime literals.</summary>
internal const string LiteralPrefixDateTime = "datetime";
/// <summary>'guid' constant prefixed to guid literals.</summary>
internal const string LiteralPrefixGuid = "guid";
/// <summary>'X': Prefix to binary type string representation.</summary>
internal const string XmlBinaryPrefix = "X";
/// <summary>'M': Suffix for decimal type's string representation</summary>
internal const string XmlDecimalLiteralSuffix = "M";
/// <summary>'L': Suffix for long (int64) type's string representation</summary>
internal const string XmlInt64LiteralSuffix = "L";
/// <summary>'f': Suffix for float (single) type's string representation</summary>
internal const string XmlSingleLiteralSuffix = "f";
/// <summary>'D': Suffix for double (Real) type's string representation</summary>
internal const string XmlDoubleLiteralSuffix = "D";
/// <summary>null literal that needs to be returned in the ETag value when the value is null</summary>
internal const string NullLiteralInETag = "null";
/// <summary>Incoming message property name for the original request uri</summary>
internal const string MicrosoftDataServicesRequestUri = "MicrosoftDataServicesRequestUri";
/// <summary>Incoming message property name for the original root service uri</summary>
internal const string MicrosoftDataServicesRootUri = "MicrosoftDataServicesRootUri";
#endregion // Astoria Constants
#region EF constants
/// <summary>Full name for the StoreGeneratedPattern attribute in csdl</summary>
internal const string StoreGeneratedPattern = "http://schemas.microsoft.com/ado/2006/04/edm/ssdl:StoreGeneratedPattern";
#endregion //EF constants
}
}
| |
using System.ComponentModel;
using System.Runtime.Serialization;
namespace EncompassRest.Loans.Enums
{
    /// <summary>
    /// Fannie Mae ARM (adjustable-rate mortgage) index types used by Encompass loan data.
    /// Members whose serialized value begins with a digit carry an <c>n</c> prefix in C#
    /// (identifiers cannot start with a digit) and declare the wire value via
    /// <see cref="EnumMemberAttribute"/>; members without the attribute serialize as their
    /// own name. Retired indexes are marked with a <see cref="DescriptionAttribute"/>.
    /// </summary>
    public enum FannieARMIndexType
    {
        /// <summary>
        /// 6MonthTreasuryBillAuctionHighMonthlyAverage
        /// </summary>
        [EnumMember(Value = "6MonthTreasuryBillAuctionHighMonthlyAverage")]
        n6MonthTreasuryBillAuctionHighMonthlyAverage = 0,
        /// <summary>
        /// 1YearTreasuryConstantMaturitiesMonthlyAverage
        /// </summary>
        [EnumMember(Value = "1YearTreasuryConstantMaturitiesMonthlyAverage")]
        n1YearTreasuryConstantMaturitiesMonthlyAverage = 1,
        /// <summary>
        /// 3YearTreasuryConstantMaturitiesMonthlyAverage
        /// </summary>
        [EnumMember(Value = "3YearTreasuryConstantMaturitiesMonthlyAverage")]
        n3YearTreasuryConstantMaturitiesMonthlyAverage = 2,
        /// <summary>
        /// 5YearTreasuryConstantMaturitiesMonthlyAverage
        /// </summary>
        [EnumMember(Value = "5YearTreasuryConstantMaturitiesMonthlyAverage")]
        n5YearTreasuryConstantMaturitiesMonthlyAverage = 3,
        /// <summary>
        /// NationalAverageContractMortgageRateForThePurchaseOfPreviouslyOccupiedHomes
        /// </summary>
        NationalAverageContractMortgageRateForThePurchaseOfPreviouslyOccupiedHomes = 4,
        /// <summary>
        /// 6MonthTreasuryBillAuctionHighDiscountRate
        /// </summary>
        [EnumMember(Value = "6MonthTreasuryBillAuctionHighDiscountRate")]
        n6MonthTreasuryBillAuctionHighDiscountRate = 5,
        /// <summary>
        /// 1YearTreasuryConstantMaturitiesWeeklyAverage
        /// </summary>
        [EnumMember(Value = "1YearTreasuryConstantMaturitiesWeeklyAverage")]
        n1YearTreasuryConstantMaturitiesWeeklyAverage = 6,
        /// <summary>
        /// 3YearTreasuryConstantMaturitiesWeeklyAverage
        /// </summary>
        [EnumMember(Value = "3YearTreasuryConstantMaturitiesWeeklyAverage")]
        n3YearTreasuryConstantMaturitiesWeeklyAverage = 7,
        /// <summary>
        /// 5YearTreasuryConstantMaturitiesWeeklyAverage
        /// </summary>
        [EnumMember(Value = "5YearTreasuryConstantMaturitiesWeeklyAverage")]
        n5YearTreasuryConstantMaturitiesWeeklyAverage = 8,
        /// <summary>
        /// 6MonthMoneyMarketComBankWeekly
        /// </summary>
        [EnumMember(Value = "6MonthMoneyMarketComBankWeekly")]
        n6MonthMoneyMarketComBankWeekly = 9,
        /// <summary>
        /// FHLBB11thDistrictCostOfFundsMonthlyAverage
        /// </summary>
        FHLBB11thDistrictCostOfFundsMonthlyAverage = 10,
        /// <summary>
        /// 10YearTreasuryConstantMaturitiesWeeklyAverage
        /// </summary>
        [EnumMember(Value = "10YearTreasuryConstantMaturitiesWeeklyAverage")]
        n10YearTreasuryConstantMaturitiesWeeklyAverage = 11,
        /// <summary>
        /// NationalMonthlyMedianCostOfFundsRateMonthlyAverage (Retired 8-25-14)
        /// </summary>
        [Description("NationalMonthlyMedianCostOfFundsRateMonthlyAverage (Retired 8-25-14)")]
        NationalMonthlyMedianCostOfFundsRateMonthlyAverage = 12,
        /// <summary>
        /// 6MonthTreasuryBillAuctionHighInvestmentRate
        /// </summary>
        [EnumMember(Value = "6MonthTreasuryBillAuctionHighInvestmentRate")]
        n6MonthTreasuryBillAuctionHighInvestmentRate = 13,
        /// <summary>
        /// 11thDistrictDailyGuarRate30DayCom5Yr
        /// </summary>
        [EnumMember(Value = "11thDistrictDailyGuarRate30DayCom5Yr")]
        n11thDistrictDailyGuarRate30DayCom5Yr = 14,
        /// <summary>
        /// 5YearFHLBNYAdvanceRateDaily
        /// </summary>
        [EnumMember(Value = "5YearFHLBNYAdvanceRateDaily")]
        n5YearFHLBNYAdvanceRateDaily = 15,
        /// <summary>
        /// 6MonthTreasuryBillSecondaryMarketWeeklyAverage
        /// </summary>
        [EnumMember(Value = "6MonthTreasuryBillSecondaryMarketWeeklyAverage")]
        n6MonthTreasuryBillSecondaryMarketWeeklyAverage = 16,
        /// <summary>
        /// 2YearTreasuryConstantMaturitiesWeeklyAverage
        /// </summary>
        [EnumMember(Value = "2YearTreasuryConstantMaturitiesWeeklyAverage")]
        n2YearTreasuryConstantMaturitiesWeeklyAverage = 17,
        /// <summary>
        /// CumulativeAverageforthePrevious4WeeksWeeklyAverage (Retired 2Q-2016)
        /// </summary>
        [Description("CumulativeAverageforthePrevious4WeeksWeeklyAverage (Retired 2Q-2016)")]
        CumulativeAverageforthePrevious4WeeksWeeklyAverage = 18,
        /// <summary>
        /// CumulativeAverageforthePrevious4WeeksWeeklyAverage6MonthAuctionHighInvestmentRate
        /// </summary>
        CumulativeAverageforthePrevious4WeeksWeeklyAverage6MonthAuctionHighInvestmentRate = 19,
        /// <summary>
        /// CumulativeAverageforthePrevious26WeeksWeeklyAverage
        /// </summary>
        CumulativeAverageforthePrevious26WeeksWeeklyAverage = 20,
        /// <summary>
        /// CumulativeAverageforthePrevious4WeeksWeeklyAverage6MonthAuctionHighDiscRate
        /// </summary>
        CumulativeAverageforthePrevious4WeeksWeeklyAverage6MonthAuctionHighDiscRate = 21,
        /// <summary>
        /// 1MonthWallStreetJournalLIBORRateMonthly
        /// </summary>
        [EnumMember(Value = "1MonthWallStreetJournalLIBORRateMonthly")]
        n1MonthWallStreetJournalLIBORRateMonthly = 22,
        /// <summary>
        /// FHFB11thDistrictCOFCumulativeAverageForThePrevious12Months
        /// </summary>
        FHFB11thDistrictCOFCumulativeAverageForThePrevious12Months = 23,
        /// <summary>
        /// WallStreetJournalPrimeRate
        /// </summary>
        WallStreetJournalPrimeRate = 24,
        /// <summary>
        /// 1YearTreasuryBill364DayDiscountRate (Retired 2Q-2016)
        /// </summary>
        [Description("1YearTreasuryBill364DayDiscountRate (Retired 2Q-2016)")]
        [EnumMember(Value = "1YearTreasuryBill364DayDiscountRate")]
        n1YearTreasuryBill364DayDiscountRate = 25,
        /// <summary>
        /// 1YearTreasuryBill364DayDiscountRateMonthly
        /// </summary>
        [EnumMember(Value = "1YearTreasuryBill364DayDiscountRateMonthly")]
        n1YearTreasuryBill364DayDiscountRateMonthly = 26,
        /// <summary>
        /// 1YearTreasuryBill364DayDiscountRateWeeklyAuctionAverage
        /// </summary>
        [EnumMember(Value = "1YearTreasuryBill364DayDiscountRateWeeklyAuctionAverage")]
        n1YearTreasuryBill364DayDiscountRateWeeklyAuctionAverage = 27,
        /// <summary>
        /// 7YearTreasuryConstantMaturitiesWeeklyAverage
        /// </summary>
        [EnumMember(Value = "7YearTreasuryConstantMaturitiesWeeklyAverage")]
        n7YearTreasuryConstantMaturitiesWeeklyAverage = 28,
        /// <summary>
        /// 1YearFHLBBostonAdvanceRateDaily
        /// </summary>
        [EnumMember(Value = "1YearFHLBBostonAdvanceRateDaily")]
        n1YearFHLBBostonAdvanceRateDaily = 29,
        /// <summary>
        /// FederalReserve6MonthCDRateDaily
        /// </summary>
        FederalReserve6MonthCDRateDaily = 30,
        /// <summary>
        /// SemiAnnualCOFforCaliforniaSemiannualAverage
        /// </summary>
        SemiAnnualCOFforCaliforniaSemiannualAverage = 31,
        /// <summary>
        /// 6MonthCDsSecondaryMarketWeeklyAverage
        /// </summary>
        [EnumMember(Value = "6MonthCDsSecondaryMarketWeeklyAverage")]
        n6MonthCDsSecondaryMarketWeeklyAverage = 32,
        /// <summary>
        /// CumulativeAverageForThePrevious12MonthOfThe6MonthTBillMonthlyAverage
        /// </summary>
        CumulativeAverageForThePrevious12MonthOfThe6MonthTBillMonthlyAverage = 33,
        /// <summary>
        /// Blend50PercentFRM50PercentWeekly1YearTreasurySecurityWeeklyAverage
        /// </summary>
        Blend50PercentFRM50PercentWeekly1YearTreasurySecurityWeeklyAverage = 34,
        /// <summary>
        /// Blend75PercentFRM25PercentWeekly1YearTreasurySecurityWeeklyAverage
        /// </summary>
        Blend75PercentFRM25PercentWeekly1YearTreasurySecurityWeeklyAverage = 35,
        /// <summary>
        /// 6MonthCDsSecondaryMarketMonthlyAverage
        /// </summary>
        [EnumMember(Value = "6MonthCDsSecondaryMarketMonthlyAverage")]
        n6MonthCDsSecondaryMarketMonthlyAverage = 36,
        /// <summary>
        /// CumulativeAverageForThePrevious12Months1YearTreasurySecurityMonthlyAverage
        /// </summary>
        CumulativeAverageForThePrevious12Months1YearTreasurySecurityMonthlyAverage = 37,
        /// <summary>
        /// 6MonthWallStreetJournalLIBORRateMonthlyAverage
        /// </summary>
        [EnumMember(Value = "6MonthWallStreetJournalLIBORRateMonthlyAverage")]
        n6MonthWallStreetJournalLIBORRateMonthlyAverage = 38,
        /// <summary>
        /// SemiAnnualCOFforthe11thDistrictSemiannualAverage
        /// </summary>
        SemiAnnualCOFforthe11thDistrictSemiannualAverage = 39,
        /// <summary>
        /// NationalAverageCOFRatioforSAIFInsuredInstitutionsQuarterlyAverage (Retired 2Q-2016)
        /// </summary>
        [Description("NationalAverageCOFRatioforSAIFInsuredInstitutionsQuarterlyAverage (Retired 2Q-2016)")]
        NationalAverageCOFRatioforSAIFInsuredInstitutionsQuarterlyAverage = 40,
        /// <summary>
        /// QuarterlyAverageOfTheFederalCOFI
        /// </summary>
        QuarterlyAverageOfTheFederalCOFI = 41,
        /// <summary>
        /// MonthlyAverageOfTheWeeklyAverage1YearTreasurySecurityMonthlyAverage
        /// </summary>
        MonthlyAverageOfTheWeeklyAverage1YearTreasurySecurityMonthlyAverage = 42,
        /// <summary>
        /// NationalAverageEffectiveRateoOfAllMortgageloansMonthlyAverage
        /// </summary>
        NationalAverageEffectiveRateoOfAllMortgageloansMonthlyAverage = 43,
        /// <summary>
        /// WallStreetJournal6MonthLIBOR
        /// </summary>
        WallStreetJournal6MonthLIBOR = 44,
        /// <summary>
        /// FreddieMac30Year60DayRequiredNetYieldsDaily
        /// </summary>
        FreddieMac30Year60DayRequiredNetYieldsDaily = 45,
        /// <summary>
        /// 6MonthWallStreetJournalLIBORRateDaily
        /// </summary>
        [EnumMember(Value = "6MonthWallStreetJournalLIBORRateDaily")]
        n6MonthWallStreetJournalLIBORRateDaily = 46,
        /// <summary>
        /// 1MonthWallStreetJournalLIBORRateDaily
        /// </summary>
        [EnumMember(Value = "1MonthWallStreetJournalLIBORRateDaily")]
        n1MonthWallStreetJournalLIBORRateDaily = 47,
        /// <summary>
        /// 3MonthWallStreetJournalLIBORRateMonthly
        /// </summary>
        [EnumMember(Value = "3MonthWallStreetJournalLIBORRateMonthly")]
        n3MonthWallStreetJournalLIBORRateMonthly = 48,
        /// <summary>
        /// 1MonthCDRateSecondaryMarketWeeklyAverage
        /// </summary>
        [EnumMember(Value = "1MonthCDRateSecondaryMarketWeeklyAverage")]
        n1MonthCDRateSecondaryMarketWeeklyAverage = 49,
        /// <summary>
        /// 6MonthBritishBankersAssociationLIBORRateMonthly
        /// </summary>
        [EnumMember(Value = "6MonthBritishBankersAssociationLIBORRateMonthly")]
        n6MonthBritishBankersAssociationLIBORRateMonthly = 50,
        /// <summary>
        /// 6MonthTBillAuctionHighDiscountRate
        /// </summary>
        [EnumMember(Value = "6MonthTBillAuctionHighDiscountRate")]
        n6MonthTBillAuctionHighDiscountRate = 51,
        /// <summary>
        /// 3MonthBritishBankersAssociationLIBORRateMonthly
        /// </summary>
        [EnumMember(Value = "3MonthBritishBankersAssociationLIBORRateMonthly")]
        n3MonthBritishBankersAssociationLIBORRateMonthly = 52,
        /// <summary>
        /// 1MonthBritishBankersAssociationLIBORRateMonthly
        /// </summary>
        [EnumMember(Value = "1MonthBritishBankersAssociationLIBORRateMonthly")]
        n1MonthBritishBankersAssociationLIBORRateMonthly = 53,
        /// <summary>
        /// 1YearTreasuryConstantMaturitiesMonthlyAverageCumulativeAveragefortheprevious12MonthsMonthlyAverage
        /// </summary>
        [EnumMember(Value = "1YearTreasuryConstantMaturitiesMonthlyAverageCumulativeAveragefortheprevious12MonthsMonthlyAverage")]
        n1YearTreasuryConstantMaturitiesMonthlyAverageCumulativeAveragefortheprevious12MonthsMonthlyAverage = 54,
        /// <summary>
        /// 30year60DayConventionalFixedRateDaily
        /// </summary>
        [EnumMember(Value = "30year60DayConventionalFixedRateDaily")]
        n30year60DayConventionalFixedRateDaily = 55,
        /// <summary>
        /// 1YearWallStreetJournalLIBORRateDaily
        /// </summary>
        [EnumMember(Value = "1YearWallStreetJournalLIBORRateDaily")]
        n1YearWallStreetJournalLIBORRateDaily = 56,
        /// <summary>
        /// FreddieMacCostofFunds
        /// </summary>
        FreddieMacCostofFunds = 57,
        /// <summary>
        /// 12MonthRollingAverageOfThe3MonthCD
        /// </summary>
        [EnumMember(Value = "12MonthRollingAverageOfThe3MonthCD")]
        n12MonthRollingAverageOfThe3MonthCD = 58,
        /// <summary>
        /// 6MonthTreasuryConstantMaturitiesWeeklyAverage
        /// </summary>
        [EnumMember(Value = "6MonthTreasuryConstantMaturitiesWeeklyAverage")]
        n6MonthTreasuryConstantMaturitiesWeeklyAverage = 59,
        /// <summary>
        /// 30DayFRMConvPostedYieldAA
        /// </summary>
        [EnumMember(Value = "30DayFRMConvPostedYieldAA")]
        n30DayFRMConvPostedYieldAA = 60,
        /// <summary>
        /// 30DayFRMConvPostedYieldSA
        /// </summary>
        [EnumMember(Value = "30DayFRMConvPostedYieldSA")]
        n30DayFRMConvPostedYieldSA = 61,
        /// <summary>
        /// 60DayFRMConvPostedYieldAA
        /// </summary>
        [EnumMember(Value = "60DayFRMConvPostedYieldAA")]
        n60DayFRMConvPostedYieldAA = 62,
        /// <summary>
        /// 60DayFRMConvPostedYieldSA
        /// </summary>
        [EnumMember(Value = "60DayFRMConvPostedYieldSA")]
        n60DayFRMConvPostedYieldSA = 63,
        /// <summary>
        /// 6MonthTreasuryConstantMaturitySecDaily
        /// </summary>
        [EnumMember(Value = "6MonthTreasuryConstantMaturitySecDaily")]
        n6MonthTreasuryConstantMaturitySecDaily = 64,
        /// <summary>
        /// 3MonthWallStreetJournalLIBORrateDaily
        /// </summary>
        [EnumMember(Value = "3MonthWallStreetJournalLIBORrateDaily")]
        n3MonthWallStreetJournalLIBORrateDaily = 65,
        /// <summary>
        /// WellsFargoPrimeRateOnBloomberg
        /// </summary>
        WellsFargoPrimeRateOnBloomberg = 66,
        /// <summary>
        /// 1MonthBritishBankersAssociationLIBOROnBloomberg
        /// </summary>
        [EnumMember(Value = "1MonthBritishBankersAssociationLIBOROnBloomberg")]
        n1MonthBritishBankersAssociationLIBOROnBloomberg = 67,
        /// <summary>
        /// 3MonthBritishBankersAssociationLIBOROnBloomberg
        /// </summary>
        [EnumMember(Value = "3MonthBritishBankersAssociationLIBOROnBloomberg")]
        n3MonthBritishBankersAssociationLIBOROnBloomberg = 68,
        /// <summary>
        /// 1YearWallStreetJournalLIBORRateMonthly
        /// </summary>
        [EnumMember(Value = "1YearWallStreetJournalLIBORRateMonthly")]
        n1YearWallStreetJournalLIBORRateMonthly = 69,
        /// <summary>
        /// 10YearTreasuryConstantMaturityMonthly
        /// </summary>
        [EnumMember(Value = "10YearTreasuryConstantMaturityMonthly")]
        n10YearTreasuryConstantMaturityMonthly = 70,
        /// <summary>
        /// 1YearCMTMonthlyLast4WeeksofWeekly
        /// </summary>
        [EnumMember(Value = "1YearCMTMonthlyLast4WeeksofWeekly")]
        n1YearCMTMonthlyLast4WeeksofWeekly = 71,
        /// <summary>
        /// 5YearCMTMonthlyLast4WeeksofWeekly
        /// </summary>
        [EnumMember(Value = "5YearCMTMonthlyLast4WeeksofWeekly")]
        n5YearCMTMonthlyLast4WeeksofWeekly = 72,
        /// <summary>
        /// 1MonthTreasuryConstantMaturityWeekly
        /// </summary>
        [EnumMember(Value = "1MonthTreasuryConstantMaturityWeekly")]
        n1MonthTreasuryConstantMaturityWeekly = 73,
        /// <summary>
        /// 6MonthBritishBankersLIBORDaily
        /// </summary>
        [EnumMember(Value = "6MonthBritishBankersLIBORDaily")]
        n6MonthBritishBankersLIBORDaily = 74,
        /// <summary>
        /// WellsFargoCostofSavingsMonthly
        /// </summary>
        WellsFargoCostofSavingsMonthly = 75,
        /// <summary>
        /// 30-Day Average SOFR
        /// </summary>
        [Description("30-Day Average SOFR")]
        [EnumMember(Value = "SOFR_30DayAvg")]
        SOFR30DayAvg = 76
    }
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using OLEDB.Test.ModuleCore;
using System.IO;
using System.Text;
using XmlCoreTest.Common;
namespace System.Xml.Tests
{
/// <summary>
/// Functional tests for XmlReader.ReadContentAsBase64 over the reader implementations
/// supplied by the test harness (DataReader). The Base64 test file contains elements
/// whose text is the base64 encoding of UTF-16 character data, so decoded bytes are
/// reassembled into chars two bytes at a time for comparison.
/// </summary>
[InheritRequired()]
public abstract partial class TCReadContentAsBase64 : TCXMLReaderBaseGeneral
{
    public const string ST_ELEM_NAME1 = "ElemAll";
    public const string ST_ELEM_NAME2 = "ElemEmpty";
    public const string ST_ELEM_NAME3 = "ElemNum";
    public const string ST_ELEM_NAME4 = "ElemText";
    public const string ST_ELEM_NAME5 = "ElemNumText";
    public const string ST_ELEM_NAME6 = "ElemLong";
    public const string Base64Xml = "Base64.xml";
    public const string strTextBase64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
    public const string strNumBase64 = "0123456789+/";
    public override int Init(object objParam)
    {
        int ret = base.Init(objParam);
        CreateTestFile(EREADER_TYPE.BASE64_TEST);
        return ret;
    }
    public override int Terminate(object objParam)
    {
        DataReader.Close();
        return base.Terminate(objParam);
    }
    // Positions inside ElemAll's content and verifies that ReadContentAsBase64 with the
    // given (index, count) arguments throws exactly the expected exception type.
    private bool VerifyInvalidReadBase64(int iBufferSize, int iIndex, int iCount, Type exceptionType)
    {
        bool bPassed = false;
        byte[] buffer = new byte[iBufferSize];
        ReloadSource(EREADER_TYPE.BASE64_TEST);
        DataReader.PositionOnElement(ST_ELEM_NAME1);
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return true;
        try
        {
            DataReader.ReadContentAsBase64(buffer, iIndex, iCount);
        }
        catch (Exception e)
        {
            CError.WriteLine("Actual exception:{0}", e.GetType().ToString());
            CError.WriteLine("Expected exception:{0}", exceptionType.ToString());
            bPassed = (e.GetType().ToString() == exceptionType.ToString());
        }
        return bPassed;
    }
    // Verifies ReadContentAsBase64 throws InvalidOperationException (naming the node type)
    // when the reader is positioned on a node type that cannot supply content.
    protected void TestOnInvalidNodeType(XmlNodeType nt)
    {
        ReloadSource();
        PositionOnNodeType(nt);
        if (CheckCanReadBinaryContent()) return;
        try
        {
            byte[] buffer = new byte[1];
            int nBytes = DataReader.ReadContentAsBase64(buffer, 0, 1);
        }
        catch (InvalidOperationException ioe)
        {
            if (ioe.ToString().IndexOf(nt.ToString()) < 0)
                CError.Compare(false, "Call threw wrong invalid operation exception on " + nt);
            else
                return;
        }
        CError.Compare(false, "Call succeeded on " + nt);
    }
    // Verifies ReadContentAsBase64 is a no-op (returns 0, reader unmoved) on node types
    // where the call is defined but there is nothing to decode.
    protected void TestOnNopNodeType(XmlNodeType nt)
    {
        ReloadSource();
        PositionOnNodeType(nt);
        string name = DataReader.Name;
        string value = DataReader.Value;
        CError.WriteLine("Name=" + name);
        CError.WriteLine("Value=" + value);
        if (CheckCanReadBinaryContent()) return;
        byte[] buffer = new byte[1];
        int nBytes = DataReader.ReadContentAsBase64(buffer, 0, 1);
        CError.Compare(nBytes, 0, "nBytes");
        CError.Compare(DataReader.VerifyNode(nt, name, value), "vn");
        CError.WriteLine("Succeeded:{0}", nt);
    }
    ////////////////////////////////////////////////////////////////
    // Variations
    ////////////////////////////////////////////////////////////////
    [Variation("ReadBase64 Element with all valid value")]
    public int TestReadBase64_1()
    {
        int base64len = 0;
        byte[] base64 = new byte[1000];
        ReloadSource(EREADER_TYPE.BASE64_TEST);
        DataReader.PositionOnElement(ST_ELEM_NAME1);
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        base64len = DataReader.ReadContentAsBase64(base64, 0, base64.Length);
        string strActbase64 = "";
        // Decoded bytes are UTF-16 code units: rebuild chars from byte pairs.
        for (int i = 0; i < base64len; i = i + 2)
        {
            strActbase64 += System.BitConverter.ToChar(base64, i);
        }
        CError.Compare(strActbase64, (strTextBase64 + strNumBase64), "Compare All Valid Base64");
        return TEST_PASS;
    }
    [Variation("ReadBase64 Element with all valid Num value", Pri = 0)]
    public int TestReadBase64_2()
    {
        int base64len = 0;
        byte[] base64 = new byte[1000];
        ReloadSource(EREADER_TYPE.BASE64_TEST);
        DataReader.PositionOnElement(ST_ELEM_NAME3);
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        base64len = DataReader.ReadContentAsBase64(base64, 0, base64.Length);
        string strActbase64 = "";
        for (int i = 0; i < base64len; i = i + 2)
        {
            strActbase64 += System.BitConverter.ToChar(base64, i);
        }
        CError.Compare(strActbase64, strNumBase64, "Compare All Valid Base64");
        return TEST_PASS;
    }
    [Variation("ReadBase64 Element with all valid Text value")]
    public int TestReadBase64_3()
    {
        int base64len = 0;
        byte[] base64 = new byte[1000];
        ReloadSource(EREADER_TYPE.BASE64_TEST);
        DataReader.PositionOnElement(ST_ELEM_NAME4);
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        base64len = DataReader.ReadContentAsBase64(base64, 0, base64.Length);
        string strActbase64 = "";
        for (int i = 0; i < base64len; i = i + 2)
        {
            strActbase64 += System.BitConverter.ToChar(base64, i);
        }
        CError.Compare(strActbase64, strTextBase64, "Compare All Valid Base64");
        return TEST_PASS;
    }
    [Variation("ReadBase64 Element with all valid value (from concatenation), Pri=0")]
    public int TestReadBase64_5()
    {
        int base64len = 0;
        byte[] base64 = new byte[1000];
        ReloadSource(EREADER_TYPE.BASE64_TEST);
        DataReader.PositionOnElement(ST_ELEM_NAME5);
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        base64len = DataReader.ReadContentAsBase64(base64, 0, base64.Length);
        string strActbase64 = "";
        for (int i = 0; i < base64len; i = i + 2)
        {
            strActbase64 += System.BitConverter.ToChar(base64, i);
        }
        CError.Compare(strActbase64, (strTextBase64 + strNumBase64), "Compare All Valid Base64");
        return TEST_PASS;
    }
    [Variation("ReadBase64 Element with Long valid value (from concatenation), Pri=0")]
    public int TestReadBase64_6()
    {
        int base64len = 0;
        byte[] base64 = new byte[2000];
        ReloadSource(EREADER_TYPE.BASE64_TEST);
        DataReader.PositionOnElement(ST_ELEM_NAME6);
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        base64len = DataReader.ReadContentAsBase64(base64, 0, base64.Length);
        string strActbase64 = "";
        for (int i = 0; i < base64len; i = i + 2)
        {
            strActbase64 += System.BitConverter.ToChar(base64, i);
        }
        // ElemLong holds the text+num payload repeated 10 times.
        string strExpbase64 = "";
        for (int i = 0; i < 10; i++)
            strExpbase64 += (strTextBase64 + strNumBase64);
        CError.Compare(strActbase64, strExpbase64, "Compare All Valid Base64");
        return TEST_PASS;
    }
    [Variation("ReadBase64 with count > buffer size")]
    public int ReadBase64_7()
    {
        return BoolToLTMResult(VerifyInvalidReadBase64(5, 0, 6, typeof(ArgumentOutOfRangeException)));
    }
    [Variation("ReadBase64 with count < 0")]
    public int ReadBase64_8()
    {
        return BoolToLTMResult(VerifyInvalidReadBase64(5, 2, -1, typeof(ArgumentOutOfRangeException)));
    }
    [Variation("ReadBase64 with index > buffer size")]
    public int ReadBase64_9()
    {
        return BoolToLTMResult(VerifyInvalidReadBase64(5, 5, 1, typeof(ArgumentOutOfRangeException)));
    }
    [Variation("ReadBase64 with index < 0")]
    public int ReadBase64_10()
    {
        return BoolToLTMResult(VerifyInvalidReadBase64(5, -1, 1, typeof(ArgumentOutOfRangeException)));
    }
    [Variation("ReadBase64 with index + count exceeds buffer")]
    public int ReadBase64_11()
    {
        return BoolToLTMResult(VerifyInvalidReadBase64(5, 0, 10, typeof(ArgumentOutOfRangeException)));
    }
    [Variation("ReadBase64 index & count =0")]
    public int ReadBase64_12()
    {
        byte[] buffer = new byte[5];
        int iCount = 0;
        ReloadSource(EREADER_TYPE.BASE64_TEST);
        DataReader.PositionOnElement(ST_ELEM_NAME1);
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        iCount = DataReader.ReadContentAsBase64(buffer, 0, 0);
        CError.Compare(iCount, 0, "has to be zero");
        return TEST_PASS;
    }
    [Variation("ReadBase64 Element multiple into same buffer (using offset), Pri=0")]
    public int TestReadBase64_13()
    {
        int base64len = 20;
        byte[] base64 = new byte[base64len];
        ReloadSource(EREADER_TYPE.BASE64_TEST);
        DataReader.PositionOnElement(ST_ELEM_NAME4);
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        string strActbase64 = "";
        // Read two bytes (one UTF-16 char) per call at increasing offsets and compare
        // each decoded char against the corresponding position in the source text.
        for (int i = 0; i < base64len; i = i + 2)
        {
            DataReader.ReadContentAsBase64(base64, i, 2);
            strActbase64 = (System.BitConverter.ToChar(base64, i)).ToString();
            CError.Compare(String.Compare(strActbase64, 0, strTextBase64, i / 2, 1), 0, "Compare All Valid Base64");
        }
        return TEST_PASS;
    }
    [Variation("ReadBase64 with buffer == null")]
    public int TestReadBase64_14()
    {
        ReloadSource(EREADER_TYPE.BASE64_TEST);
        DataReader.PositionOnElement(ST_ELEM_NAME4);
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        try
        {
            DataReader.ReadContentAsBase64(null, 0, 0);
        }
        catch (ArgumentNullException)
        {
            return TEST_PASS;
        }
        return TEST_FAIL;
    }
    [Variation("ReadBase64 after failure")]
    public int TestReadBase64_15()
    {
        ReloadSource(EREADER_TYPE.BASE64_TEST);
        DataReader.PositionOnElement("ElemErr");
        DataReader.Read();
        var line = ((IXmlLineInfo)DataReader.Internal).LinePosition;
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        byte[] buffer = new byte[10];
        int nRead = 0;
        try
        {
            nRead = DataReader.ReadContentAsBase64(buffer, 0, 1);
            return TEST_FAIL;
        }
        catch (XmlException e)
        {
            if (IsXmlNodeReader() || IsXmlNodeReaderDataDoc() || IsXPathNavigatorReader() || IsXmlValidatingReader() || IsCharCheckingReader())
                CheckException("Xml_InvalidBase64Value", e);
            else
            {
                CheckXmlException("Xml_UserException", e, 1, line);
            }
        }
        return TEST_PASS;
    }
    [Variation("Read after partial ReadBase64", Pri = 0)]
    public int TestReadBase64_16()
    {
        ReloadSource(EREADER_TYPE.BASE64_TEST);
        DataReader.PositionOnElement("ElemNum");
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        byte[] buffer = new byte[10];
        int nRead = DataReader.ReadContentAsBase64(buffer, 0, 8);
        CError.Compare(nRead, 8, "0");
        DataReader.Read();
        CError.Compare(DataReader.NodeType, XmlNodeType.Element, "1vn");
        return TEST_PASS;
    }
    [Variation("Current node on multiple calls")]
    public int TestReadBase64_17()
    {
        ReloadSource(EREADER_TYPE.BASE64_TEST);
        DataReader.PositionOnElement("ElemNum");
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        byte[] buffer = new byte[30];
        int nRead = DataReader.ReadContentAsBase64(buffer, 0, 2);
        CError.Compare(nRead, 2, "0");
        nRead = DataReader.ReadContentAsBase64(buffer, 0, 23);
        CError.Compare(nRead, 22, "1");
        DataReader.Read();
        CError.Compare(DataReader.NodeType, XmlNodeType.Element, "Nodetype not end element");
        CError.Compare(DataReader.Name, "ElemText", "Nodetype not end element");
        return TEST_PASS;
    }
    [Variation("ReadBase64 with incomplete sequence")]
    public int TestTextReadBase64_23()
    {
        byte[] expected = new byte[] { 0, 16, 131, 16, 81 };
        byte[] buffer = new byte[10];
        string strxml = "<r><ROOT>ABCDEFG</ROOT></r>";
        ReloadSourceStr(strxml);
        DataReader.PositionOnElement("ROOT");
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        int result = 0;
        int nRead;
        while ((nRead = DataReader.ReadContentAsBase64(buffer, result, 1)) > 0)
            result += nRead;
        CError.Compare(result, expected.Length, "res");
        for (int i = 0; i < result; i++)
            CError.Compare(buffer[i], expected[i], "buffer[" + i + "]");
        return TEST_PASS;
    }
    [Variation("ReadBase64 when end tag doesn't exist")]
    public int TestTextReadBase64_24()
    {
        if (IsRoundTrippedReader())
            return TEST_SKIPPED;
        byte[] buffer = new byte[5000];
        string strxml = "<B>" + new string('c', 5000);
        ReloadSourceStr(strxml);
        DataReader.PositionOnElement("B");
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        try
        {
            DataReader.ReadContentAsBase64(buffer, 0, 5000);
            CError.WriteLine("Accepted incomplete element");
            return TEST_FAIL;
        }
        catch (XmlException e)
        {
            CheckXmlException("Xml_UnexpectedEOFInElementContent", e, 1, 5004);
        }
        return TEST_PASS;
    }
    [Variation("ReadBase64 with whitespace in the middle")]
    public int TestTextReadBase64_26()
    {
        byte[] buffer = new byte[1];
        string strxml = "<abc> AQID B B </abc>";
        int nRead;
        ReloadSourceStr(strxml);
        DataReader.PositionOnElement("abc");
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        for (int i = 0; i < 4; i++)
        {
            nRead = DataReader.ReadContentAsBase64(buffer, 0, 1);
            CError.Compare(nRead, 1, "res" + i);
            CError.Compare(buffer[0], (byte)(i + 1), "buffer " + i);
        }
        nRead = DataReader.ReadContentAsBase64(buffer, 0, 1);
        CError.Compare(nRead, 0, "nRead 0");
        return TEST_PASS;
    }
    [Variation("ReadBase64 with = in the middle")]
    public int TestTextReadBase64_27()
    {
        byte[] buffer = new byte[1];
        string strxml = "<abc>AQI=ID</abc>";
        int nRead;
        ReloadSourceStr(strxml);
        DataReader.PositionOnElement("abc");
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        for (int i = 0; i < 2; i++)
        {
            nRead = DataReader.ReadContentAsBase64(buffer, 0, 1);
            CError.Compare(nRead, 1, "res" + i);
            CError.Compare(buffer[0], (byte)(i + 1), "buffer " + i);
        }
        try
        {
            DataReader.ReadContentAsBase64(buffer, 0, 1);
            CError.WriteLine("ReadBase64 with = in the middle succeeded");
            return TEST_FAIL;
        }
        catch (XmlException e)
        {
            if (IsXmlNodeReader() || IsXmlNodeReaderDataDoc() || IsXPathNavigatorReader() || IsXmlValidatingReader() || IsCharCheckingReader())
                CheckException("Xml_InvalidBase64Value", e);
            else
                CheckXmlException("Xml_UserException", e, 1, 6);
        }
        return TEST_PASS;
    }
    //[Variation("ReadBase64 runs into an Overflow", Params = new object[] { "10000" })]
    //[Variation("ReadBase64 runs into an Overflow", Params = new object[] { "1000000" })]
    //[Variation("ReadBase64 runs into an Overflow", Params = new object[] { "10000000" })]
    public int ReadBase64BufferOverflowWorksProperly()
    {
        int totalfilesize = Convert.ToInt32(CurVariation.Params[0].ToString());
        CError.WriteLine(" totalfilesize = " + totalfilesize);
        string ascii = new string('c', totalfilesize);
        byte[] bits = Encoding.Unicode.GetBytes(ascii);
        CError.WriteLineIgnore("Count = " + bits.Length);
        string base64str = Convert.ToBase64String(bits);
        string fileName = "bug105376_" + CurVariation.Params[0].ToString() + ".xml";
        MemoryStream mems = new MemoryStream();
        StreamWriter sw = new StreamWriter(mems);
        {
            sw.Write("<root><base64>");
            sw.Write(base64str);
            sw.Write("</base64></root>");
            // FIX: StreamWriter buffers its output; without this flush the document may
            // never reach the MemoryStream before it is registered and re-read below.
            // The writer is deliberately not disposed, since disposing it would also
            // close the MemoryStream that FilePathUtil keeps.
            sw.Flush();
        }
        FilePathUtil.addStream(fileName, mems);
        ReloadSource(fileName);
        int SIZE = (totalfilesize - 30);
        int SIZE64 = SIZE * 3 / 4;
        DataReader.PositionOnElement("base64");
        DataReader.Read();
        if (CheckCanReadBinaryContent()) return TEST_PASS;
        CError.WriteLine("ReadBase64 method... ");
        CError.WriteLine(System.Int32.MaxValue);
        byte[] base64 = new byte[SIZE64];
        CError.WriteLine("SIZE64 = {0}", base64.Length);
        int startPos = 0;
        int readSize = 4096;
        int currentSize = 0;
        currentSize = DataReader.ReadContentAsBase64(base64, startPos, readSize);
        CError.Compare(currentSize, readSize, "Read other than first chunk");
        readSize = SIZE64 - readSize;
        currentSize = DataReader.ReadContentAsBase64(base64, startPos, readSize);
        CError.Compare(currentSize, readSize, "Read other than remaining Chunk Size");
        readSize = 0;
        currentSize = DataReader.ReadContentAsBase64(base64, startPos, readSize);
        CError.Compare(currentSize, 0, "Read other than Zero Bytes");
        DataReader.Close();
        return TEST_PASS;
    }
}
[InheritRequired()]
public abstract partial class TCReadElementContentAsBase64 : TCXMLReaderBaseGeneral
{
public const string ST_ELEM_NAME1 = "ElemAll";
public const string ST_ELEM_NAME2 = "ElemEmpty";
public const string ST_ELEM_NAME3 = "ElemNum";
public const string ST_ELEM_NAME4 = "ElemText";
public const string ST_ELEM_NAME5 = "ElemNumText";
public const string ST_ELEM_NAME6 = "ElemLong";
private const string Base64Xml = "Base64.xml";
public const string strTextBase64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz";
public const string strNumBase64 = "0123456789+/";
// Per-class setup: run the base initialization, then materialize the Base64 test file.
public override int Init(object objParam)
{
    int baseResult = base.Init(objParam);
    CreateTestFile(EREADER_TYPE.BASE64_TEST);
    return baseResult;
}
// Per-class teardown: close the shared reader before delegating to the base teardown.
public override int Terminate(object objParam)
{
DataReader.Close();
return base.Terminate(objParam);
}
// Positions on ElemAll and verifies that ReadElementContentAsBase64 with the given
// (index, count) arguments throws exactly the expected exception type.
// FIX: this class tests ReadElementContentAsBase64 (see every variation below and
// TestOnInvalidNodeType), and the reader is left positioned ON the element rather than
// read into its content, yet the helper previously invoked ReadContentAsBase64.
// Both overloads perform identical argument validation (ArgumentOutOfRangeException /
// ArgumentNullException), so the pass criteria are unchanged.
private bool VerifyInvalidReadBase64(int iBufferSize, int iIndex, int iCount, Type exceptionType)
{
    bool bPassed = false;
    byte[] buffer = new byte[iBufferSize];
    ReloadSource(EREADER_TYPE.BASE64_TEST);
    DataReader.PositionOnElement(ST_ELEM_NAME1);
    if (CheckCanReadBinaryContent()) return true;
    try
    {
        DataReader.ReadElementContentAsBase64(buffer, iIndex, iCount);
    }
    catch (Exception e)
    {
        CError.WriteLine("Actual exception:{0}", e.GetType().ToString());
        CError.WriteLine("Expected exception:{0}", exceptionType.ToString());
        bPassed = (e.GetType().ToString() == exceptionType.ToString());
    }
    return bPassed;
}
// Positions the reader on the given node type and verifies that
// ReadElementContentAsBase64 rejects it with an InvalidOperationException whose
// text names the offending node type.
protected void TestOnInvalidNodeType(XmlNodeType nt)
{
    ReloadSource();
    PositionOnNodeType(nt);
    if (CheckCanReadBinaryContent())
        return;
    try
    {
        byte[] scratch = new byte[1];
        int bytesRead = DataReader.ReadElementContentAsBase64(scratch, 0, 1);
    }
    catch (InvalidOperationException ioe)
    {
        // Same culture-sensitive containment check as before: the exception text
        // must mention the node type we positioned on.
        if (ioe.ToString().IndexOf(nt.ToString()) >= 0)
            return;
        CError.Compare(false, "Call threw wrong invalid operation exception on " + nt);
    }
    CError.Compare(false, "Call succeeded on " + nt);
}
////////////////////////////////////////////////////////////////
// Variations
////////////////////////////////////////////////////////////////
// Decodes ElemAll in one call and checks the reassembled UTF-16 text equals the
// full alphabet + numeric payload.
[Variation("ReadBase64 Element with all valid value")]
public int TestReadBase64_1()
{
    byte[] decoded = new byte[1000];
    ReloadSource(EREADER_TYPE.BASE64_TEST);
    DataReader.PositionOnElement(ST_ELEM_NAME1);
    if (CheckCanReadBinaryContent()) return TEST_PASS;
    int decodedLength = DataReader.ReadElementContentAsBase64(decoded, 0, decoded.Length);
    // Decoded bytes are UTF-16 code units: rebuild chars from consecutive byte pairs.
    StringBuilder actual = new StringBuilder();
    for (int offset = 0; offset < decodedLength; offset += 2)
        actual.Append(System.BitConverter.ToChar(decoded, offset));
    CError.Compare(actual.ToString(), (strTextBase64 + strNumBase64), "Compare All Valid Base64");
    return TEST_PASS;
}
// Decodes ElemNum and checks the reassembled UTF-16 text equals the numeric payload.
[Variation("ReadBase64 Element with all valid Num value", Pri = 0)]
public int TestReadBase64_2()
{
    byte[] decoded = new byte[1000];
    ReloadSource(EREADER_TYPE.BASE64_TEST);
    DataReader.PositionOnElement(ST_ELEM_NAME3);
    if (CheckCanReadBinaryContent()) return TEST_PASS;
    int decodedLength = DataReader.ReadElementContentAsBase64(decoded, 0, decoded.Length);
    // Rebuild UTF-16 chars from consecutive byte pairs.
    StringBuilder actual = new StringBuilder();
    for (int offset = 0; offset < decodedLength; offset += 2)
        actual.Append(System.BitConverter.ToChar(decoded, offset));
    CError.Compare(actual.ToString(), strNumBase64, "Compare All Valid Base64");
    return TEST_PASS;
}
// Decodes ElemText and checks the reassembled UTF-16 text equals the alphabetic payload.
[Variation("ReadBase64 Element with all valid Text value")]
public int TestReadBase64_3()
{
    byte[] decoded = new byte[1000];
    ReloadSource(EREADER_TYPE.BASE64_TEST);
    DataReader.PositionOnElement(ST_ELEM_NAME4);
    if (CheckCanReadBinaryContent()) return TEST_PASS;
    int decodedLength = DataReader.ReadElementContentAsBase64(decoded, 0, decoded.Length);
    // Rebuild UTF-16 chars from consecutive byte pairs.
    StringBuilder actual = new StringBuilder();
    for (int offset = 0; offset < decodedLength; offset += 2)
        actual.Append(System.BitConverter.ToChar(decoded, offset));
    CError.Compare(actual.ToString(), strTextBase64, "Compare All Valid Base64");
    return TEST_PASS;
}
// Decodes ElemNumText (concatenated content) and checks the reassembled text equals
// the alphabetic + numeric payload.
[Variation("ReadBase64 Element with all valid value (from concatenation), Pri=0")]
public int TestReadBase64_5()
{
    byte[] decoded = new byte[1000];
    ReloadSource(EREADER_TYPE.BASE64_TEST);
    DataReader.PositionOnElement(ST_ELEM_NAME5);
    if (CheckCanReadBinaryContent()) return TEST_PASS;
    int decodedLength = DataReader.ReadElementContentAsBase64(decoded, 0, decoded.Length);
    // Rebuild UTF-16 chars from consecutive byte pairs.
    StringBuilder actual = new StringBuilder();
    for (int offset = 0; offset < decodedLength; offset += 2)
        actual.Append(System.BitConverter.ToChar(decoded, offset));
    CError.Compare(actual.ToString(), (strTextBase64 + strNumBase64), "Compare All Valid Base64");
    return TEST_PASS;
}
// Decodes ElemLong and checks the reassembled text equals the alphabetic + numeric
// payload repeated ten times.
[Variation("ReadBase64 Element with Long valid value (from concatenation), Pri=0")]
public int TestReadBase64_6()
{
    byte[] decoded = new byte[2000];
    ReloadSource(EREADER_TYPE.BASE64_TEST);
    DataReader.PositionOnElement(ST_ELEM_NAME6);
    if (CheckCanReadBinaryContent()) return TEST_PASS;
    int decodedLength = DataReader.ReadElementContentAsBase64(decoded, 0, decoded.Length);
    // Rebuild UTF-16 chars from consecutive byte pairs.
    StringBuilder actual = new StringBuilder();
    for (int offset = 0; offset < decodedLength; offset += 2)
        actual.Append(System.BitConverter.ToChar(decoded, offset));
    // ElemLong holds the text+num payload repeated 10 times.
    StringBuilder expected = new StringBuilder();
    for (int rep = 0; rep < 10; rep++)
        expected.Append(strTextBase64).Append(strNumBase64);
    CError.Compare(actual.ToString(), expected.ToString(), "Compare All Valid Base64");
    return TEST_PASS;
}
[Variation("ReadBase64 with count > buffer size")]
public int ReadBase64_7()
{
    // count (6) exceeds the 5-byte buffer: expect ArgumentOutOfRangeException.
    bool passed = VerifyInvalidReadBase64(5, 0, 6, typeof(ArgumentOutOfRangeException));
    return BoolToLTMResult(passed);
}
[Variation("ReadBase64 with count < 0")]
public int ReadBase64_8()
{
return BoolToLTMResult(VerifyInvalidReadBase64(5, 2, -1, typeof(ArgumentOutOfRangeException)));
}
[Variation("ReadBase64 with index > buffer size")]
public int ReadBase64_9()
{
return BoolToLTMResult(VerifyInvalidReadBase64(5, 5, 1, typeof(ArgumentOutOfRangeException)));
}
[Variation("ReadBase64 with index < 0")]
public int ReadBase64_10()
{
return BoolToLTMResult(VerifyInvalidReadBase64(5, -1, 1, typeof(ArgumentOutOfRangeException)));
}
[Variation("ReadBase64 with index + count exceeds buffer")]
public int ReadBase64_11()
{
return BoolToLTMResult(VerifyInvalidReadBase64(5, 0, 10, typeof(ArgumentOutOfRangeException)));
}
[Variation("ReadBase64 index & count =0")]
public int ReadBase64_12()
{
byte[] buffer = new byte[5];
int iCount = 0;
ReloadSource(EREADER_TYPE.BASE64_TEST);
DataReader.PositionOnElement(ST_ELEM_NAME1);
if (CheckCanReadBinaryContent()) return TEST_PASS;
iCount = DataReader.ReadElementContentAsBase64(buffer, 0, 0);
CError.Compare(iCount, 0, "has to be zero");
return TEST_PASS;
}
// Reads the element in 2-byte chunks into successive offsets of one shared buffer, and after
// each chunk compares the reconstructed char with the corresponding char of strTextBase64.
[Variation("ReadBase64 Element multiple into same buffer (using offset), Pri=0")]
public int TestReadBase64_13()
{
int base64len = 20;
byte[] base64 = new byte[base64len];
ReloadSource(EREADER_TYPE.BASE64_TEST);
DataReader.PositionOnElement(ST_ELEM_NAME4);
if (CheckCanReadBinaryContent()) return TEST_PASS;
string strActbase64 = "";
for (int i = 0; i < base64len; i = i + 2)
{
// Each call appends 2 decoded bytes at offset i of the same buffer.
DataReader.ReadElementContentAsBase64(base64, i, 2);
strActbase64 = (System.BitConverter.ToChar(base64, i)).ToString();
// i/2 is the char index in the expected string (2 bytes per char).
CError.Compare(String.Compare(strActbase64, 0, strTextBase64, i / 2, 1), 0, "Compare All Valid Base64");
}
return TEST_PASS;
}
[Variation("ReadBase64 with buffer == null")]
public int TestReadBase64_14()
{
    // A null destination buffer must be rejected with ArgumentNullException.
    ReloadSource(EREADER_TYPE.BASE64_TEST);
    DataReader.PositionOnElement(ST_ELEM_NAME4);
    if (CheckCanReadBinaryContent())
        return TEST_PASS;

    bool threwArgumentNull = false;
    try
    {
        DataReader.ReadElementContentAsBase64(null, 0, 0);
    }
    catch (ArgumentNullException)
    {
        threwArgumentNull = true;
    }
    return threwArgumentNull ? TEST_PASS : TEST_FAIL;
}
// Positions on "ElemErr", whose content is not valid base64, and verifies the decode attempt
// throws XmlException with the expected error resource / position.
[Variation("ReadBase64 after failure")]
public int TestReadBase64_15()
{
ReloadSource(EREADER_TYPE.BASE64_TEST);
DataReader.PositionOnElement("ElemErr");
if (CheckCanReadBinaryContent()) return TEST_PASS;
// Expected error column: current position plus the element name length plus one.
// NOTE(review): assumes the underlying reader exposes IXmlLineInfo — confirm for all reader types.
var line = ((IXmlLineInfo)DataReader.Internal).LinePosition + "ElemErr".Length + 1;
byte[] buffer = new byte[10];
int nRead = 0;
try
{
nRead = DataReader.ReadElementContentAsBase64(buffer, 0, 1);
// Reaching here means the invalid content was accepted — fail.
return TEST_FAIL;
}
catch (XmlException e)
{
// Wrapping readers surface the raw base64 error; others wrap it as a user exception
// with line/position info.
if (IsXmlNodeReader() || IsXmlNodeReaderDataDoc() || IsXPathNavigatorReader() || IsXmlValidatingReader() || IsCharCheckingReader())
CheckException("Xml_InvalidBase64Value", e);
else
CheckXmlException("Xml_UserException", e, 1, line);
}
return TEST_PASS;
}
// After a partial base64 read, a plain Read() must resume normally on the remaining text node.
[Variation("Read after partial ReadBase64", Pri = 0)]
public int TestReadBase64_16()
{
ReloadSource(EREADER_TYPE.BASE64_TEST);
DataReader.PositionOnElement("ElemNum");
if (CheckCanReadBinaryContent()) return TEST_PASS;
byte[] buffer = new byte[10];
int nRead = DataReader.ReadElementContentAsBase64(buffer, 0, 8);
CError.Compare(nRead, 8, "0");
DataReader.Read();
CError.Compare(DataReader.NodeType, XmlNodeType.Text, "1vn");
return TEST_PASS;
}
// Successive calls continue from where the previous one stopped; after the content is
// exhausted the reader must be positioned on the matching end element.
[Variation("Current node on multiple calls")]
public int TestReadBase64_17()
{
ReloadSource(EREADER_TYPE.BASE64_TEST);
DataReader.PositionOnElement("ElemNum");
if (CheckCanReadBinaryContent()) return TEST_PASS;
byte[] buffer = new byte[30];
int nRead = DataReader.ReadElementContentAsBase64(buffer, 0, 2);
CError.Compare(nRead, 2, "0");
// Only 22 bytes remain, so a request for 23 returns 22.
nRead = DataReader.ReadElementContentAsBase64(buffer, 0, 23);
CError.Compare(nRead, 22, "1");
CError.Compare(DataReader.NodeType, XmlNodeType.EndElement, "Nodetype not end element");
CError.Compare(DataReader.Name, "ElemNum", "Nodetype not end element");
return TEST_PASS;
}
// "ABCDEFG" is 7 base64 chars (not a multiple of 4); the decoder must still yield the 5 fully
// determined bytes, read here one byte per call.
[Variation("ReadBase64 with incomplete sequence")]
public int TestTextReadBase64_23()
{
byte[] expected = new byte[] { 0, 16, 131, 16, 81 };
byte[] buffer = new byte[10];
string strxml = "<r><ROOT>ABCDEFG</ROOT></r>";
ReloadSourceStr(strxml);
DataReader.PositionOnElement("ROOT");
if (CheckCanReadBinaryContent()) return TEST_PASS;
int result = 0;
int nRead;
while ((nRead = DataReader.ReadElementContentAsBase64(buffer, result, 1)) > 0)
result += nRead;
CError.Compare(result, expected.Length, "res");
for (int i = 0; i < result; i++)
CError.Compare(buffer[i], expected[i], "buffer[" + i + "]");
return TEST_PASS;
}
// Document with an unterminated element: decoding must hit unexpected EOF and report it
// as an XmlException at the end of the 5000-char content.
[Variation("ReadBase64 when end tag doesn't exist")]
public int TestTextReadBase64_24()
{
if (IsRoundTrippedReader())
return TEST_SKIPPED;
byte[] buffer = new byte[5000];
string strxml = "<B>" + new string('c', 5000);
ReloadSourceStr(strxml);
DataReader.PositionOnElement("B");
if (CheckCanReadBinaryContent()) return TEST_PASS;
try
{
DataReader.ReadElementContentAsBase64(buffer, 0, 5000);
CError.WriteLine("Accepted incomplete element");
return TEST_FAIL;
}
catch (XmlException e)
{
CheckXmlException("Xml_UnexpectedEOFInElementContent", e, 1, 5004);
}
return TEST_PASS;
}
// Whitespace inside the base64 text must be skipped transparently: " AQID B B " decodes to
// bytes 1..4, delivered one per call, then a final call returns 0.
[Variation("ReadBase64 with whitespace in the middle")]
public int TestTextReadBase64_26()
{
byte[] buffer = new byte[1];
string strxml = "<abc> AQID B B </abc>";
int nRead;
ReloadSourceStr(strxml);
DataReader.PositionOnElement("abc");
if (CheckCanReadBinaryContent()) return TEST_PASS;
for (int i = 0; i < 4; i++)
{
nRead = DataReader.ReadElementContentAsBase64(buffer, 0, 1);
CError.Compare(nRead, 1, "res" + i);
CError.Compare(buffer[0], (byte)(i + 1), "buffer " + i);
}
nRead = DataReader.ReadElementContentAsBase64(buffer, 0, 1);
CError.Compare(nRead, 0, "nRead 0");
return TEST_PASS;
}
// '=' terminates a base64 sequence; "AQI=" decodes to bytes 1,2 and the trailing "ID" after
// the padding must make a further read throw XmlException.
[Variation("ReadBase64 with = in the middle")]
public int TestTextReadBase64_27()
{
byte[] buffer = new byte[1];
string strxml = "<abc>AQI=ID</abc>";
int nRead;
ReloadSourceStr(strxml);
DataReader.PositionOnElement("abc");
if (CheckCanReadBinaryContent()) return TEST_PASS;
for (int i = 0; i < 2; i++)
{
nRead = DataReader.ReadElementContentAsBase64(buffer, 0, 1);
CError.Compare(nRead, 1, "res" + i);
CError.Compare(buffer[0], (byte)(i + 1), "buffer " + i);
}
try
{
DataReader.ReadElementContentAsBase64(buffer, 0, 1);
CError.WriteLine("ReadBase64 with = in the middle succeeded");
return TEST_FAIL;
}
catch (XmlException e)
{
// Same split as TestReadBase64_15: wrapping readers report the raw base64 error.
if (IsXmlNodeReader() || IsXmlNodeReaderDataDoc() || IsXPathNavigatorReader() || IsXmlValidatingReader() || IsCharCheckingReader())
CheckException("Xml_InvalidBase64Value", e);
else
CheckXmlException("Xml_UserException", e, 1, 6);
}
return TEST_PASS;
}
//[Variation("ReadBase64 runs into an Overflow", Params = new object[] { "10000" })]
//[Variation("ReadBase64 runs into an Overflow", Params = new object[] { "1000000" })]
//[Variation("ReadBase64 runs into an Overflow", Params = new object[] { "10000000" })]
// Builds a large in-memory document <root><base64>...</base64></root> whose payload size is
// driven by the variation parameter, then decodes it in chunks to exercise large reads.
public int ReadBase64RunsIntoOverflow()
{
    if (CheckCanReadBinaryContent() || IsSubtreeReader() || IsCharCheckingReader() || IsWrappedReader())
        return TEST_SKIPPED;
    int totalfilesize = Convert.ToInt32(CurVariation.Params[0].ToString());
    CError.WriteLine(" totalfilesize = " + totalfilesize);
    string ascii = new string('c', totalfilesize);
    byte[] bits = Encoding.Unicode.GetBytes(ascii);
    CError.WriteLineIgnore("Count = " + bits.Length);
    string base64str = Convert.ToBase64String(bits);
    string fileName = "bug105376_" + CurVariation.Params[0].ToString() + ".xml";
    MemoryStream mems = new MemoryStream();
    StreamWriter sw = new StreamWriter(mems);
    sw.Write("<root><base64>");
    sw.Write(base64str);
    sw.Write("</base64></root>");
    // BUGFIX: the writer buffers text internally; without a Flush the tail of the document
    // never reaches the MemoryStream before it is handed to FilePathUtil. Flush (not Dispose)
    // is used deliberately: disposing the StreamWriter would close the MemoryStream.
    sw.Flush();
    FilePathUtil.addStream(fileName, mems);
    ReloadSource(fileName);
    int SIZE = (totalfilesize - 30);
    int SIZE64 = SIZE * 3 / 4;
    DataReader.PositionOnElement("base64");
    CError.WriteLine("ReadBase64 method... ");
    CError.WriteLine(System.Int32.MaxValue);
    byte[] base64 = new byte[SIZE64];
    CError.WriteLine("SIZE64 = {0}", base64.Length);
    int startPos = 0;
    int readSize = 4096;
    int currentSize = 0;
    // First chunk must come back complete...
    currentSize = DataReader.ReadElementContentAsBase64(base64, startPos, readSize);
    CError.Compare(currentSize, readSize, "Read other than first chunk");
    // ...then the remainder in one call...
    readSize = SIZE64 - readSize;
    currentSize = DataReader.ReadElementContentAsBase64(base64, startPos, readSize);
    CError.Compare(currentSize, readSize, "Read other than remaining Chunk Size");
    // ...and a zero-count request must return zero.
    readSize = 0;
    currentSize = DataReader.ReadElementContentAsBase64(base64, startPos, readSize);
    CError.Compare(currentSize, 0, "Read other than Zero Bytes");
    DataReader.Close();
    return TEST_PASS;
}
[Variation("WS:WireCompat:hex binary fails to send/return data after 1787 bytes")]
// Streams the whole base64 payload of Bug99148.xml in 1K chunks and checks that the decimal
// rendering of all decoded bytes has the expected total length (regression for truncation).
public int TestReadBase64ReadsTheContent()
{
    string filename = Path.Combine(TestData, "Common", "Bug99148.xml");
    ReloadSource(filename);
    DataReader.MoveToContent();
    if (CheckCanReadBinaryContent()) return TEST_PASS;
    int bytes = -1;
    StringBuilder output = new StringBuilder();
    while (bytes != 0)
    {
        byte[] bbb = new byte[1024];
        bytes = DataReader.ReadElementContentAsBase64(bbb, 0, bbb.Length);
        for (int i = 0; i < bytes; i++)
        {
            CError.Write(bbb[i].ToString());
            // BUGFIX: was output.AppendFormat(bbb[i].ToString()) — AppendFormat treats its
            // first argument as a composite format string, which is API misuse (and would
            // throw FormatException if the data ever contained '{' or '}'). Append the
            // rendered value directly instead; the produced text is identical.
            output.Append(bbb[i].ToString());
        }
    }
    CError.WriteLine();
    CError.WriteLine("Length of the output : " + output.ToString().Length);
    CError.Compare(output.ToString().Length, 6072, "Expected Length : 6072");
    return TEST_PASS;
}
// Regression test: attributes synthesized by ReadSubtree (here the inherited xmlns) must be
// readable with ReadContentAsBase64 without throwing; the decoded bytes are discarded.
[Variation("SubtreeReader inserted attributes don't work with ReadContentAsBase64")]
public int SubtreeReaderInsertedAttributesWorkWithReadContentAsBase64()
{
if (CheckCanReadBinaryContent()) return TEST_PASS;
string strxml1 = "<root xmlns='";
string strxml2 = "'><bar/></root>";
// Two base64-looking namespace values (one with padding, one hex-like).
string[] binValue = new string[] { "AAECAwQFBgcI==", "0102030405060708090a0B0c" };
for (int i = 0; i < binValue.Length; i++)
{
string strxml = strxml1 + binValue[i] + strxml2;
ReloadSourceStr(strxml);
DataReader.Read();
DataReader.Read();
using (XmlReader sr = DataReader.ReadSubtree())
{
sr.Read();
sr.MoveToFirstAttribute();
sr.MoveToFirstAttribute();
byte[] bytes = new byte[4];
// Drain the attribute value; only absence of exceptions matters here.
while ((sr.ReadContentAsBase64(bytes, 0, bytes.Length)) > 0) { }
}
}
return TEST_PASS;
}
// Each "123" text node decodes to the two bytes 215,109; the loop verifies this on every
// node the reader visits through the nested elements.
[Variation("call ReadContentAsBase64 on two or more nodes")]
public int TestReadBase64_28()
{
string xml = "<elem0>123<elem1>123<elem2>123</elem2>123</elem1>123</elem0>";
ReloadSource(new StringReader(xml));
if (CheckCanReadBinaryContent()) return TEST_PASS;
byte[] buffer = new byte[3];
int startPos = 0;
int readSize = 3;
int currentSize = 0;
DataReader.Read();
while (DataReader.Read())
{
currentSize = DataReader.ReadContentAsBase64(buffer, startPos, readSize);
CError.Equals(currentSize, 2, "size");
CError.Equals(buffer[0], (byte)215, "buffer1");
CError.Equals(buffer[1], (byte)109, "buffer2");
// Line info is unavailable on node/navigator-based readers.
if (!(IsXPathNavigatorReader() || IsXmlNodeReader() || IsXmlNodeReaderDataDoc()))
{
CError.WriteLine("LineNumber" + DataReader.LineNumber);
CError.WriteLine("LinePosition" + DataReader.LinePosition);
}
}
DataReader.Close();
return TEST_PASS;
}
// "12%45" contains '%', which is not a base64 char: most readers must throw XmlException
// with nothing decoded; a few tolerant reader types are allowed to succeed.
[Variation("read Base64 over invalid text node")]
public int TestReadBase64_29()
{
string xml = "<elem0>12%45<elem1>12%45<elem2>12%45</elem2>12%45</elem1>12%45</elem0>";
ReloadSource(new StringReader(xml));
if (CheckCanReadBinaryContent()) return TEST_PASS;
byte[] buffer = new byte[5];
int currentSize = 0;
while (DataReader.Read())
{
// Extra Read() advances onto the text node before decoding.
DataReader.Read();
try
{
currentSize = DataReader.ReadContentAsBase64(buffer, 0, 5);
if (!(IsCharCheckingReader() || IsXmlNodeReader() || IsXmlNodeReaderDataDoc() || IsXmlValidatingReader() || IsXPathNavigatorReader()))
return TEST_FAIL;
}
catch (XmlException)
{
CError.Compare(currentSize, 0, "size");
}
}
DataReader.Close();
return TEST_PASS;
}
// Reading Value before ReadContentAsBase64 must not disturb the subsequent decode.
[Variation("goto to text node, ask got.Value, readcontentasBase64")]
public int TestReadBase64_30()
{
string xml = "<elem0>123</elem0>";
ReloadSourceStr(xml);
if (CheckCanReadBinaryContent()) return TEST_PASS;
byte[] buffer = new byte[3];
DataReader.Read();
DataReader.Read();
CError.Compare(DataReader.Value, "123", "value");
CError.Compare(DataReader.ReadContentAsBase64(buffer, 0, 1), 1, "size");
DataReader.Close();
return TEST_PASS;
}
// After a partial decode, streaming readers report only the unconsumed remainder ("3") as
// Value, while node/navigator-based readers still report the full text.
[Variation("goto to text node, readcontentasBase64, ask got.Value")]
public int TestReadBase64_31()
{
string xml = "<elem0>123</elem0>";
ReloadSourceStr(xml);
if (CheckCanReadBinaryContent()) return TEST_PASS;
byte[] buffer = new byte[3];
DataReader.Read();
DataReader.Read();
CError.Compare(DataReader.ReadContentAsBase64(buffer, 0, 1), 1, "size");
CError.Compare(DataReader.Value, (IsCharCheckingReader() || IsXmlNodeReader() || IsXmlNodeReaderDataDoc() || IsXmlValidatingReader() || IsXPathNavigatorReader()) ? "123" : "3", "value");
DataReader.Close();
return TEST_PASS;
}
// Partial decode of a long text node followed by Read(): must either decode 5 bytes and move
// on cleanly, or throw NotSupportedException on readers without binary-content support.
[Variation("goto to huge text node, read several chars with ReadContentAsBase64 and Move forward with .Read()")]
public int TestReadBase64_32()
{
string xml = "<elem0>1234567 89 1234 123345 5676788 5567712 34567 89 1234 123345 5676788 55677</elem0>";
ReloadSource(new StringReader(xml));
byte[] buffer = new byte[5];
DataReader.Read();
DataReader.Read();
try
{
CError.Compare(DataReader.ReadContentAsBase64(buffer, 0, 5), 5, "size");
}
catch (NotSupportedException) { return TEST_PASS; }
DataReader.Read();
DataReader.Close();
return TEST_PASS;
}
// Same shape but with invalid base64 characters ('$', '^') in the node: an XmlException
// (or NotSupportedException) passes; silently succeeding fails the test.
[Variation("goto to huge text node with invalid chars, read several chars with ReadContentAsBase64 and Move forward with .Read()")]
public int TestReadBase64_33()
{
string xml = "<elem0>123 $^ 56789 abcdefg hij klmn opqrst 12345 uvw xy ^ z</elem0>";
ReloadSource(new StringReader(xml));
byte[] buffer = new byte[5];
DataReader.Read();
DataReader.Read();
try
{
CError.Compare(DataReader.ReadContentAsBase64(buffer, 0, 5), 5, "size");
DataReader.Read();
}
catch (XmlException) { return TEST_PASS; }
catch (NotSupportedException) { return TEST_PASS; }
finally
{
// Always release the reader, whichever path is taken.
DataReader.Close();
}
return TEST_FAIL;
}
//[Variation("ReadContentAsBase64 on an xmlns attribute", Param = "<foo xmlns='default'> <bar id='1'/> </foo>")]
//[Variation("ReadContentAsBase64 on an xmlns:k attribute", Param = "<k:foo xmlns:k='default'> <k:bar id='1'/> </k:foo>")]
//[Variation("ReadContentAsBase64 on an xml:space attribute", Param = "<foo xml:space='default'> <bar id='1'/> </foo>")]
//[Variation("ReadContentAsBase64 on an xml:lang attribute", Param = "<foo xml:lang='default'> <bar id='1'/> </foo>")]
// Decodes the value of a namespace / xml:* attribute supplied via CurVariation.Param;
// "default" (7 base64 chars) yields 5 bytes. NotSupportedException is tolerated.
public int TestReadBase64_34()
{
string xml = (string)CurVariation.Param;
byte[] buffer = new byte[8];
try
{
ReloadSource(new StringReader(xml));
DataReader.Read();
// Binary readers need one extra Read() to reach the element.
if (IsBinaryReader()) DataReader.Read();
DataReader.MoveToAttribute(0);
CError.Compare(DataReader.Value, "default", "value");
CError.Equals(DataReader.ReadContentAsBase64(buffer, 0, 8), 5, "size");
}
catch (NotSupportedException) { }
DataReader.Close();
return TEST_PASS;
}
// Like TestReadBase64_28 but with surrounding whitespace and mixed \n / \r / \r\n line endings:
// the decoder must skip the whitespace and still yield bytes 215,109 for each "123".
[Variation("call ReadContentAsBase64 on two or more nodes and whitespace")]
public int TestReadReadBase64_35()
{
string xml = @"<elem0> 123" + "\n" + @" <elem1>" + "\r" + @"123
<elem2>
123 </elem2>" + "\r\n" + @" 123</elem1> 123 </elem0>";
ReloadSource(new StringReader(xml));
if (CheckCanReadBinaryContent()) return TEST_PASS;
byte[] buffer = new byte[3];
int startPos = 0;
int readSize = 3;
int currentSize = 0;
DataReader.Read();
while (DataReader.Read())
{
currentSize = DataReader.ReadContentAsBase64(buffer, startPos, readSize);
CError.Equals(currentSize, 2, "size");
CError.Equals(buffer[0], (byte)215, "buffer1");
CError.Equals(buffer[1], (byte)109, "buffer2");
if (!(IsXPathNavigatorReader() || IsXmlNodeReader() || IsXmlNodeReaderDataDoc()))
{
CError.WriteLine("LineNumber" + DataReader.LineNumber);
CError.WriteLine("LinePosition" + DataReader.LinePosition);
}
}
DataReader.Close();
return TEST_PASS;
}
// Variant of _35 that also touches Value before each decode to ensure the value cache does
// not interfere with ReadContentAsBase64.
[Variation("call ReadContentAsBase64 on two or more nodes and whitespace after call Value")]
public int TestReadReadBase64_36()
{
string xml = @"<elem0> 123" + "\n" + @" <elem1>" + "\r" + @"123
<elem2>
123 </elem2>" + "\r\n" + @" 123</elem1> 123 </elem0>";
ReloadSource(new StringReader(xml));
if (CheckCanReadBinaryContent()) return TEST_PASS;
byte[] buffer = new byte[3];
int startPos = 0;
int readSize = 3;
int currentSize = 0;
DataReader.Read();
while (DataReader.Read())
{
// NOTE(review): this two-argument CError.Equals(bool, string) differs from the
// three-argument (actual, expected, message) calls below — verify it is the intended
// overload and not a bool-vs-string comparison that always fails/passes silently.
CError.Equals(DataReader.Value.Contains("123"), "Value");
currentSize = DataReader.ReadContentAsBase64(buffer, startPos, readSize);
CError.Equals(currentSize, 2, "size");
CError.Equals(buffer[0], (byte)215, "buffer1");
CError.Equals(buffer[1], (byte)109, "buffer2");
if (!(IsXPathNavigatorReader() || IsXmlNodeReader() || IsXmlNodeReaderDataDoc()))
{
CError.WriteLine("LineNumber" + DataReader.LineNumber);
CError.WriteLine("LinePosition" + DataReader.LinePosition);
}
}
DataReader.Close();
return TEST_PASS;
}
}
}
| |
namespace Azure.Messaging.WebPubSub
{
// NOTE(review): this section appears to be an auto-generated public-API listing — member
// bodies are `throw null` placeholders and carry no behavior; only signatures matter here.
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public sealed partial class ClientCertificateInfo
{
public ClientCertificateInfo(string thumbprint) { }
public string Thumbprint { get { throw null; } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class ConnectedEventRequest : Azure.Messaging.WebPubSub.ServiceRequest
{
public ConnectedEventRequest() : base (default(bool), default(bool), default(bool), default(bool), default(string)) { }
public override string Name { get { throw null; } }
}
// Connect event: exposes the client's claims, query, subprotocols and certificates.
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public sealed partial class ConnectEventRequest : Azure.Messaging.WebPubSub.ServiceRequest
{
public ConnectEventRequest(System.Collections.Generic.IDictionary<string, string[]> claims, System.Collections.Generic.IDictionary<string, string[]> query, string[] subprotocols, Azure.Messaging.WebPubSub.ClientCertificateInfo[] clientCertificateInfos) : base (default(bool), default(bool), default(bool), default(bool), default(string)) { }
public System.Collections.Generic.IDictionary<string, string[]> Claims { get { throw null; } }
public Azure.Messaging.WebPubSub.ClientCertificateInfo[] ClientCertificates { get { throw null; } }
public override string Name { get { throw null; } }
public System.Collections.Generic.IDictionary<string, string[]> Query { get { throw null; } }
public string[] Subprotocols { get { throw null; } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public sealed partial class DisconnectedEventRequest : Azure.Messaging.WebPubSub.ServiceRequest
{
public DisconnectedEventRequest(string reason) : base (default(bool), default(bool), default(bool), default(bool), default(string)) { }
public override string Name { get { throw null; } }
public string Reason { get { throw null; } }
}
public sealed partial class InvalidRequest : Azure.Messaging.WebPubSub.ServiceRequest
{
public InvalidRequest(System.Net.HttpStatusCode statusCode, string message = null) : base (default(bool), default(bool), default(bool), default(bool), default(string)) { }
public override string Name { get { throw null; } }
}
// Wire values for message payload encoding ("binary"/"json"/"text").
[Newtonsoft.Json.JsonConverterAttribute(typeof(Newtonsoft.Json.Converters.StringEnumConverter))]
public enum MessageDataType
{
[System.Runtime.Serialization.EnumMemberAttribute(Value="binary")]
Binary = 0,
[System.Runtime.Serialization.EnumMemberAttribute(Value="json")]
Json = 1,
[System.Runtime.Serialization.EnumMemberAttribute(Value="text")]
Text = 2,
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public sealed partial class MessageEventRequest : Azure.Messaging.WebPubSub.ServiceRequest
{
public MessageEventRequest(System.BinaryData message, Azure.Messaging.WebPubSub.MessageDataType dataType) : base (default(bool), default(bool), default(bool), default(bool), default(string)) { }
public Azure.Messaging.WebPubSub.MessageDataType DataType { get { throw null; } }
public System.BinaryData Message { get { throw null; } }
public override string Name { get { throw null; } }
}
// Base type for all service-originated requests; carries validation/authorization state.
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public abstract partial class ServiceRequest
{
public ServiceRequest(bool isValidationRequest, bool valid, bool unauthorized, bool badRequest, string error = null) { }
public bool BadRequest { get { throw null; } }
public string ErrorMessage { get { throw null; } }
public bool IsValidationRequest { get { throw null; } }
public abstract string Name { get; }
public bool Unauthorized { get { throw null; } }
public bool Valid { get { throw null; } }
}
public sealed partial class ValidationRequest : Azure.Messaging.WebPubSub.ServiceRequest
{
public ValidationRequest(bool valid) : base (default(bool), default(bool), default(bool), default(bool), default(string)) { }
public override string Name { get { throw null; } }
}
}
namespace Microsoft.Azure.WebJobs.Extensions.WebPubSub
{
// NOTE(review): auto-generated public-API listing — `throw null` bodies are placeholders.
// The classes below model WebPubSub output-binding operations and trigger responses.
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class AddConnectionToGroup : Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubOperation
{
public AddConnectionToGroup() { }
public string ConnectionId { get { throw null; } set { } }
public string Group { get { throw null; } set { } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class AddUserToGroup : Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubOperation
{
public AddUserToGroup() { }
public string Group { get { throw null; } set { } }
public string UserId { get { throw null; } set { } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class CloseClientConnection : Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubOperation
{
public CloseClientConnection() { }
public string ConnectionId { get { throw null; } set { } }
public string Reason { get { throw null; } set { } }
}
// Read-only context describing the triggering client connection/event.
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class ConnectionContext
{
public ConnectionContext() { }
public string ConnectionId { get { throw null; } }
public string EventName { get { throw null; } }
public Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubEventType EventType { get { throw null; } }
public System.Collections.Generic.Dictionary<string, Microsoft.Extensions.Primitives.StringValues> Headers { get { throw null; } }
public string Hub { get { throw null; } }
public string Signature { get { throw null; } }
public string UserId { get { throw null; } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class ConnectResponse : Microsoft.Azure.WebJobs.Extensions.WebPubSub.ServiceResponse
{
public ConnectResponse() { }
[Newtonsoft.Json.JsonPropertyAttribute(Required=Newtonsoft.Json.Required.Default)]
public string[] Groups { get { throw null; } set { } }
[Newtonsoft.Json.JsonPropertyAttribute(Required=Newtonsoft.Json.Required.Default)]
public string[] Roles { get { throw null; } set { } }
[Newtonsoft.Json.JsonPropertyAttribute(Required=Newtonsoft.Json.Required.Default)]
public string Subprotocol { get { throw null; } set { } }
[Newtonsoft.Json.JsonPropertyAttribute(Required=Newtonsoft.Json.Required.Default)]
public string UserId { get { throw null; } set { } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class ErrorResponse : Microsoft.Azure.WebJobs.Extensions.WebPubSub.ServiceResponse
{
[Newtonsoft.Json.JsonConstructorAttribute]
public ErrorResponse() { }
public ErrorResponse(Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubErrorCode code, string message = null) { }
[Newtonsoft.Json.JsonPropertyAttribute(Required=Newtonsoft.Json.Required.Always)]
public Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubErrorCode Code { get { throw null; } set { } }
[Newtonsoft.Json.JsonPropertyAttribute(Required=Newtonsoft.Json.Required.Default)]
public string ErrorMessage { get { throw null; } set { } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class GrantGroupPermission : Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubOperation
{
public GrantGroupPermission() { }
public string ConnectionId { get { throw null; } set { } }
public Azure.Messaging.WebPubSub.WebPubSubPermission Permission { get { throw null; } set { } }
public string TargetName { get { throw null; } set { } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class MessageResponse : Microsoft.Azure.WebJobs.Extensions.WebPubSub.ServiceResponse
{
public MessageResponse() { }
public Azure.Messaging.WebPubSub.MessageDataType DataType { get { throw null; } set { } }
[Newtonsoft.Json.JsonPropertyAttribute(Required=Newtonsoft.Json.Required.Always)]
public System.BinaryData Message { get { throw null; } set { } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class RemoveConnectionFromGroup : Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubOperation
{
public RemoveConnectionFromGroup() { }
public string ConnectionId { get { throw null; } set { } }
public string Group { get { throw null; } set { } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class RemoveUserFromAllGroups : Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubOperation
{
public RemoveUserFromAllGroups() { }
public string UserId { get { throw null; } set { } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class RemoveUserFromGroup : Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubOperation
{
public RemoveUserFromGroup() { }
public string Group { get { throw null; } set { } }
public string UserId { get { throw null; } set { } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class RevokeGroupPermission : Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubOperation
{
public RevokeGroupPermission() { }
public string ConnectionId { get { throw null; } set { } }
public Azure.Messaging.WebPubSub.WebPubSubPermission Permission { get { throw null; } set { } }
public string TargetName { get { throw null; } set { } }
}
// Send* operations target progressively narrower audiences: all, connection, group, user.
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class SendToAll : Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubOperation
{
public SendToAll() { }
public Azure.Messaging.WebPubSub.MessageDataType DataType { get { throw null; } set { } }
public string[] Excluded { get { throw null; } set { } }
public System.BinaryData Message { get { throw null; } set { } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class SendToConnection : Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubOperation
{
public SendToConnection() { }
public string ConnectionId { get { throw null; } set { } }
public Azure.Messaging.WebPubSub.MessageDataType DataType { get { throw null; } set { } }
public System.BinaryData Message { get { throw null; } set { } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class SendToGroup : Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubOperation
{
public SendToGroup() { }
public Azure.Messaging.WebPubSub.MessageDataType DataType { get { throw null; } set { } }
public string[] Excluded { get { throw null; } set { } }
public string Group { get { throw null; } set { } }
public System.BinaryData Message { get { throw null; } set { } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class SendToUser : Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubOperation
{
public SendToUser() { }
public Azure.Messaging.WebPubSub.MessageDataType DataType { get { throw null; } set { } }
public System.BinaryData Message { get { throw null; } set { } }
public string UserId { get { throw null; } set { } }
}
public abstract partial class ServiceResponse
{
protected ServiceResponse() { }
}
[Microsoft.Azure.WebJobs.Description.BindingAttribute]
[System.AttributeUsageAttribute(System.AttributeTargets.Parameter | System.AttributeTargets.ReturnValue)]
public partial class WebPubSubAttribute : System.Attribute
{
public WebPubSubAttribute() { }
[Microsoft.Azure.WebJobs.Description.ConnectionStringAttribute]
public string ConnectionStringSetting { get { throw null; } set { } }
[Microsoft.Azure.WebJobs.Description.AutoResolveAttribute]
public string Hub { get { throw null; } set { } }
}
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class WebPubSubConnection
{
public WebPubSubConnection(System.Uri url) { }
public string AccessToken { get { throw null; } }
public string BaseUrl { get { throw null; } }
public string Url { get { throw null; } }
}
[Microsoft.Azure.WebJobs.Description.BindingAttribute]
[System.AttributeUsageAttribute(System.AttributeTargets.Parameter | System.AttributeTargets.ReturnValue)]
public partial class WebPubSubConnectionAttribute : System.Attribute
{
public WebPubSubConnectionAttribute() { }
[Microsoft.Azure.WebJobs.Description.ConnectionStringAttribute]
public string ConnectionStringSetting { get { throw null; } set { } }
[Microsoft.Azure.WebJobs.Description.AutoResolveAttribute]
public string Hub { get { throw null; } set { } }
[Microsoft.Azure.WebJobs.Description.AutoResolveAttribute]
public string UserId { get { throw null; } set { } }
}
[Newtonsoft.Json.JsonConverterAttribute(typeof(Newtonsoft.Json.Converters.StringEnumConverter))]
public enum WebPubSubErrorCode
{
[System.Runtime.Serialization.EnumMemberAttribute(Value="unauthorized")]
Unauthorized = 0,
[System.Runtime.Serialization.EnumMemberAttribute(Value="userError")]
UserError = 1,
[System.Runtime.Serialization.EnumMemberAttribute(Value="serverError")]
ServerError = 2,
}
// BUGFIX: this enum was annotated with System.Text.Json.Serialization.JsonConverterAttribute
// while passing Newtonsoft's StringEnumConverter — System.Text.Json's attribute requires a
// System.Text.Json.Serialization.JsonConverter-derived type, so that combination is invalid
// and inconsistent with the sibling enums (MessageDataType, WebPubSubErrorCode), which use
// the Newtonsoft attribute. Use the Newtonsoft attribute here as well.
[Newtonsoft.Json.JsonConverterAttribute(typeof(Newtonsoft.Json.Converters.StringEnumConverter))]
public enum WebPubSubEventType
{
[System.Runtime.Serialization.EnumMemberAttribute(Value="system")]
System = 0,
[System.Runtime.Serialization.EnumMemberAttribute(Value="user")]
User = 1,
}
// Registration entry point for the WebPubSub WebJobs extension.
public static partial class WebPubSubJobsBuilderExtensions
{
public static Microsoft.Azure.WebJobs.IWebJobsBuilder AddWebPubSub(this Microsoft.Azure.WebJobs.IWebJobsBuilder builder) { throw null; }
}
// Base type for all output-binding operations; OperationKind discriminates the concrete type.
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public abstract partial class WebPubSubOperation
{
protected WebPubSubOperation() { }
public string OperationKind { get { throw null; } set { } }
}
public partial class WebPubSubOptions : Microsoft.Azure.WebJobs.Hosting.IOptionsFormatter
{
public WebPubSubOptions() { }
public string Hub { get { throw null; } set { } }
public string Format() { throw null; }
}
// Aggregates the connection context, the parsed service request and a prepared HTTP response.
[Newtonsoft.Json.JsonObjectAttribute(NamingStrategyType=typeof(Newtonsoft.Json.Serialization.CamelCaseNamingStrategy))]
public partial class WebPubSubRequest
{
internal WebPubSubRequest() { }
public Microsoft.Azure.WebJobs.Extensions.WebPubSub.ConnectionContext ConnectionContext { get { throw null; } }
public Azure.Messaging.WebPubSub.ServiceRequest Request { get { throw null; } }
public System.Net.Http.HttpResponseMessage Response { get { throw null; } }
}
[Microsoft.Azure.WebJobs.Description.BindingAttribute]
[System.AttributeUsageAttribute(System.AttributeTargets.Parameter | System.AttributeTargets.ReturnValue)]
public partial class WebPubSubRequestAttribute : System.Attribute
{
public WebPubSubRequestAttribute() { }
}
// Trigger binding attribute; hub may come from the attribute or from configuration.
[Microsoft.Azure.WebJobs.Description.BindingAttribute(TriggerHandlesReturnValue=true)]
[System.AttributeUsageAttribute(System.AttributeTargets.Parameter)]
public partial class WebPubSubTriggerAttribute : System.Attribute
{
public WebPubSubTriggerAttribute(Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubEventType eventType, string eventName) { }
public WebPubSubTriggerAttribute(string hub, Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubEventType eventType, string eventName) { }
[Microsoft.Azure.WebJobs.Description.AutoResolveAttribute]
[System.ComponentModel.DataAnnotations.RequiredAttribute]
public string EventName { get { throw null; } }
[Microsoft.Azure.WebJobs.Description.AutoResolveAttribute]
public Microsoft.Azure.WebJobs.Extensions.WebPubSub.WebPubSubEventType EventType { get { throw null; } }
[Microsoft.Azure.WebJobs.Description.AutoResolveAttribute]
public string Hub { get { throw null; } }
}
public partial class WebPubSubWebJobsStartup : Microsoft.Azure.WebJobs.Hosting.IWebJobsStartup
{
public WebPubSubWebJobsStartup() { }
public void Configure(Microsoft.Azure.WebJobs.IWebJobsBuilder builder) { }
}
}
| |
// Code generated by Microsoft (R) AutoRest Code Generator 1.0.1.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.ServiceFabric
{
using Microsoft.Azure;
using Microsoft.Azure.Management;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Extension methods for ClustersOperations.
/// </summary>
/// <summary>
/// Extension methods for <see cref="IClustersOperations"/> exposing the Service
/// Fabric cluster operations in blocking and asynchronous form. The blocking
/// wrappers simply run the asynchronous counterparts to completion.
/// </summary>
public static partial class ClustersOperationsExtensions
{
    /// <summary>Update cluster configuration.</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='resourceGroupName'>The name of the resource group to which the resource belongs or get created</param>
    /// <param name='clusterName'>The name of the cluster resource</param>
    /// <param name='clusterUpdateParameters'>The parameters which contains the property value and property name which used to update the cluster configuration</param>
    public static Cluster Update(this IClustersOperations operations, string resourceGroupName, string clusterName, ClusterUpdateParameters clusterUpdateParameters) =>
        operations.UpdateAsync(resourceGroupName, clusterName, clusterUpdateParameters).GetAwaiter().GetResult();
    /// <summary>Update cluster configuration.</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='resourceGroupName'>The name of the resource group to which the resource belongs or get created</param>
    /// <param name='clusterName'>The name of the cluster resource</param>
    /// <param name='clusterUpdateParameters'>The parameters which contains the property value and property name which used to update the cluster configuration</param>
    /// <param name='cancellationToken'>The cancellation token.</param>
    public static async Task<Cluster> UpdateAsync(this IClustersOperations operations, string resourceGroupName, string clusterName, ClusterUpdateParameters clusterUpdateParameters, CancellationToken cancellationToken = default(CancellationToken))
    {
        // Dispose the HTTP response once the deserialized body has been captured.
        using (var httpResponse = await operations.UpdateWithHttpMessagesAsync(resourceGroupName, clusterName, clusterUpdateParameters, null, cancellationToken).ConfigureAwait(false))
        {
            return httpResponse.Body;
        }
    }
    /// <summary>Get cluster resource.</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='resourceGroupName'>The name of the resource group to which the resource belongs or get created</param>
    /// <param name='clusterName'>The name of the cluster resource</param>
    public static Cluster Get(this IClustersOperations operations, string resourceGroupName, string clusterName) =>
        operations.GetAsync(resourceGroupName, clusterName).GetAwaiter().GetResult();
    /// <summary>Get cluster resource.</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='resourceGroupName'>The name of the resource group to which the resource belongs or get created</param>
    /// <param name='clusterName'>The name of the cluster resource</param>
    /// <param name='cancellationToken'>The cancellation token.</param>
    public static async Task<Cluster> GetAsync(this IClustersOperations operations, string resourceGroupName, string clusterName, CancellationToken cancellationToken = default(CancellationToken))
    {
        using (var httpResponse = await operations.GetWithHttpMessagesAsync(resourceGroupName, clusterName, null, cancellationToken).ConfigureAwait(false))
        {
            return httpResponse.Body;
        }
    }
    /// <summary>Create cluster resource.</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='resourceGroupName'>The name of the resource group to which the resource belongs or get created</param>
    /// <param name='clusterName'>The name of the cluster resource</param>
    /// <param name='clusterResource'>Put Request</param>
    public static Cluster Create(this IClustersOperations operations, string resourceGroupName, string clusterName, Cluster clusterResource) =>
        operations.CreateAsync(resourceGroupName, clusterName, clusterResource).GetAwaiter().GetResult();
    /// <summary>Create cluster resource.</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='resourceGroupName'>The name of the resource group to which the resource belongs or get created</param>
    /// <param name='clusterName'>The name of the cluster resource</param>
    /// <param name='clusterResource'>Put Request</param>
    /// <param name='cancellationToken'>The cancellation token.</param>
    public static async Task<Cluster> CreateAsync(this IClustersOperations operations, string resourceGroupName, string clusterName, Cluster clusterResource, CancellationToken cancellationToken = default(CancellationToken))
    {
        using (var httpResponse = await operations.CreateWithHttpMessagesAsync(resourceGroupName, clusterName, clusterResource, null, cancellationToken).ConfigureAwait(false))
        {
            return httpResponse.Body;
        }
    }
    /// <summary>Delete cluster resource.</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='resourceGroupName'>The name of the resource group to which the resource belongs or get created</param>
    /// <param name='clusterName'>The name of the cluster resource</param>
    public static void Delete(this IClustersOperations operations, string resourceGroupName, string clusterName) =>
        operations.DeleteAsync(resourceGroupName, clusterName).GetAwaiter().GetResult();
    /// <summary>Delete cluster resource.</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='resourceGroupName'>The name of the resource group to which the resource belongs or get created</param>
    /// <param name='clusterName'>The name of the cluster resource</param>
    /// <param name='cancellationToken'>The cancellation token.</param>
    public static async Task DeleteAsync(this IClustersOperations operations, string resourceGroupName, string clusterName, CancellationToken cancellationToken = default(CancellationToken))
    {
        // No body to return; disposing the response releases the connection.
        using (await operations.DeleteWithHttpMessagesAsync(resourceGroupName, clusterName, null, cancellationToken).ConfigureAwait(false))
        {
        }
    }
    /// <summary>List cluster resource by resource group.</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='resourceGroupName'>The name of the resource group to which the resource belongs or get created</param>
    public static IPage<Cluster> ListByResourceGroup(this IClustersOperations operations, string resourceGroupName) =>
        operations.ListByResourceGroupAsync(resourceGroupName).GetAwaiter().GetResult();
    /// <summary>List cluster resource by resource group.</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='resourceGroupName'>The name of the resource group to which the resource belongs or get created</param>
    /// <param name='cancellationToken'>The cancellation token.</param>
    public static async Task<IPage<Cluster>> ListByResourceGroupAsync(this IClustersOperations operations, string resourceGroupName, CancellationToken cancellationToken = default(CancellationToken))
    {
        using (var httpResponse = await operations.ListByResourceGroupWithHttpMessagesAsync(resourceGroupName, null, cancellationToken).ConfigureAwait(false))
        {
            return httpResponse.Body;
        }
    }
    /// <summary>List cluster resource.</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    public static IPage<Cluster> List(this IClustersOperations operations) =>
        operations.ListAsync().GetAwaiter().GetResult();
    /// <summary>List cluster resource.</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='cancellationToken'>The cancellation token.</param>
    public static async Task<IPage<Cluster>> ListAsync(this IClustersOperations operations, CancellationToken cancellationToken = default(CancellationToken))
    {
        using (var httpResponse = await operations.ListWithHttpMessagesAsync(null, cancellationToken).ConfigureAwait(false))
        {
            return httpResponse.Body;
        }
    }
    /// <summary>Update cluster configuration (begins the long-running operation).</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='resourceGroupName'>The name of the resource group to which the resource belongs or get created</param>
    /// <param name='clusterName'>The name of the cluster resource</param>
    /// <param name='clusterUpdateParameters'>The parameters which contains the property value and property name which used to update the cluster configuration</param>
    public static Cluster BeginUpdate(this IClustersOperations operations, string resourceGroupName, string clusterName, ClusterUpdateParameters clusterUpdateParameters) =>
        operations.BeginUpdateAsync(resourceGroupName, clusterName, clusterUpdateParameters).GetAwaiter().GetResult();
    /// <summary>Update cluster configuration (begins the long-running operation).</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='resourceGroupName'>The name of the resource group to which the resource belongs or get created</param>
    /// <param name='clusterName'>The name of the cluster resource</param>
    /// <param name='clusterUpdateParameters'>The parameters which contains the property value and property name which used to update the cluster configuration</param>
    /// <param name='cancellationToken'>The cancellation token.</param>
    public static async Task<Cluster> BeginUpdateAsync(this IClustersOperations operations, string resourceGroupName, string clusterName, ClusterUpdateParameters clusterUpdateParameters, CancellationToken cancellationToken = default(CancellationToken))
    {
        using (var httpResponse = await operations.BeginUpdateWithHttpMessagesAsync(resourceGroupName, clusterName, clusterUpdateParameters, null, cancellationToken).ConfigureAwait(false))
        {
            return httpResponse.Body;
        }
    }
    /// <summary>Create cluster resource (begins the long-running operation).</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='resourceGroupName'>The name of the resource group to which the resource belongs or get created</param>
    /// <param name='clusterName'>The name of the cluster resource</param>
    /// <param name='clusterResource'>Put Request</param>
    public static Cluster BeginCreate(this IClustersOperations operations, string resourceGroupName, string clusterName, Cluster clusterResource) =>
        operations.BeginCreateAsync(resourceGroupName, clusterName, clusterResource).GetAwaiter().GetResult();
    /// <summary>Create cluster resource (begins the long-running operation).</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='resourceGroupName'>The name of the resource group to which the resource belongs or get created</param>
    /// <param name='clusterName'>The name of the cluster resource</param>
    /// <param name='clusterResource'>Put Request</param>
    /// <param name='cancellationToken'>The cancellation token.</param>
    public static async Task<Cluster> BeginCreateAsync(this IClustersOperations operations, string resourceGroupName, string clusterName, Cluster clusterResource, CancellationToken cancellationToken = default(CancellationToken))
    {
        using (var httpResponse = await operations.BeginCreateWithHttpMessagesAsync(resourceGroupName, clusterName, clusterResource, null, cancellationToken).ConfigureAwait(false))
        {
            return httpResponse.Body;
        }
    }
    /// <summary>List cluster resource by resource group (next page).</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
    public static IPage<Cluster> ListByResourceGroupNext(this IClustersOperations operations, string nextPageLink) =>
        operations.ListByResourceGroupNextAsync(nextPageLink).GetAwaiter().GetResult();
    /// <summary>List cluster resource by resource group (next page).</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
    /// <param name='cancellationToken'>The cancellation token.</param>
    public static async Task<IPage<Cluster>> ListByResourceGroupNextAsync(this IClustersOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
    {
        using (var httpResponse = await operations.ListByResourceGroupNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
        {
            return httpResponse.Body;
        }
    }
    /// <summary>List cluster resource (next page).</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
    public static IPage<Cluster> ListNext(this IClustersOperations operations, string nextPageLink) =>
        operations.ListNextAsync(nextPageLink).GetAwaiter().GetResult();
    /// <summary>List cluster resource (next page).</summary>
    /// <param name='operations'>The operations group for this extension method.</param>
    /// <param name='nextPageLink'>The NextLink from the previous successful call to List operation.</param>
    /// <param name='cancellationToken'>The cancellation token.</param>
    public static async Task<IPage<Cluster>> ListNextAsync(this IClustersOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
    {
        using (var httpResponse = await operations.ListNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
        {
            return httpResponse.Body;
        }
    }
}
}
| |
using System;
using System.IO;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Text;
using UnityEngine;
using UnityEditor;
using UnityEditor.SceneManagement;
namespace nwTools
{
// Unity editor tool that walks the open scene and serializes it into an
// OpenSceneGraph (.osg) ASCII file. Output is assembled by string
// concatenation; geometry/material/terrain details are delegated to the
// project's *Exporter helpers.
public class SceneExporter : ScriptableObject
{
    // Menu entry: export the whole scene.
    [MenuItem ("nwTools/Export All")]
    public static void DoExportAll() { ExportScene(false); }
    // Menu entry: export only the current selection.
    [MenuItem ("nwTools/Export Selected")]
    public static void DoExportSelected() { ExportScene(true); }
    // Clears all static exporter state. Called before AND after an export
    // because Unity doesn't cleanly load/unload editor assemblies, so stale
    // state from a failed previous run may linger.
    static void reset()
    {
        BundleResource.Reset();
        BundleComponent.Reset();
        BundleScene.Reset();
        BundleGameObject.Reset();
        BundleComponent.RegisterStandardComponents();
        MeshExporter.Reset();
        MaterialExporter.Reset();
    }
    // Traverses the active scene (optionally only the selection) and returns
    // the processed SceneData snapshot used by the serializers below.
    public static SceneData GenerateSceneData( bool onlySelected )
    {
        // reset the exporter in case there was an error, Unity doesn't cleanly load/unload editor assemblies
        reset();
        BundleScene.sceneName = Path.GetFileNameWithoutExtension(EditorSceneManager.GetActiveScene().name);
        BundleScene scene = BundleScene.TraverseScene(onlySelected);
        // Three-phase pipeline; ordering is significant.
        scene.Preprocess();
        scene.Process();
        scene.PostProcess();
        SceneData sceneData = scene.GetSceneData() as SceneData;
        reset();
        return sceneData;
    }
    // Prompts for an output path, generates the scene data and writes the
    // .osg text file, driving the editor progress bar along the way.
    public static void ExportScene( bool onlySelected )
    {
        var defaultFileName = Path.GetFileNameWithoutExtension(EditorSceneManager.GetActiveScene().name) + ".osg";
        var path = EditorUtility.SaveFilePanel( "Export Scene to Bundler", "", defaultFileName, "osg" );
        if ( path.Length!=0 )  // empty path means the user cancelled the dialog
        {
            EditorUtility.DisplayProgressBar( "Scene Bundler", "Start exporting...", 0.0f );
            SceneData sceneData = GenerateSceneData(onlySelected);
            float numHierarchy = (float)sceneData.hierarchy.Count, numDone = 0.0f;
            string osgData = ExportHeaderOSG( ref sceneData );
            foreach ( SceneGameObject obj in sceneData.hierarchy )
            {
                osgData += ExportHierarchy( ref sceneData, obj, 2, numDone, numHierarchy ); numDone += 1.0f;
                EditorUtility.DisplayProgressBar( "Scene Bundler", "Exporting hierarchy...", numDone/numHierarchy );
            }
            osgData += "}\n";  // closes the root Group opened by ExportHeaderOSG
            EditorUtility.DisplayProgressBar( "Scene Bundler", "Writing to " + path, 1.0f );
            System.IO.File.WriteAllText( path, osgData );
            EditorUtility.ClearProgressBar();
            EditorUtility.DisplayDialog( "Scene Bundler", "Export Successful", "OK" );
        }
    }
    // Emits the attribute lines shared by every OSG node (variance, name,
    // node mask, culling flag), indented with the caller-supplied spaces.
    private static string ExportCommonAttr( string name, string spaces, bool isCullingActive )
    {
        string osgData = spaces + "  DataVariance STATIC\n"
                       + spaces + "  name \"" + name + "\"\n"
                       + spaces + "  nodeMask 0xffffffff\n"
                       + spaces + "  cullingActive " + (isCullingActive ? "TRUE" : "FALSE") + "\n";
        return osgData;
    }
    // Opens the root Group node; the matching "}" is appended in ExportScene.
    private static string ExportHeaderOSG( ref SceneData sceneData )
    {
        string osgData = "Group {\n" + ExportCommonAttr(sceneData.name, "", true)
                       + "  num_children " + sceneData.hierarchy.Count + "\n";
        return osgData;
    }
    // Recursively serializes one game object and its children. progressStart /
    // progressAll drive the progress bar and are only meaningful at the top
    // level (children are called with 0, 0).
    private static string ExportHierarchy( ref SceneData sceneData, SceneGameObject gameObj,
                                           int indent, float progressStart, float progressAll )
    {
        int needGlobalNodeType = -1;
        if ( gameObj.components.Count<=0 ) return "";
        string osgData = "", osgSubData = "", spaces = "";
        for ( int i=0; i<indent; ++i ) spaces += " ";
        // Check the main component type as the node type
        SceneComponent mainComponent = gameObj.components[0];
        if ( mainComponent.type=="Transform" )
        {
            SceneTransform st = (SceneTransform)mainComponent;
            osgData = spaces + "MatrixTransform {\n"
                    + spaces + "  referenceFrame RELATIVE\n"
                    + spaces + "  Matrix {\n";
            needGlobalNodeType = 0;
            // FIXME: should convert left-handed to right-handed coordinates
            Matrix4x4 m = Matrix4x4.TRS(st.localPosition, st.localRotation, st.localScale);
            // Column-major emission: each output row is a matrix column.
            osgData += spaces + "    " + m[0, 0] + " " + m[1, 0] + " " + m[2, 0] + " " + m[3, 0] + "\n"
                     + spaces + "    " + m[0, 1] + " " + m[1, 1] + " " + m[2, 1] + " " + m[3, 1] + "\n"
                     + spaces + "    " + m[0, 2] + " " + m[1, 2] + " " + m[2, 2] + " " + m[3, 2] + "\n"
                     + spaces + "    " + m[0, 3] + " " + m[1, 3] + " " + m[2, 3] + " " + m[3, 3] + "\n"
                     + spaces + "  }\n";
        }
        else
            Debug.LogWarning( "[UnityToSceneBundle] Unknown main component type: " + mainComponent.type );
        // Fall back to a plain Node when no typed node was opened above.
        if ( needGlobalNodeType<0 ) osgData = spaces + "Node {\n";
        osgData += ExportCommonAttr(gameObj.name, spaces, true)
                 + spaces + "  num_children ";
        // Traverse all components to add them to main component type
        string subSpaces = spaces + "    ";
        // Component sub-nodes count as extra children alongside child objects.
        int numChildren = gameObj.children.Count;
        for ( int i=1; i<gameObj.components.Count; ++i )
        {
            SceneComponent component = gameObj.components[i];
            if ( component.type=="Light" )
            {
                SceneLight sl = (SceneLight)component;
                osgSubData += spaces + "  nwTools::LightData {\n"
                            + subSpaces + "Type " + sl.lightType + "\n"
                            + subSpaces + "Color " + sl.color.r + " " + sl.color.g + " " + sl.color.b + "\n"
                            + subSpaces + "Range " + sl.range + "\n"
                            + subSpaces + "Realtime " + (sl.realtime ? 1 : 0) + " " + (sl.castsShadows ? 1 : 0) + "\n"
                            + spaces + "  }\n";
                numChildren++;
            }
            else if ( component.type=="Camera" )
            {
                SceneCamera sc = (SceneCamera)component;
                // NOTE(review): camera parameters are not serialized yet; `sc` is unused.
                osgSubData += spaces + "  nwTools::CameraData {\n"
                            // TODO
                            + spaces + "  }\n";
                numChildren++;
            }
            else if ( component.type=="BoxCollider" )
            {
                //SceneBoxCollider sbc = (SceneBoxCollider)component;
                // TODO
            }
            else if ( component.type=="MeshCollider" )
            {
                //SceneMeshCollider smc = (SceneMeshCollider)component;
                // TODO
            }
            else if ( component.type=="ParticleSystem" )
            {
                SceneParticleSystem sps = (SceneParticleSystem)component;
                osgSubData += spaces + "  nwTools::ParticleSystem {\n"
                            + ExportCommonAttr(sps.type, spaces + "  ", false)
                            + ParticleExporter.ExportParticle(ref sceneData, ref sps, subSpaces)
                            + spaces + "  }\n";
                numChildren++;
            }
            else if ( component.type=="Terrain" )
            {
                SceneTerrain st = (SceneTerrain)component;
                osgSubData += spaces + "  Geode {\n"
                            + ExportCommonAttr(st.type, spaces + "  ", true)
                            + subSpaces + "num_drawables 1\n";
                osgSubData += subSpaces + "nwTools::Terrain {\n"
                            + TerrainExporter.ExportTerrain(ref sceneData, ref st, subSpaces + "  ")
                            + subSpaces + "}\n";
                osgSubData += spaces + "  }\n";
                numChildren++;
            }
            else if ( component.type=="MeshRenderer" )
            {
                SceneMeshRenderer smr = (SceneMeshRenderer)component;
                osgSubData += spaces + "  Geode {\n"
                            + ExportCommonAttr(smr.type, spaces + "  ", true)
                            + subSpaces + "num_drawables 1\n";
                SceneMesh mesh = sceneData.resources.GetMesh(smr.mesh);
                osgSubData += subSpaces + "Geometry {\n"
                            + MeshExporter.ExportGeometry(ref sceneData, ref smr, ref mesh, subSpaces + "  ")
                            + subSpaces + "}\n";
                osgSubData += spaces + "  }\n";
                numChildren++;
            }
            else if ( component.type=="SkinnedMeshRenderer" )
            {
                SceneSkinnedMeshRenderer smr = (SceneSkinnedMeshRenderer)component;
                osgSubData += spaces + "  Geode {\n"
                            + ExportCommonAttr(smr.type, spaces + "  ", true)
                            + subSpaces + "num_drawables 1\n";
                SceneMesh mesh = sceneData.resources.GetMesh(smr.mesh);
                osgSubData += subSpaces + "Geometry {\n"
                            + MeshExporter.ExportSkinnedGeometry(ref sceneData, ref smr, ref mesh, subSpaces + "  ")
                            + subSpaces + "}\n";
                osgSubData += spaces + "  }\n";
                numChildren++;
            }
            else
                Debug.LogWarning( "[UnityToSceneBundle] Unknown sub-component type: " + component.type );
        }
        // num_children was left dangling above; complete it now that the count is known.
        osgData += numChildren + "\n" + osgSubData;
        // Traverse all child objects
        float numHierarchy = (float)gameObj.children.Count, numDone = 0.0f;
        foreach ( SceneGameObject childObj in gameObj.children )
        {
            osgData += ExportHierarchy( ref sceneData, childObj, indent + 2, 0.0f, 0.0f ); numDone += 1.0f;
            if ( progressAll>0.0f )
            {
                float progress = (progressStart + numDone / numHierarchy) / progressAll;
                EditorUtility.DisplayProgressBar( "Scene Bundler", "Exporting hierarchy...", progress );
            }
        }
        osgData += spaces + "}\n";
        return osgData;
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Runtime.ExceptionServices;
using System.Security.Authentication;
using System.Security.Authentication.ExtendedProtection;
using System.Security.Cryptography.X509Certificates;
using System.Threading;
using System.Threading.Tasks;
namespace System.Net.Security
{
internal class SslState
{
// Seed for synthesizing placeholder target-host names ("?<n>") when the
// caller supplies an empty TargetHost (see ValidateCreateContext).
private static int s_uniqueNameInteger = 123;
private static AsyncProtocolCallback s_partialFrameCallback = new AsyncProtocolCallback(PartialFrameCallback);
private static AsyncProtocolCallback s_readFrameCallback = new AsyncProtocolCallback(ReadFrameCallback);
private static AsyncCallback s_writeCallback = new AsyncCallback(WriteCallback);
internal SslAuthenticationOptions _sslAuthenticationOptions;
// Transport stream wrapped by this TLS state machine.
private Stream _innerStream;
// Lazily created read/write stream over the secure channel (see SecureStream).
private SslStreamInternal _secureStream;
// Guards against overlapping authentication calls.
private int _nestedAuth;
private SecureChannel _context;
private bool _handshakeCompleted;
private bool _shutdown;
private SecurityStatusPal _securityStatus;
// First fatal exception; once set, all further operations rethrow it.
private ExceptionDispatchInfo _exception;
private enum CachedSessionStatus : byte
{
    Unknown = 0,
    IsNotCached = 1,
    IsCached = 2,
    Renegotiated = 3
}
private CachedSessionStatus _CachedSession;
// This block is used by re-handshake code to buffer data decrypted with the old key.
private byte[] _queuedReadData;
private int _queuedReadCount;
private bool _pendingReHandshake;
// Cap on buffered old-key data; exceeding it aborts the re-handshake (see EnqueueOldKeyDecryptedData).
private const int MaxQueuedReadBytes = 1024 * 128;
//
// This block is used to rule the >>re-handshakes<< that are concurrent with read/write I/O requests.
//
private const int LockNone = 0;
private const int LockWrite = 1;
private const int LockHandshake = 2;
private const int LockPendingWrite = 3;
private const int LockRead = 4;
// NOTE(review): value 5 is intentionally unused here; presumably reserved — confirm against the full file.
private const int LockPendingRead = 6;
private int _lockWriteState;
private object _queuedWriteStateRequest;
private int _lockReadState;
private object _queuedReadStateRequest;
//
// The public Client and Server classes enforce the parameters rules before
// calling into this .ctor.
//
internal SslState(Stream innerStream)
{
    // Only captures the transport; the security context is created later by
    // ValidateCreateContext.
    _innerStream = innerStream;
}
// Validates client-side authentication options and creates the SecureChannel.
// Throws if a previous failure is recorded, if a context already exists
// (re-auth), or if this instance was set up as a server. An empty TargetHost
// is replaced by a unique "?<n>" placeholder.
internal void ValidateCreateContext(SslClientAuthenticationOptions sslClientAuthenticationOptions)
{
    // Rethrow any previously captured failure before doing anything else.
    if (_exception != null)
    {
        _exception.Throw();
    }
    if (Context != null && Context.IsValidContext)
    {
        throw new InvalidOperationException(SR.net_auth_reauth);
    }
    // A client-side call on an instance already configured as a server is a usage error.
    if (Context != null && IsServer)
    {
        throw new InvalidOperationException(SR.net_auth_client_server);
    }
    if (sslClientAuthenticationOptions.TargetHost == null)
    {
        throw new ArgumentNullException(nameof(sslClientAuthenticationOptions.TargetHost));
    }
    if (sslClientAuthenticationOptions.TargetHost.Length == 0)
    {
        sslClientAuthenticationOptions.TargetHost = "?" + Interlocked.Increment(ref s_uniqueNameInteger).ToString(NumberFormatInfo.InvariantInfo);
    }
    _exception = null;
    try
    {
        _sslAuthenticationOptions = new SslAuthenticationOptions(sslClientAuthenticationOptions);
        _context = new SecureChannel(_sslAuthenticationOptions);
    }
    catch (Win32Exception e)
    {
        // Surface native SSPI failures as AuthenticationException to callers.
        throw new AuthenticationException(SR.net_auth_SSPI, e);
    }
}
// Server-side counterpart of the overload above: validates options, requires
// a server certificate, and creates the SecureChannel.
internal void ValidateCreateContext(SslServerAuthenticationOptions sslServerAuthenticationOptions)
{
    // Rethrow any previously captured failure before doing anything else.
    if (_exception != null)
    {
        _exception.Throw();
    }
    if (Context != null && Context.IsValidContext)
    {
        throw new InvalidOperationException(SR.net_auth_reauth);
    }
    // A server-side call on an instance already configured as a client is a usage error.
    if (Context != null && !IsServer)
    {
        throw new InvalidOperationException(SR.net_auth_client_server);
    }
    if (sslServerAuthenticationOptions.ServerCertificate == null)
    {
        throw new ArgumentNullException(nameof(sslServerAuthenticationOptions.ServerCertificate));
    }
    _exception = null;
    try
    {
        _sslAuthenticationOptions = new SslAuthenticationOptions(sslServerAuthenticationOptions);
        _context = new SecureChannel(_sslAuthenticationOptions);
    }
    catch (Win32Exception e)
    {
        // Surface native SSPI failures as AuthenticationException to callers.
        throw new AuthenticationException(SR.net_auth_SSPI, e);
    }
}
// ALPN protocol negotiated during the handshake; default when no security
// context has been created yet.
internal SslApplicationProtocol NegotiatedApplicationProtocol
{
    get { return Context?.NegotiatedApplicationProtocol ?? default(SslApplicationProtocol); }
}
// True once a valid context exists, the handshake finished, and no fatal
// exception has been recorded.
internal bool IsAuthenticated
{
    get
    {
        return _context != null && _context.IsValidContext && _exception == null && HandshakeCompleted;
    }
}
// True when both sides presented a certificate. Uses the local-cert +
// remote-cert-available combination as a proxy (see inline note).
internal bool IsMutuallyAuthenticated
{
    get
    {
        return
            IsAuthenticated &&
            (Context.IsServer ? Context.LocalServerCertificate : Context.LocalClientCertificate) != null &&
            Context.IsRemoteCertificateAvailable; /* does not work: Context.IsMutualAuthFlag;*/
    }
}
// Whether a remote certificate is required; defaults to true before a context exists.
internal bool RemoteCertRequired
{
    get
    {
        return Context == null || Context.RemoteCertRequired;
    }
}
// True only after a server-side context has been created.
internal bool IsServer
{
    get
    {
        return Context != null && Context.IsServer;
    }
}
//
// This will return selected local cert for both client/server streams
//
internal X509Certificate LocalCertificate
{
    get
    {
        // Requires a completed, successful handshake.
        CheckThrow(true);
        return InternalLocalCertificate;
    }
}
// Same as LocalCertificate but without the authenticated-state check.
private X509Certificate InternalLocalCertificate
{
    get
    {
        return Context.IsServer ? Context.LocalServerCertificate : Context.LocalClientCertificate;
    }
}
// Returns the requested channel binding token from the security context,
// or null when no context has been created yet.
internal ChannelBinding GetChannelBinding(ChannelBindingKind kind) => Context?.GetChannelBinding(kind);
// True when a context exists and certificate revocation checking is enabled.
internal bool CheckCertRevocationStatus
{
    get
    {
        return Context != null && Context.CheckCertRevocationStatus != X509RevocationMode.NoCheck;
    }
}
// True after ShutdownAsync has completed the TLS close-notify exchange.
internal bool IsShutdown
{
    get
    {
        return _shutdown;
    }
}
// The six connection-info properties below share a pattern: require an
// authenticated state, then report a field of Context.ConnectionInfo, falling
// back to a "none"/zero value when no connection info is available yet.
internal CipherAlgorithmType CipherAlgorithm
{
    get
    {
        CheckThrow(true);
        SslConnectionInfo connectionInfo = Context.ConnectionInfo;
        return connectionInfo == null ? CipherAlgorithmType.None : (CipherAlgorithmType)connectionInfo.DataCipherAlg;
    }
}
internal int CipherStrength
{
    get
    {
        CheckThrow(true);
        SslConnectionInfo connectionInfo = Context.ConnectionInfo;
        return connectionInfo == null ? 0 : connectionInfo.DataKeySize;
    }
}
internal HashAlgorithmType HashAlgorithm
{
    get
    {
        CheckThrow(true);
        SslConnectionInfo connectionInfo = Context.ConnectionInfo;
        return connectionInfo == null ? (HashAlgorithmType)0 : (HashAlgorithmType)connectionInfo.DataHashAlg;
    }
}
internal int HashStrength
{
    get
    {
        CheckThrow(true);
        SslConnectionInfo connectionInfo = Context.ConnectionInfo;
        return connectionInfo == null ? 0 : connectionInfo.DataHashKeySize;
    }
}
internal ExchangeAlgorithmType KeyExchangeAlgorithm
{
    get
    {
        CheckThrow(true);
        SslConnectionInfo connectionInfo = Context.ConnectionInfo;
        return connectionInfo == null ? (ExchangeAlgorithmType)0 : (ExchangeAlgorithmType)connectionInfo.KeyExchangeAlg;
    }
}
internal int KeyExchangeStrength
{
    get
    {
        CheckThrow(true);
        SslConnectionInfo connectionInfo = Context.ConnectionInfo;
        return connectionInfo == null ? 0 : connectionInfo.KeyExchKeySize;
    }
}
// Maps the raw protocol bits reported by the native layer onto the public
// SslProtocols flags, preserving the deprecated Ssl2/Ssl3 values.
internal SslProtocols SslProtocol
{
    get
    {
        CheckThrow(true);
        SslConnectionInfo info = Context.ConnectionInfo;
        if (info == null)
        {
            return SslProtocols.None;
        }
        SslProtocols proto = (SslProtocols)info.Protocol;
        SslProtocols ret = SslProtocols.None;
#pragma warning disable 0618 // Ssl2, Ssl3 are deprecated.
        // Restore client/server bits so the result maps exactly on published constants.
        if ((proto & SslProtocols.Ssl2) != 0)
        {
            ret |= SslProtocols.Ssl2;
        }
        if ((proto & SslProtocols.Ssl3) != 0)
        {
            ret |= SslProtocols.Ssl3;
        }
#pragma warning restore
        if ((proto & SslProtocols.Tls) != 0)
        {
            ret |= SslProtocols.Tls;
        }
        if ((proto & SslProtocols.Tls11) != 0)
        {
            ret |= SslProtocols.Tls11;
        }
        if ((proto & SslProtocols.Tls12) != 0)
        {
            ret |= SslProtocols.Tls12;
        }
        return ret;
    }
}
// The wrapped transport stream.
internal Stream InnerStream
{
    get
    {
        return _innerStream;
    }
}
// Lazily creates the stream that performs encrypted reads/writes. Requires
// an authenticated state.
internal SslStreamInternal SecureStream
{
    get
    {
        CheckThrow(true);
        if (_secureStream == null)
        {
            // CompareExchange makes concurrent first access create exactly one instance.
            Interlocked.CompareExchange<SslStreamInternal>(ref _secureStream, new SslStreamInternal(this), null);
        }
        return _secureStream;
    }
}
// Maximum plaintext size that fits into a single TLS record for this context.
internal int MaxDataSize
{
    get
    {
        return Context.MaxDataSize;
    }
}
// Records the first fatal exception for this instance and tears down the
// security context. Subsequent failures never overwrite the first one;
// the originally captured exception is always returned.
private ExceptionDispatchInfo SetException(Exception e)
{
    Debug.Assert(e != null, $"Expected non-null Exception to be passed to {nameof(SetException)}");
    if (_exception == null)
    {
        _exception = ExceptionDispatchInfo.Capture(e);
    }
    // An exception is recorded at this point (either just captured or
    // pre-existing), so close the context if one exists.
    Context?.Close();
    return _exception;
}
// Set once the TLS handshake has fully completed.
private bool HandshakeCompleted
{
    get
    {
        return _handshakeCompleted;
    }
}
// The underlying security context; null before ValidateCreateContext runs.
private SecureChannel Context
{
    get
    {
        return _context;
    }
}
// Common state guard: rethrows any recorded fatal exception, then optionally
// requires that authentication succeeded and that the stream is not shut down.
internal void CheckThrow(bool authSuccessCheck, bool shutdownCheck = false)
{
    if (_exception != null)
    {
        _exception.Throw();
    }
    if (authSuccessCheck && !IsAuthenticated)
    {
        throw new InvalidOperationException(SR.net_auth_noauth);
    }
    if (shutdownCheck && _shutdown)
    {
        throw new InvalidOperationException(SR.net_ssl_io_already_shutdown);
    }
}
// Flushes any buffered data on the wrapped transport stream.
internal void Flush() => InnerStream.Flush();
// Asynchronously flushes any buffered data on the wrapped transport stream.
internal Task FlushAsync(CancellationToken cancellationToken) => InnerStream.FlushAsync(cancellationToken);
//
// This is to not depend on GC&SafeHandle class if the context is not needed anymore.
//
internal void Close()
{
    // Poison the instance so any later operation rethrows ObjectDisposedException,
    // then release the security context if one was created.
    _exception = ExceptionDispatchInfo.Capture(new ObjectDisposedException("SslStream"));
    Context?.Close();
}
/// <summary>Encrypts <paramref name="buffer"/> into <paramref name="outBuffer"/>; requires a completed handshake.</summary>
internal SecurityStatusPal EncryptData(ReadOnlyMemory<byte> buffer, ref byte[] outBuffer, out int outSize)
{
    CheckThrow(authSuccessCheck: true);
    return Context.Encrypt(buffer, ref outBuffer, out outSize);
}
/// <summary>Decrypts a record in place; requires a completed handshake. Adjusts offset/count to the plaintext span.</summary>
internal SecurityStatusPal DecryptData(byte[] buffer, ref int offset, ref int count)
{
    CheckThrow(authSuccessCheck: true);
    return PrivateDecryptData(buffer, ref offset, ref count);
}
/// <summary>Decrypt without the auth check — used internally while a renegotiation is pending.</summary>
private SecurityStatusPal PrivateDecryptData(byte[] buffer, ref int offset, ref int count) =>
    Context.Decrypt(buffer, ref offset, ref count);
//
// Called by re-handshake if found data decrypted with the old key
//
// Buffers plaintext that was decrypted with the pre-renegotiation key so a later
// read can drain it via CheckOldKeyDecryptedData. Returns an IOException (NOT
// thrown) when the queue cap would be exceeded; null on success.
private Exception EnqueueOldKeyDecryptedData(byte[] buffer, int offset, int count)
{
lock (this)
{
// Cap queued "old key" plaintext so a misbehaving peer cannot grow it unboundedly.
if (_queuedReadCount + count > MaxQueuedReadBytes)
{
return new IOException(SR.Format(SR.net_auth_ignored_reauth, MaxQueuedReadBytes.ToString(NumberFormatInfo.CurrentInfo)));
}
if (count != 0)
{
// This is inefficient yet simple and that should be a rare case of receiving data encrypted with "old" key.
_queuedReadData = EnsureBufferSize(_queuedReadData, _queuedReadCount, _queuedReadCount + count);
Buffer.BlockCopy(buffer, offset, _queuedReadData, _queuedReadCount, count);
_queuedReadCount += count;
// Wake any read parked on the handshake; keep the read lock in handshake state.
FinishHandshakeRead(LockHandshake);
}
}
return null;
}
//
// When re-handshaking the "old" key decrypted data are queued until the handshake is done.
// When stream calls for decryption we will feed it queued data left from "old" encryption key.
//
// Must be called under the lock in case concurrent handshake is going.
//
// Returns the number of bytes copied into <paramref name="buffer"/>, or -1 when
// no queued data exists (caller should proceed with normal I/O).
internal int CheckOldKeyDecryptedData(Memory<byte> buffer)
{
CheckThrow(true);
if (_queuedReadData != null)
{
// This is inefficient yet simple and should be a REALLY rare case.
int toCopy = Math.Min(_queuedReadCount, buffer.Length);
new Span<byte>(_queuedReadData, 0, toCopy).CopyTo(buffer.Span);
_queuedReadCount -= toCopy;
if (_queuedReadCount == 0)
{
_queuedReadData = null;
}
else
{
// Compact the remainder to the front of the queue buffer.
Buffer.BlockCopy(_queuedReadData, toCopy, _queuedReadData, 0, _queuedReadCount);
}
return toCopy;
}
return -1;
}
//
// This method assumes that a SSPI context is already in a good shape.
// For example it is either a fresh context or already authenticated context that needs renegotiation.
//
/// <summary>
/// Drives a (re)handshake. A null <paramref name="lazyResult"/> means the synchronous
/// path; otherwise the APM async path. Nested calls are rejected via _nestedAuth.
/// </summary>
internal void ProcessAuthentication(LazyAsyncResult lazyResult)
{
    if (Interlocked.Exchange(ref _nestedAuth, 1) == 1)
    {
        throw new InvalidOperationException(SR.Format(SR.net_io_invalidnestedcall, lazyResult == null ? "BeginAuthenticate" : "Authenticate", "authenticate"));
    }
    try
    {
        CheckThrow(false);
        AsyncProtocolRequest asyncRequest = null;
        if (lazyResult != null)
        {
            asyncRequest = new AsyncProtocolRequest(lazyResult);
            asyncRequest.Buffer = null;
#if DEBUG
            lazyResult._debugAsyncChain = asyncRequest;
#endif
        }
        // A trick to discover and avoid cached sessions.
        _CachedSession = CachedSessionStatus.Unknown;
        ForceAuthentication(Context.IsServer, null, asyncRequest);
        // Not async, so the connection is completed at this point.
        // (Fix: the original re-tested NetEventSource.IsEnabled inside this branch,
        // which was redundant — the outer condition already guarantees it.)
        if (lazyResult == null && NetEventSource.IsEnabled)
        {
            NetEventSource.Log.SspiSelectedCipherSuite(nameof(ProcessAuthentication),
                SslProtocol,
                CipherAlgorithm,
                CipherStrength,
                HashAlgorithm,
                HashStrength,
                KeyExchangeAlgorithm,
                KeyExchangeStrength);
        }
    }
    catch (Exception)
    {
        // If an exception emerges synchronously, the asynchronous operation was not
        // initiated, so no operation is in progress.
        _nestedAuth = 0;
        throw;
    }
    finally
    {
        // For synchronous operations, the operation has completed.
        if (lazyResult == null)
        {
            _nestedAuth = 0;
        }
    }
}
//
// This is used to reply on re-handshake when received SEC_I_RENEGOTIATE on Read().
//
// Takes the read lock into handshake state; if a concurrent handshake already
// owns it, hands the decrypted buffer to that handshake instead of starting one.
internal void ReplyOnReAuthentication(byte[] buffer)
{
lock (this)
{
// Note we are already inside the read, so checking for already going concurrent handshake.
_lockReadState = LockHandshake;
if (_pendingReHandshake)
{
// A concurrent handshake is pending, resume.
FinishRead(buffer);
return;
}
}
// Start rehandshake from here.
// Forcing async mode. The caller will queue another Read as soon as we return using its preferred
// calling convention, which will be woken up when the handshake completes. The callback is just
// to capture any SocketErrors that happen during the handshake so they can be surfaced from the Read.
AsyncProtocolRequest asyncRequest = new AsyncProtocolRequest(new LazyAsyncResult(this, null, new AsyncCallback(RehandshakeCompleteCallback)));
// Buffer contains a result from DecryptMessage that will be passed to ISC/ASC
asyncRequest.Buffer = buffer;
ForceAuthentication(false, buffer, asyncRequest);
}
//
// This method attempts to start authentication.
// Incoming buffer is either null or is the result of "renegotiate" decrypted message
// If write is in progress the method will either wait or be put on hold
//
// receiveFirst: true when acting as the server (wait for the client hello first).
// On failure, framing/handshake state is reset and the FIRST stored exception is
// rethrown (SetException guarantees first-wins); waiting I/O is always released.
private void ForceAuthentication(bool receiveFirst, byte[] buffer, AsyncProtocolRequest asyncRequest)
{
if (CheckEnqueueHandshake(buffer, asyncRequest))
{
// Async handshake is enqueued and will resume later.
return;
}
// Either Sync handshake is ready to go or async handshake won the race over write.
// This will tell that we don't know the framing yet (what SSL version is)
_Framing = Framing.Unknown;
try
{
if (receiveFirst)
{
// Listen for a client blob.
StartReceiveBlob(buffer, asyncRequest);
}
else
{
// We start with the first blob.
StartSendBlob(buffer, (buffer == null ? 0 : buffer.Length), asyncRequest);
}
}
catch (Exception e)
{
// Failed auth, reset the framing if any.
_Framing = Framing.Unknown;
_handshakeCompleted = false;
// If our exception was the one recorded, rethrow to keep its stack; otherwise
// replay the earlier stored exception so all callers see a consistent failure.
if (SetException(e).SourceException == e)
{
throw;
}
else
{
_exception.Throw();
}
}
finally
{
if (_exception != null)
{
// This a failed handshake. Release waiting IO if any.
FinishHandshake(null, null);
}
}
}
/// <summary>
/// Completes an APM-style authentication started earlier. Validates the result
/// object, clears the nested-call guard, and rethrows any handshake failure.
/// (Argument-name strings intentionally say "asyncResult" to match the public
/// Begin/EndAuthenticate signature this wraps.)
/// </summary>
internal void EndProcessAuthentication(IAsyncResult result)
{
    if (result == null)
    {
        throw new ArgumentNullException("asyncResult");
    }
    LazyAsyncResult lazyResult = result as LazyAsyncResult;
    if (lazyResult == null)
    {
        throw new ArgumentException(SR.Format(SR.net_io_async_result, result.GetType().FullName), "asyncResult");
    }
    if (Interlocked.Exchange(ref _nestedAuth, 0) == 0)
    {
        throw new InvalidOperationException(SR.Format(SR.net_io_invalidendcall, "EndAuthenticate"));
    }
    InternalEndProcessAuthentication(lazyResult);
    // Connection is completed at this point.
    // (Fix: removed a redundant second NetEventSource.IsEnabled test that was
    // nested inside this already-guarded branch.)
    if (NetEventSource.IsEnabled)
    {
        NetEventSource.Log.SspiSelectedCipherSuite(nameof(EndProcessAuthentication),
            SslProtocol,
            CipherAlgorithm,
            CipherStrength,
            HashAlgorithm,
            HashStrength,
            KeyExchangeAlgorithm,
            KeyExchangeStrength);
    }
}
/// <summary>
/// Blocks until the pending handshake completes and rethrows any recorded
/// failure, resetting framing state first. No artificial timeout is applied;
/// the inner stream governs timing.
/// </summary>
internal void InternalEndProcessAuthentication(LazyAsyncResult lazyResult)
{
    // No "artificial" timeouts implemented so far, InnerStream controls that.
    lazyResult.InternalWaitForCompletion();

    if (lazyResult.Result is Exception e)
    {
        // Failed auth, reset the framing if any.
        _Framing = Framing.Unknown;
        _handshakeCompleted = false;
        SetException(e).Throw();
    }
}
//
// Client side starts here, but server also loops through this method.
//
// Sends the next handshake token (if any) to the peer, then either completes the
// handshake or loops back to receive the peer's reply. In async mode a pending
// transport write parks here and resumes in WriteCallback.
private void StartSendBlob(byte[] incoming, int count, AsyncProtocolRequest asyncRequest)
{
ProtocolToken message = Context.NextMessage(incoming, 0, count);
_securityStatus = message.Status;
if (message.Size != 0)
{
if (Context.IsServer && _CachedSession == CachedSessionStatus.Unknown)
{
//
//[Schannel] If the first call to ASC returns a token less than 200 bytes,
// then it's a reconnect (a handshake based on a cache entry).
//
_CachedSession = message.Size < 200 ? CachedSessionStatus.IsCached : CachedSessionStatus.IsNotCached;
}
if (_Framing == Framing.Unified)
{
_Framing = DetectFraming(message.Payload, message.Payload.Length);
}
if (asyncRequest == null)
{
InnerStream.Write(message.Payload, 0, message.Size);
}
else
{
// Stash the token so WriteCallback can continue with CheckCompletionBeforeNextReceive.
asyncRequest.AsyncState = message;
Task t = InnerStream.WriteAsync(message.Payload, 0, message.Size);
if (t.IsCompleted)
{
t.GetAwaiter().GetResult();
}
else
{
IAsyncResult ar = TaskToApm.Begin(t, s_writeCallback, asyncRequest);
if (!ar.CompletedSynchronously)
{
#if DEBUG
asyncRequest._DebugAsyncChain = ar;
#endif
// Truly async: WriteCallback takes over from here.
return;
}
TaskToApm.End(ar);
}
}
}
CheckCompletionBeforeNextReceive(message, asyncRequest);
}
//
// This will check and logically complete / fail the auth handshake.
//
// Three outcomes: send an alert and fail; finish a successful handshake; or loop
// back to receive the next handshake blob from the peer.
private void CheckCompletionBeforeNextReceive(ProtocolToken message, AsyncProtocolRequest asyncRequest)
{
if (message.Failed)
{
StartSendAuthResetSignal(null, asyncRequest, ExceptionDispatchInfo.Capture(new AuthenticationException(SR.net_auth_SSPI, message.GetException())));
return;
}
else if (message.Done && !_pendingReHandshake)
{
ProtocolToken alertToken = null;
if (!CompleteHandshake(ref alertToken))
{
// Certificate validation failed; alertToken (if any) is sent to the peer.
StartSendAuthResetSignal(alertToken, asyncRequest, ExceptionDispatchInfo.Capture(new AuthenticationException(SR.net_ssl_io_cert_validation, null)));
return;
}
// Release waiting IO if any. Presumably it should not throw.
// Otherwise application may get not expected type of the exception.
FinishHandshake(null, asyncRequest);
return;
}
StartReceiveBlob(message.Payload, asyncRequest);
}
//
// Server side starts here, but client also loops through this method.
//
// Reads the fixed-size record header from the transport, then hands off to
// StartReadFrame to pull the rest of the frame. During a pending re-handshake
// the buffer may instead come from a queued read (CheckEnqueueHandshakeRead).
private void StartReceiveBlob(byte[] buffer, AsyncProtocolRequest asyncRequest)
{
if (_pendingReHandshake)
{
if (CheckEnqueueHandshakeRead(ref buffer, asyncRequest))
{
// Queued behind an in-flight user read; resumes later.
return;
}
if (!_pendingReHandshake)
{
// Renegotiate: proceed to the next step.
ProcessReceivedBlob(buffer, buffer.Length, asyncRequest);
return;
}
}
//This is first server read.
buffer = EnsureBufferSize(buffer, 0, SecureChannel.ReadHeaderSize);
int readBytes = 0;
if (asyncRequest == null)
{
readBytes = FixedSizeReader.ReadPacket(_innerStream, buffer, 0, SecureChannel.ReadHeaderSize);
}
else
{
asyncRequest.SetNextRequest(buffer, 0, SecureChannel.ReadHeaderSize, s_partialFrameCallback);
FixedSizeReader.ReadPacketAsync(_innerStream, asyncRequest);
if (!asyncRequest.MustCompleteSynchronously)
{
// Truly async: PartialFrameCallback resumes in StartReadFrame.
return;
}
readBytes = asyncRequest.Result;
}
StartReadFrame(buffer, readBytes, asyncRequest);
}
//
// Completes one handshake frame: detects framing on the first packet, computes
// the remaining frame size from the header, reads the rest, and forwards the
// whole frame to ProcessReceivedBlob. Throws on EOF or malformed headers.
private void StartReadFrame(byte[] buffer, int readBytes, AsyncProtocolRequest asyncRequest)
{
if (readBytes == 0)
{
// EOF received
throw new IOException(SR.net_auth_eof);
}
if (_Framing == Framing.Unknown)
{
_Framing = DetectFraming(buffer, readBytes);
}
int restBytes = GetRemainingFrameSize(buffer, 0, readBytes);
if (restBytes < 0)
{
throw new IOException(SR.net_ssl_io_frame);
}
if (restBytes == 0)
{
// EOF received
throw new AuthenticationException(SR.net_auth_eof, null);
}
buffer = EnsureBufferSize(buffer, readBytes, readBytes + restBytes);
if (asyncRequest == null)
{
restBytes = FixedSizeReader.ReadPacket(_innerStream, buffer, readBytes, restBytes);
}
else
{
asyncRequest.SetNextRequest(buffer, readBytes, restBytes, s_readFrameCallback);
FixedSizeReader.ReadPacketAsync(_innerStream, asyncRequest);
if (!asyncRequest.MustCompleteSynchronously)
{
// Truly async: ReadFrameCallback resumes in ProcessReceivedBlob.
return;
}
restBytes = asyncRequest.Result;
if (restBytes == 0)
{
//EOF received: fail.
readBytes = 0;
}
}
ProcessReceivedBlob(buffer, readBytes + restBytes, asyncRequest);
}
// Feeds one received frame into the handshake. During a pending re-handshake the
// frame may still be application data under the old key: decrypt it, queue the
// plaintext, and keep reading until the peer's renegotiate signal arrives.
private void ProcessReceivedBlob(byte[] buffer, int count, AsyncProtocolRequest asyncRequest)
{
if (count == 0)
{
// EOF received.
throw new AuthenticationException(SR.net_auth_eof, null);
}
if (_pendingReHandshake)
{
int offset = 0;
SecurityStatusPal status = PrivateDecryptData(buffer, ref offset, ref count);
if (status.ErrorCode == SecurityStatusPalErrorCode.OK)
{
// Still "old key" app data; stash it for later reads and keep listening.
Exception e = EnqueueOldKeyDecryptedData(buffer, offset, count);
if (e != null)
{
StartSendAuthResetSignal(null, asyncRequest, ExceptionDispatchInfo.Capture(e));
return;
}
_Framing = Framing.Unknown;
StartReceiveBlob(buffer, asyncRequest);
return;
}
else if (status.ErrorCode != SecurityStatusPalErrorCode.Renegotiate)
{
// Fail re-handshake.
ProtocolToken message = new ProtocolToken(null, status);
StartSendAuthResetSignal(null, asyncRequest, ExceptionDispatchInfo.Capture(new AuthenticationException(SR.net_auth_SSPI, message.GetException())));
return;
}
// We expect only handshake messages from now.
_pendingReHandshake = false;
if (offset != 0)
{
// Shift the handshake payload to the buffer start for NextMessage.
Buffer.BlockCopy(buffer, offset, buffer, 0, count);
}
}
StartSendBlob(buffer, count, asyncRequest);
}
//
// This is to reset auth state on remote side.
// If this write succeeds we will allow auth retrying.
//
// Sends the alert token (when available) and then always raises the captured
// exception; the exception surfaces either here (sync) or via WriteCallback
// (async) which finds it in asyncRequest.AsyncState.
private void StartSendAuthResetSignal(ProtocolToken message, AsyncProtocolRequest asyncRequest, ExceptionDispatchInfo exception)
{
if (message == null || message.Size == 0)
{
//
// We don't have an alert to send so cannot retry and fail prematurely.
//
exception.Throw();
}
if (asyncRequest == null)
{
InnerStream.Write(message.Payload, 0, message.Size);
}
else
{
asyncRequest.AsyncState = exception;
Task t = InnerStream.WriteAsync(message.Payload, 0, message.Size);
if (t.IsCompleted)
{
t.GetAwaiter().GetResult();
}
else
{
IAsyncResult ar = TaskToApm.Begin(t, s_writeCallback, asyncRequest);
if (!ar.CompletedSynchronously)
{
// WriteCallback will rethrow the exception stored in AsyncState.
return;
}
TaskToApm.End(ar);
}
}
exception.Throw();
}
// - Loads the channel parameters
// - Optionally verifies the Remote Certificate
// - Sets HandshakeCompleted flag
// - Sets the guarding event if other thread is waiting for
// handshake completion
//
// - Returns false if failed to verify the Remote Cert
//
// On validation failure, alertToken may be filled with an alert for the peer.
private bool CompleteHandshake(ref ProtocolToken alertToken)
{
if (NetEventSource.IsEnabled)
NetEventSource.Enter(this);
Context.ProcessHandshakeSuccess();
if (!Context.VerifyRemoteCertificate(_sslAuthenticationOptions.CertValidationDelegate, ref alertToken))
{
_handshakeCompleted = false;
if (NetEventSource.IsEnabled)
NetEventSource.Exit(this, false);
return false;
}
_handshakeCompleted = true;
if (NetEventSource.IsEnabled)
NetEventSource.Exit(this, true);
return true;
}
// APM completion for handshake writes started in StartSendBlob /
// StartSendAuthResetSignal. AsyncState is either the ProtocolToken to continue
// with, or an ExceptionDispatchInfo to rethrow (the reset-signal path).
private static void WriteCallback(IAsyncResult transportResult)
{
if (transportResult.CompletedSynchronously)
{
// Sync completions were already handled inline by the caller.
return;
}
AsyncProtocolRequest asyncRequest;
SslState sslState;
#if DEBUG
try
{
#endif
asyncRequest = (AsyncProtocolRequest)transportResult.AsyncState;
sslState = (SslState)asyncRequest.AsyncObject;
#if DEBUG
}
catch (Exception exception) when (!ExceptionCheck.IsFatal(exception))
{
NetEventSource.Fail(null, $"Exception while decoding context: {exception}");
throw;
}
#endif
// Async completion.
try
{
TaskToApm.End(transportResult);
// Special case for an error notification.
object asyncState = asyncRequest.AsyncState;
ExceptionDispatchInfo exception = asyncState as ExceptionDispatchInfo;
if (exception != null)
{
exception.Throw();
}
sslState.CheckCompletionBeforeNextReceive((ProtocolToken)asyncState, asyncRequest);
}
catch (Exception e)
{
if (asyncRequest.IsUserCompleted)
{
// This will throw on a worker thread.
throw;
}
sslState.FinishHandshake(e, asyncRequest);
}
}
// Async-only completion for the header read issued in StartReceiveBlob;
// continues the handshake by reading the rest of the frame.
private static void PartialFrameCallback(AsyncProtocolRequest asyncRequest)
{
if (NetEventSource.IsEnabled)
NetEventSource.Enter(null);
// Async ONLY completion.
SslState sslState = (SslState)asyncRequest.AsyncObject;
try
{
sslState.StartReadFrame(asyncRequest.Buffer, asyncRequest.Result, asyncRequest);
}
catch (Exception e)
{
if (asyncRequest.IsUserCompleted)
{
// This will throw on a worker thread.
throw;
}
sslState.FinishHandshake(e, asyncRequest);
}
}
//
// Async-only completion for the frame-body read issued in StartReadFrame;
// forwards the full frame (or a zero count on EOF) to ProcessReceivedBlob.
//
private static void ReadFrameCallback(AsyncProtocolRequest asyncRequest)
{
if (NetEventSource.IsEnabled)
NetEventSource.Enter(null);
// Async ONLY completion.
SslState sslState = (SslState)asyncRequest.AsyncObject;
try
{
if (asyncRequest.Result == 0)
{
//EOF received: will fail.
asyncRequest.Offset = 0;
}
sslState.ProcessReceivedBlob(asyncRequest.Buffer, asyncRequest.Offset + asyncRequest.Result, asyncRequest);
}
catch (Exception e)
{
if (asyncRequest.IsUserCompleted)
{
// This will throw on a worker thread.
throw;
}
sslState.FinishHandshake(e, asyncRequest);
}
}
// During re-handshake: if a user read currently owns the read lock, park the
// handshake behind it. Returns true when queued (async path resumes later);
// false when the handshake may proceed — for the sync path, after blocking
// here until the read hands over its buffer.
private bool CheckEnqueueHandshakeRead(ref byte[] buffer, AsyncProtocolRequest request)
{
LazyAsyncResult lazyResult = null;
lock (this)
{
if (_lockReadState == LockPendingRead)
{
return false;
}
int lockState = Interlocked.Exchange(ref _lockReadState, LockHandshake);
if (lockState != LockRead)
{
// No read in flight; the handshake owns the lock now.
return false;
}
if (request != null)
{
// Async: FinishRead will requeue this request on a worker thread.
_queuedReadStateRequest = request;
return true;
}
lazyResult = new LazyAsyncResult(null, null, /*must be */ null);
_queuedReadStateRequest = lazyResult;
}
// Need to exit from lock before waiting.
lazyResult.InternalWaitForCompletion();
// The completing read passes its buffer through the result.
buffer = (byte[])lazyResult.Result;
return false;
}
// Transitions the read lock out of handshake ownership. If a user read was
// parked (LockPendingRead), grants it the lock and wakes it; otherwise just
// installs newState.
private void FinishHandshakeRead(int newState)
{
lock (this)
{
// Lock is redundant here. Included for clarity.
int lockState = Interlocked.Exchange(ref _lockReadState, newState);
if (lockState != LockPendingRead)
{
return;
}
_lockReadState = LockRead;
HandleQueuedCallback(ref _queuedReadStateRequest);
}
}
// Returns:
// -1 - proceed
// 0 - queued
// X - some bytes are ready, no need for IO
//
// Sync read entry: takes the read lock unless a handshake owns it, in which
// case this blocks until the handshake finishes, draining any "old key"
// plaintext queued during renegotiation along the way.
internal int CheckEnqueueRead(Memory<byte> buffer)
{
int lockState = Interlocked.CompareExchange(ref _lockReadState, LockRead, LockNone);
if (lockState != LockHandshake)
{
// Proceed, no concurrent handshake is ongoing so no need for a lock.
return CheckOldKeyDecryptedData(buffer);
}
LazyAsyncResult lazyResult = null;
lock (this)
{
int result = CheckOldKeyDecryptedData(buffer);
if (result != -1)
{
return result;
}
// Check again under lock.
if (_lockReadState != LockHandshake)
{
// The other thread has finished before we grabbed the lock.
_lockReadState = LockRead;
return -1;
}
_lockReadState = LockPendingRead;
lazyResult = new LazyAsyncResult(null, null, /*must be */ null);
_queuedReadStateRequest = lazyResult;
}
// Need to exit from lock before waiting.
lazyResult.InternalWaitForCompletion();
lock (this)
{
// Handshake finished; pick up any plaintext it queued for us.
return CheckOldKeyDecryptedData(buffer);
}
}
// Async counterpart of CheckEnqueueRead: same -1/queued/X contract, but when a
// handshake owns the read lock this returns an unfinished ValueTask that
// HandleQueuedCallback completes with the old-key-data result.
internal ValueTask<int> CheckEnqueueReadAsync(Memory<byte> buffer)
{
int lockState = Interlocked.CompareExchange(ref _lockReadState, LockRead, LockNone);
if (lockState != LockHandshake)
{
// Proceed, no concurrent handshake is ongoing so no need for a lock.
return new ValueTask<int>(CheckOldKeyDecryptedData(buffer));
}
lock (this)
{
int result = CheckOldKeyDecryptedData(buffer);
if (result != -1)
{
return new ValueTask<int>(result);
}
// Check again under lock.
if (_lockReadState != LockHandshake)
{
// The other thread has finished before we grabbed the lock.
_lockReadState = LockRead;
return new ValueTask<int>(-1);
}
_lockReadState = LockPendingRead;
// AsyncState carries the destination buffer so the handshake-completion path
// can copy queued plaintext into it before completing the task.
TaskCompletionSource<int> taskCompletionSource = new TaskCompletionSource<int>(buffer, TaskCreationOptions.RunContinuationsAsynchronously);
_queuedReadStateRequest = taskCompletionSource;
return new ValueTask<int>(taskCompletionSource.Task);
}
}
// Releases the read lock after a user read. If a handshake is waiting for it,
// hands over renegotiateBuffer (decrypted renegotiate payload, may be null) to
// the parked handshake — either a sync waiter or an async request resumed on
// the thread pool.
internal void FinishRead(byte[] renegotiateBuffer)
{
int lockState = Interlocked.CompareExchange(ref _lockReadState, LockNone, LockRead);
if (lockState != LockHandshake)
{
return;
}
lock (this)
{
LazyAsyncResult ar = _queuedReadStateRequest as LazyAsyncResult;
if (ar != null)
{
_queuedReadStateRequest = null;
// Sync handshake waiter: deliver the buffer via the result.
ar.InvokeCallback(renegotiateBuffer);
}
else
{
AsyncProtocolRequest request = (AsyncProtocolRequest)_queuedReadStateRequest;
request.Buffer = renegotiateBuffer;
_queuedReadStateRequest = null;
// Resume the async handshake off this thread to avoid re-entrancy.
ThreadPool.QueueUserWorkItem(new WaitCallback(AsyncResumeHandshakeRead), request);
}
}
}
// Async write entry: takes the write lock, or — when a handshake owns it —
// returns an unfinished Task that FinishHandshake completes once the write
// lock is handed over. (Note: unlike the sync overload, this one does not
// clear _queuedWriteStateRequest up front.)
internal Task CheckEnqueueWriteAsync()
{
int lockState = Interlocked.CompareExchange(ref _lockWriteState, LockWrite, LockNone);
if (lockState != LockHandshake)
{
return Task.CompletedTask;
}
lock (this)
{
if (_lockWriteState != LockHandshake)
{
// Handshake finished before we grabbed the lock; surface any failure.
CheckThrow(authSuccessCheck: true);
return Task.CompletedTask;
}
_lockWriteState = LockPendingWrite;
TaskCompletionSource<int> completionSource = new TaskCompletionSource<int>(TaskCreationOptions.RunContinuationsAsynchronously);
_queuedWriteStateRequest = completionSource;
return completionSource.Task;
}
}
// Sync write entry: takes the write lock, or blocks until a concurrent
// handshake releases it. Always re-validates session health before returning.
internal void CheckEnqueueWrite()
{
// Clear previous request.
_queuedWriteStateRequest = null;
int lockState = Interlocked.CompareExchange(ref _lockWriteState, LockWrite, LockNone);
if (lockState != LockHandshake)
{
// Proceed with write.
return;
}
LazyAsyncResult lazyResult = null;
lock (this)
{
if (_lockWriteState != LockHandshake)
{
// Handshake has completed before we grabbed the lock.
CheckThrow(authSuccessCheck: true);
return;
}
_lockWriteState = LockPendingWrite;
lazyResult = new LazyAsyncResult(null, null, /*must be */null);
_queuedWriteStateRequest = lazyResult;
}
// Need to exit from lock before waiting.
lazyResult.InternalWaitForCompletion();
CheckThrow(authSuccessCheck: true);
return;
}
// Releases the write lock after a user write; if a handshake was waiting for
// it (the CAS fails because the state is LockHandshake), wakes the queued
// handshake request.
internal void FinishWrite()
{
int lockState = Interlocked.CompareExchange(ref _lockWriteState, LockNone, LockWrite);
if (lockState != LockHandshake)
{
return;
}
lock (this)
{
HandleQueuedCallback(ref _queuedWriteStateRequest);
}
}
// Wakes whatever was parked in a queued-state slot. The slot may hold a sync
// waiter (LazyAsyncResult), an async read waiter (TaskCompletionSource, whose
// AsyncState — when set — is the Memory<byte> destination for old-key data),
// or an AsyncProtocolRequest to resume the handshake on the thread pool.
private void HandleQueuedCallback(ref object queuedStateRequest)
{
object obj = queuedStateRequest;
if (obj == null)
{
return;
}
// Clear first so a reentrant call cannot wake the same waiter twice.
queuedStateRequest = null;
switch (obj)
{
case LazyAsyncResult lazy:
lazy.InvokeCallback();
break;
case TaskCompletionSource<int> taskCompletionSource when taskCompletionSource.Task.AsyncState != null:
Memory<byte> array = (Memory<byte>)taskCompletionSource.Task.AsyncState;
taskCompletionSource.SetResult(CheckOldKeyDecryptedData(array));
break;
case TaskCompletionSource<int> taskCompletionSource:
taskCompletionSource.SetResult(0);
break;
default:
ThreadPool.QueueUserWorkItem(new WaitCallback(AsyncResumeHandshake), obj);
break;
}
}
// Returns:
// true - operation queued
// false - operation can proceed
//
// Lets a starting handshake contend with an in-flight user write: if a write
// owns the lock, the handshake is parked (async) or blocks here (sync) until
// FinishWrite hands the lock over.
private bool CheckEnqueueHandshake(byte[] buffer, AsyncProtocolRequest asyncRequest)
{
LazyAsyncResult lazyResult = null;
lock (this)
{
if (_lockWriteState == LockPendingWrite)
{
return false;
}
int lockState = Interlocked.Exchange(ref _lockWriteState, LockHandshake);
if (lockState != LockWrite)
{
// Proceed with handshake.
return false;
}
if (asyncRequest != null)
{
// Async: FinishWrite -> HandleQueuedCallback resumes this request.
asyncRequest.Buffer = buffer;
_queuedWriteStateRequest = asyncRequest;
return true;
}
lazyResult = new LazyAsyncResult(null, null, /*must be*/null);
_queuedWriteStateRequest = lazyResult;
}
// Block outside the lock until the write completes.
lazyResult.InternalWaitForCompletion();
return false;
}
// Completes a handshake (successful when e is null, failed otherwise):
// records the failure, releases any parked read, hands the write lock back to
// a pending write, and finally completes the user's async request.
private void FinishHandshake(Exception e, AsyncProtocolRequest asyncRequest)
{
try
{
lock (this)
{
if (e != null)
{
SetException(e);
}
// Release read if any.
FinishHandshakeRead(LockNone);
// If there is a pending write we want to keep it's lock state.
int lockState = Interlocked.CompareExchange(ref _lockWriteState, LockNone, LockHandshake);
if (lockState != LockPendingWrite)
{
return;
}
_lockWriteState = LockWrite;
HandleQueuedCallback(ref _queuedWriteStateRequest);
}
}
finally
{
// Always complete the user-visible operation, success or failure.
if (asyncRequest != null)
{
if (e != null)
{
asyncRequest.CompleteUserWithError(e);
}
else
{
asyncRequest.CompleteUser();
}
}
}
}
/// <summary>
/// Returns <paramref name="buffer"/> if it is non-null and already at least
/// <paramref name="size"/> bytes; otherwise allocates a new buffer of exactly
/// <paramref name="size"/> bytes, preserving the first <paramref name="copyCount"/>
/// bytes of the old content (when any).
/// </summary>
private static byte[] EnsureBufferSize(byte[] buffer, int copyCount, int size)
{
    if (buffer != null && buffer.Length >= size)
    {
        return buffer;
    }

    byte[] grown = new byte[size];
    if (buffer != null && copyCount != 0)
    {
        Buffer.BlockCopy(buffer, 0, grown, 0, copyCount);
    }
    return grown;
}
// Record-framing style detected from the first bytes on the wire.
private enum Framing
{
Unknown = 0,
// SSLv2-style 2/3-byte headers.
BeforeSSL3,
// Standard SSL3/TLS 5-byte record headers.
SinceSSL3,
// SSLv2-compatible hello that may carry a v3+ version; resolved per frame.
Unified,
Invalid
}
// This is set on the first packet to figure out the framing style.
private Framing _Framing = Framing.Unknown;
// SSL3/TLS protocol frames definitions.
// Values are the TLS record content-type bytes (first byte of a record).
private enum FrameType : byte
{
ChangeCipherSpec = 20,
Alert = 21,
Handshake = 22,
AppData = 23
}
// We need at least 5 bytes to determine what we have.
// Classifies the first bytes of a frame as SSL3/TLS record framing, legacy
// SSLv2-style framing, or invalid, using the header layouts documented below.
private Framing DetectFraming(byte[] bytes, int length)
{
/* PCTv1.0 Hello starts with
* RECORD_LENGTH_MSB (ignore)
* RECORD_LENGTH_LSB (ignore)
* PCT1_CLIENT_HELLO (must be equal)
* PCT1_CLIENT_VERSION_MSB (if version greater than PCTv1)
* PCT1_CLIENT_VERSION_LSB (if version greater than PCTv1)
*
* ... PCT hello ...
*/
/* Microsoft Unihello starts with
* RECORD_LENGTH_MSB (ignore)
* RECORD_LENGTH_LSB (ignore)
* SSL2_CLIENT_HELLO (must be equal)
* SSL2_CLIENT_VERSION_MSB (if version greater than SSLv2) ( or v3)
* SSL2_CLIENT_VERSION_LSB (if version greater than SSLv2) ( or v3)
*
* ... SSLv2 Compatible Hello ...
*/
/* SSLv2 CLIENT_HELLO starts with
* RECORD_LENGTH_MSB (ignore)
* RECORD_LENGTH_LSB (ignore)
* SSL2_CLIENT_HELLO (must be equal)
* SSL2_CLIENT_VERSION_MSB (if version greater than SSLv2) ( or v3)
* SSL2_CLIENT_VERSION_LSB (if version greater than SSLv2) ( or v3)
*
* ... SSLv2 CLIENT_HELLO ...
*/
/* SSLv2 SERVER_HELLO starts with
* RECORD_LENGTH_MSB (ignore)
* RECORD_LENGTH_LSB (ignore)
* SSL2_SERVER_HELLO (must be equal)
* SSL2_SESSION_ID_HIT (ignore)
* SSL2_CERTIFICATE_TYPE (ignore)
* SSL2_CLIENT_VERSION_MSB (if version greater than SSLv2) ( or v3)
* SSL2_CLIENT_VERSION_LSB (if version greater than SSLv2) ( or v3)
*
* ... SSLv2 SERVER_HELLO ...
*/
/* SSLv3 Type 2 Hello starts with
* RECORD_LENGTH_MSB (ignore)
* RECORD_LENGTH_LSB (ignore)
* SSL2_CLIENT_HELLO (must be equal)
* SSL2_CLIENT_VERSION_MSB (if version greater than SSLv3)
* SSL2_CLIENT_VERSION_LSB (if version greater than SSLv3)
*
* ... SSLv2 Compatible Hello ...
*/
/* SSLv3 Type 3 Hello starts with
* 22 (HANDSHAKE MESSAGE)
* VERSION MSB
* VERSION LSB
* RECORD_LENGTH_MSB (ignore)
* RECORD_LENGTH_LSB (ignore)
* HS TYPE (CLIENT_HELLO)
* 3 bytes HS record length
* HS Version
* HS Version
*/
/* SSLv2 message codes
* SSL_MT_ERROR 0
* SSL_MT_CLIENT_HELLO 1
* SSL_MT_CLIENT_MASTER_KEY 2
* SSL_MT_CLIENT_FINISHED 3
* SSL_MT_SERVER_HELLO 4
* SSL_MT_SERVER_VERIFY 5
* SSL_MT_SERVER_FINISHED 6
* SSL_MT_REQUEST_CERTIFICATE 7
* SSL_MT_CLIENT_CERTIFICATE 8
*/
int version = -1;
// NOTE(review): Fail only asserts in debug-style builds; in other builds a
// null/empty buffer would still fall through to bytes[0] below — relies on
// callers always supplying a non-empty header buffer.
if ((bytes == null || bytes.Length <= 0))
{
NetEventSource.Fail(this, "Header buffer is not allocated.");
}
// If the first byte is SSL3 HandShake, then check if we have a SSLv3 Type3 client hello.
if (bytes[0] == (byte)FrameType.Handshake || bytes[0] == (byte)FrameType.AppData
|| bytes[0] == (byte)FrameType.Alert)
{
if (length < 3)
{
return Framing.Invalid;
}
#if TRACE_VERBOSE
if (bytes[1] != 3 && NetEventSource.IsEnabled)
{
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"WARNING: SslState::DetectFraming() SSL protocol is > 3, trying SSL3 framing in retail = {bytes[i]:x}");
}
#endif
// Record version: accept 3.x (SSL3/TLS1.x) but nothing at or above 5.0.
version = (bytes[1] << 8) | bytes[2];
if (version < 0x300 || version >= 0x500)
{
return Framing.Invalid;
}
//
// This is an SSL3 Framing
//
return Framing.SinceSSL3;
}
#if TRACE_VERBOSE
if ((bytes[0] & 0x80) == 0 && NetEventSource.IsEnabled)
{
// We have a three-byte header format
if (NetEventSource.IsEnabled) NetEventSource.Info(this, $"WARNING: SslState::DetectFraming() SSL v <=2 HELLO has no high bit set for 3 bytes header, we are broken, received byte = {bytes[0]:x}");
}
#endif
if (length < 3)
{
return Framing.Invalid;
}
// bytes[2] is the SSLv2 message code; anything above 8 is not a known message.
if (bytes[2] > 8)
{
return Framing.Invalid;
}
if (bytes[2] == 0x1) // SSL_MT_CLIENT_HELLO
{
if (length >= 5)
{
version = (bytes[3] << 8) | bytes[4];
}
}
else if (bytes[2] == 0x4) // SSL_MT_SERVER_HELLO
{
if (length >= 7)
{
version = (bytes[5] << 8) | bytes[6];
}
}
if (version != -1)
{
// If this is the first packet, the client may start with an SSL2 packet
// but stating that the version is 3.x, so check the full range.
// For the subsequent packets we assume that an SSL2 packet should have a 2.x version.
if (_Framing == Framing.Unknown)
{
if (version != 0x0002 && (version < 0x200 || version >= 0x500))
{
return Framing.Invalid;
}
}
else
{
if (version != 0x0002)
{
return Framing.Invalid;
}
}
}
// When server has replied the framing is already fixed depending on the prior client packet
if (!Context.IsServer || _Framing == Framing.Unified)
{
return Framing.BeforeSSL3;
}
return Framing.Unified; // Will use Ssl2 just for this frame.
}
//
// This is called from SslStream class too.
// Given the frame header at buffer[offset..], returns how many MORE bytes must
// be read to complete the current record (header length + payload - dataSize).
// Returns -1 when the framing style is unknown; throws on truncated headers.
internal int GetRemainingFrameSize(byte[] buffer, int offset, int dataSize)
{
if (NetEventSource.IsEnabled)
NetEventSource.Enter(this, buffer, offset, dataSize);
int payloadSize = -1;
switch (_Framing)
{
case Framing.Unified:
case Framing.BeforeSSL3:
if (dataSize < 2)
{
throw new System.IO.IOException(SR.net_ssl_io_frame);
}
// Note: Cannot detect version mismatch for <= SSL2
if ((buffer[offset] & 0x80) != 0)
{
// Two bytes
payloadSize = (((buffer[offset] & 0x7f) << 8) | buffer[offset + 1]) + 2;
payloadSize -= dataSize;
}
else
{
// Three bytes
payloadSize = (((buffer[offset] & 0x3f) << 8) | buffer[offset + 1]) + 3;
payloadSize -= dataSize;
}
break;
case Framing.SinceSSL3:
if (dataSize < 5)
{
throw new System.IO.IOException(SR.net_ssl_io_frame);
}
// TLS record: 5-byte header, length in bytes 3-4 (big-endian).
payloadSize = ((buffer[offset + 3] << 8) | buffer[offset + 4]) + 5;
payloadSize -= dataSize;
break;
default:
break;
}
if (NetEventSource.IsEnabled)
NetEventSource.Exit(this, payloadSize);
return payloadSize;
}
//
// Called with no user stack.
//
/// <summary>
/// Thread-pool continuation that restarts a handshake which had been queued
/// behind an in-flight write; any failure is routed to the user's request.
/// </summary>
private void AsyncResumeHandshake(object state)
{
    var request = state as AsyncProtocolRequest;
    Debug.Assert(request != null, "Expected an AsyncProtocolRequest reference.");

    try
    {
        ForceAuthentication(Context.IsServer, request.Buffer, request);
    }
    catch (Exception e)
    {
        request.CompleteUserWithError(e);
    }
}
//
// Called with no user stack.
//
// Thread-pool continuation used by FinishRead to resume a handshake that was
// parked behind a user read; picks the resume point based on whether the
// renegotiation is still pending.
private void AsyncResumeHandshakeRead(object state)
{
AsyncProtocolRequest asyncRequest = (AsyncProtocolRequest)state;
try
{
if (_pendingReHandshake)
{
// Resume as read a blob.
StartReceiveBlob(asyncRequest.Buffer, asyncRequest);
}
else
{
// Resume as process the blob.
ProcessReceivedBlob(asyncRequest.Buffer, asyncRequest.Buffer == null ? 0 : asyncRequest.Buffer.Length, asyncRequest);
}
}
catch (Exception e)
{
if (asyncRequest.IsUserCompleted)
{
// This will throw on a worker thread.
throw;
}
FinishHandshake(e, asyncRequest);
}
}
//
// Called with no user stack.
//
/// <summary>
/// Thread-pool callback that completes a queued protocol request with a zero
/// result, forcing the asynchronous completion path.
/// </summary>
private void CompleteRequestWaitCallback(object state)
{
    var request = (AsyncProtocolRequest)state;

    // Force async completion.
    if (request.MustCompleteSynchronously)
    {
        throw new InternalException();
    }

    request.CompleteRequest(0);
}
// Completion callback for the LazyAsyncResult created in ReplyOnReAuthentication.
// Only acts on failure: surfaces transport errors from the rehandshake back to
// the Read that triggered it (success already went through FinishHandshake).
private void RehandshakeCompleteCallback(IAsyncResult result)
{
LazyAsyncResult lazyAsyncResult = (LazyAsyncResult)result;
if (lazyAsyncResult == null)
{
NetEventSource.Fail(this, "result is null!");
}
if (!lazyAsyncResult.InternalPeekCompleted)
{
NetEventSource.Fail(this, "result is not completed!");
}
// If the rehandshake succeeded, FinishHandshake has already been called; if there was a SocketException
// during the handshake, this gets called directly from FixedSizeReader, and we need to call
// FinishHandshake to wake up the Read that triggered this rehandshake so the error gets back to the caller
Exception exception = lazyAsyncResult.InternalWaitForCompletion() as Exception;
if (exception != null)
{
// We may be calling FinishHandshake reentrantly, as FinishHandshake can call
// asyncRequest.CompleteWithError, which will result in this method being called.
// This is not a problem because:
//
// 1. We pass null as the asyncRequest parameter, so this second call to FinishHandshake won't loop
// back here.
//
// 2. _QueuedWriteStateRequest and _QueuedReadStateRequest are set to null after the first call,
// so, we won't invoke their callbacks again.
//
// 3. SetException won't overwrite an already-set _Exception.
//
// 4. There are three possibilities for _LockReadState and _LockWriteState:
//
// a. They were set back to None by the first call to FinishHandshake, and this will set them to
// None again: a no-op.
//
// b. They were set to None by the first call to FinishHandshake, but as soon as the lock was given
// up, another thread took a read/write lock. Calling FinishHandshake again will set them back
// to None, but that's fine because that thread will be throwing _Exception before it actually
// does any reading or writing and setting them back to None in a catch block anyways.
//
// c. If there is a Read/Write going on another thread, and the second FinishHandshake clears its
// read/write lock, it's fine because no other Read/Write can look at the lock until the current
// one gives up _SslStream._NestedRead/Write, and no handshake will look at the lock because
// handshakes are only triggered in response to successful reads (which won't happen once
// _Exception is set).
FinishHandshake(exception, null);
}
}
internal IAsyncResult BeginShutdown(AsyncCallback asyncCallback, object asyncState)
{
    // Shutdown is only legal on an authenticated stream that has not already
    // been torn down.
    CheckThrow(authSuccessCheck: true, shutdownCheck: true);

    // Build the protocol-level close-notify token and write it to the inner
    // transport, surfacing the asynchronous write through the APM pattern.
    ProtocolToken message = Context.CreateShutdownToken();
    byte[] payload = message.Payload;
    Task writeTask = InnerStream.WriteAsync(payload, 0, payload.Length);

    return TaskToApm.Begin(writeTask, asyncCallback, asyncState);
}
internal void EndShutdown(IAsyncResult result)
{
    // Re-validate the stream state before observing the result; mirrors the
    // check performed in BeginShutdown.
    CheckThrow(authSuccessCheck: true, shutdownCheck: true);
    // Propagates any exception raised by the asynchronous close-notify write.
    TaskToApm.End(result);
    // Mark the stream as shut down only after the write has completed successfully.
    _shutdown = true;
}
}
}
| |
using System;
using System.Collections.Generic;
using Microsoft.Extensions.Logging;
using Orleans.Configuration;
using System.Threading.Tasks;
using System.Threading;
using Microsoft.Extensions.Options;
using System.Linq;
using Orleans.Internal;
namespace Orleans.Runtime.MembershipService
{
/// <summary>
/// Responsible for updating membership table with details about the local silo.
/// </summary>
internal class MembershipAgent : IHealthCheckParticipant, ILifecycleParticipant<ISiloLifecycle>, IDisposable, MembershipAgent.ITestAccessor
{
    // Cancelled during shutdown; signals that the agent is stopping.
    private readonly CancellationTokenSource cancellation = new CancellationTokenSource();
    private readonly MembershipTableManager tableManager;
    private readonly ClusterHealthMonitor clusterHealthMonitor;
    private readonly ILocalSiloDetails localSilo;
    private readonly IFatalErrorHandler fatalErrorHandler;
    private readonly ClusterMembershipOptions clusterMembershipOptions;
    private readonly ILogger<MembershipAgent> log;
    // Drives the periodic "I am alive" membership-table refresh.
    private readonly IAsyncTimer iAmAliveTimer;
    // Clock accessor; replaceable through ITestAccessor so tests can control time.
    private Func<DateTime> getUtcDateTime = () => DateTime.UtcNow;

    /// <summary>
    /// Initializes the agent with its collaborators and creates the liveness timer
    /// from <see cref="ClusterMembershipOptions.IAmAliveTablePublishTimeout"/>.
    /// </summary>
    public MembershipAgent(
        MembershipTableManager tableManager,
        ClusterHealthMonitor clusterHealthMonitor,
        ILocalSiloDetails localSilo,
        IFatalErrorHandler fatalErrorHandler,
        IOptions<ClusterMembershipOptions> options,
        ILogger<MembershipAgent> log,
        IAsyncTimerFactory timerFactory)
    {
        this.tableManager = tableManager;
        this.clusterHealthMonitor = clusterHealthMonitor;
        this.localSilo = localSilo;
        this.fatalErrorHandler = fatalErrorHandler;
        this.clusterMembershipOptions = options.Value;
        this.log = log;
        this.iAmAliveTimer = timerFactory.Create(
            this.clusterMembershipOptions.IAmAliveTablePublishTimeout,
            nameof(UpdateIAmAlive));
    }

    // Test-only hooks for observing liveness updates and substituting the clock.
    internal interface ITestAccessor
    {
        Action OnUpdateIAmAlive { get; set; }
        Func<DateTime> GetDateTime { get; set; }
    }

    Action ITestAccessor.OnUpdateIAmAlive { get; set; }
    Func<DateTime> ITestAccessor.GetDateTime { get => this.getUtcDateTime; set => this.getUtcDateTime = value ?? throw new ArgumentNullException(nameof(value)); }

    // Background loop: periodically refreshes this silo's "I am alive" timestamp
    // in the membership table until the timer is disposed or the silo terminates.
    private async Task UpdateIAmAlive()
    {
        if (this.log.IsEnabled(LogLevel.Debug)) this.log.LogDebug("Starting periodic membership liveness timestamp updates");
        try
        {
            // A non-null onceOffDelay shortens only the next tick (used for quick retry).
            TimeSpan? onceOffDelay = default;
            while (await this.iAmAliveTimer.NextTick(onceOffDelay) && !this.tableManager.CurrentStatus.IsTerminating())
            {
                onceOffDelay = default;
                try
                {
                    var stopwatch = ValueStopwatch.StartNew();
                    ((ITestAccessor)this).OnUpdateIAmAlive?.Invoke();
                    await this.tableManager.UpdateIAmAlive();
                    if (this.log.IsEnabled(LogLevel.Trace)) this.log.LogTrace("Updating IAmAlive took {Elapsed}", stopwatch.Elapsed);
                }
                catch (Exception exception)
                {
                    // Table update failures are expected to be transient; log and retry soon.
                    this.log.LogError(
                        (int)ErrorCode.MembershipUpdateIAmAliveFailure,
                        "Failed to update table entry for this silo, will retry shortly: {Exception}",
                        exception);

                    // Retry quickly
                    onceOffDelay = TimeSpan.FromMilliseconds(200);
                }
            }
        }
        catch (Exception exception) when (this.fatalErrorHandler.IsUnexpected(exception))
        {
            // Unexpected failures escalate to the fatal error handler (silo fault).
            this.log.LogError("Error updating liveness timestamp: {Exception}", exception);
            this.fatalErrorHandler.OnFatalException(this, nameof(UpdateIAmAlive), exception);
        }
        finally
        {
            if (this.log.IsEnabled(LogLevel.Debug)) this.log.LogDebug("Stopping periodic membership liveness timestamp updates");
        }
    }

    // Transitions this silo to Active, optionally validating connectivity to the
    // rest of the cluster first (controlled by ValidateInitialConnectivity).
    private async Task BecomeActive()
    {
        this.log.LogInformation(
            (int)ErrorCode.MembershipBecomeActive,
            "-BecomeActive");
        if (this.clusterMembershipOptions.ValidateInitialConnectivity)
        {
            await this.ValidateInitialConnectivity();
        }
        else
        {
            this.log.LogWarning(
                (int)ErrorCode.MembershipSendingPreJoinPing,
                $"{nameof(ClusterMembershipOptions)}.{nameof(ClusterMembershipOptions.ValidateInitialConnectivity)} is set to false. This is NOT recommended for a production environment.");
        }

        try
        {
            await this.UpdateStatus(SiloStatus.Active);
            this.log.LogInformation(
                (int)ErrorCode.MembershipFinishBecomeActive,
                "-Finished BecomeActive.");
        }
        catch (Exception exception)
        {
            this.log.LogInformation(
                (int)ErrorCode.MembershipFailedToBecomeActive,
                "BecomeActive failed: {Exception}",
                exception);
            throw;
        }
    }

    // Repeatedly probes all recently-alive active silos until every probe
    // succeeds or the overall attempt window elapses, in which case an
    // OrleansClusterConnectivityCheckFailedException is thrown.
    private async Task ValidateInitialConnectivity()
    {
        // Continue attempting to validate connectivity until some reasonable timeout.
        var maxAttemptTime = this.clusterMembershipOptions.ProbeTimeout.Multiply(5.0 * this.clusterMembershipOptions.NumMissedProbesLimit);
        var attemptNumber = 1;
        var now = this.getUtcDateTime();
        var attemptUntil = now + maxAttemptTime;
        // While true, transient exceptions are swallowed and the loop retries;
        // once the deadline is near, exceptions propagate to the caller.
        var canContinue = true;

        while (true)
        {
            try
            {
                // Candidate set: active silos other than this one whose
                // "I am alive" timestamp is recent enough to be trusted.
                var activeSilos = new List<SiloAddress>();
                foreach (var item in this.tableManager.MembershipTableSnapshot.Entries)
                {
                    var entry = item.Value;
                    if (entry.Status != SiloStatus.Active) continue;
                    if (entry.SiloAddress.IsSameLogicalSilo(this.localSilo.SiloAddress)) continue;
                    if (entry.HasMissedIAmAlivesSince(this.clusterMembershipOptions, now) != default) continue;

                    activeSilos.Add(entry.SiloAddress);
                }

                var failedSilos = await this.clusterHealthMonitor.CheckClusterConnectivity(activeSilos.ToArray());
                var successfulSilos = activeSilos.Where(s => !failedSilos.Contains(s)).ToList();

                // If there were no failures, terminate the loop and return without error.
                if (failedSilos.Count == 0) break;

                this.log.LogError(
                    (int)ErrorCode.MembershipJoiningPreconditionFailure,
                    "Failed to get ping responses from {FailedCount} of {ActiveCount} active silos. "
                    + "Newly joining silos validate connectivity with all active silos that have recently updated their 'I Am Alive' value before joining the cluster. "
                    + "Successfully contacted: {SuccessfulSilos}. Silos which did not respond successfully are: {FailedSilos}. "
                    + "Will continue attempting to validate connectivity until {Timeout}. Attempt #{Attempt}",
                    failedSilos.Count,
                    activeSilos.Count,
                    Utils.EnumerableToString(successfulSilos),
                    Utils.EnumerableToString(failedSilos),
                    attemptUntil,
                    attemptNumber);

                // Give up when another retry cycle (5s delay) would overrun the deadline.
                if (now + TimeSpan.FromSeconds(5) > attemptUntil)
                {
                    canContinue = false;
                    var msg = $"Failed to get ping responses from {failedSilos.Count} of {activeSilos.Count} active silos. "
                        + "Newly joining silos validate connectivity with all active silos that have recently updated their 'I Am Alive' value before joining the cluster. "
                        + $"Successfully contacted: {Utils.EnumerableToString(successfulSilos)}. Failed to get response from: {Utils.EnumerableToString(failedSilos)}";
                    throw new OrleansClusterConnectivityCheckFailedException(msg);
                }

                // Refresh membership after some delay and retry.
                await Task.Delay(TimeSpan.FromSeconds(5));
                await this.tableManager.Refresh();
            }
            catch (Exception exception) when (canContinue)
            {
                this.log.LogError("Failed to validate initial cluster connectivity: {Exception}", exception);
                await Task.Delay(TimeSpan.FromSeconds(1));
            }

            ++attemptNumber;
            now = this.getUtcDateTime();
        }
    }

    // Announces this silo as Joining in the membership table.
    private async Task BecomeJoining()
    {
        this.log.Info(ErrorCode.MembershipJoining, "-Joining");
        try
        {
            await this.UpdateStatus(SiloStatus.Joining);
        }
        catch (Exception exc)
        {
            this.log.Error(ErrorCode.MembershipFailedToJoin, "Error updating status to Joining", exc);
            throw;
        }
    }

    // Announces this silo as ShuttingDown (graceful shutdown path).
    private async Task BecomeShuttingDown()
    {
        this.log.Info(ErrorCode.MembershipShutDown, "-Shutdown");
        try
        {
            await this.UpdateStatus(SiloStatus.ShuttingDown);
        }
        catch (Exception exc)
        {
            this.log.Error(ErrorCode.MembershipFailedToShutdown, "Error updating status to ShuttingDown", exc);
            throw;
        }
    }

    // Announces this silo as Stopping (ungraceful shutdown path).
    private async Task BecomeStopping()
    {
        log.Info(ErrorCode.MembershipStop, "-Stop");
        try
        {
            await this.UpdateStatus(SiloStatus.Stopping);
        }
        catch (Exception exc)
        {
            log.Error(ErrorCode.MembershipFailedToStop, "Error updating status to Stopping", exc);
            throw;
        }
    }

    // Final transition: marks this silo Dead in the membership table.
    private async Task BecomeDead()
    {
        this.log.LogInformation(
            (int)ErrorCode.MembershipKillMyself,
            "Updating status to Dead");

        try
        {
            await this.UpdateStatus(SiloStatus.Dead);
        }
        catch (Exception exception)
        {
            this.log.LogError(
                (int)ErrorCode.MembershipFailedToKillMyself,
                "Failure updating status to " + nameof(SiloStatus.Dead) + ": {Exception}",
                exception);
            throw;
        }
    }

    // Single funnel for all status transitions written to the membership table.
    private async Task UpdateStatus(SiloStatus status)
    {
        await this.tableManager.UpdateStatus(status);
    }

    // Registers the agent's lifecycle stages. Each brace-scoped block wires one
    // stage's start/stop callbacks; braces keep the local functions isolated.
    void ILifecycleParticipant<ISiloLifecycle>.Participate(ISiloLifecycle lifecycle)
    {
        {
            // RuntimeInitialize stage: on stop, mark this silo Dead, giving the
            // table write at most one minute before giving up.
            Task OnRuntimeInitializeStart(CancellationToken ct) => Task.CompletedTask;
            async Task OnRuntimeInitializeStop(CancellationToken ct)
            {
                this.iAmAliveTimer.Dispose();
                this.cancellation.Cancel();
                await Task.WhenAny(
                    Task.Run(() => this.BecomeDead()),
                    Task.Delay(TimeSpan.FromMinutes(1)));
            }

            lifecycle.Subscribe(
                nameof(MembershipAgent),
                ServiceLifecycleStage.RuntimeInitialize + 1, // Gossip before the outbound queue gets closed
                OnRuntimeInitializeStart,
                OnRuntimeInitializeStop);
        }

        {
            // AfterRuntimeGrainServices stage: announce this silo as Joining.
            async Task AfterRuntimeGrainServicesStart(CancellationToken ct)
            {
                await Task.Run(() => this.BecomeJoining());
            }

            Task AfterRuntimeGrainServicesStop(CancellationToken ct) => Task.CompletedTask;

            lifecycle.Subscribe(
                nameof(MembershipAgent),
                ServiceLifecycleStage.AfterRuntimeGrainServices,
                AfterRuntimeGrainServicesStart,
                AfterRuntimeGrainServicesStop);
        }

        {
            // BecomeActive stage: on start, go Active and kick off the liveness
            // loop; on stop, attempt a graceful ShuttingDown within the grace
            // period, falling back to Stopping when cancelled or out of time.
            var tasks = new List<Task>();
            async Task OnBecomeActiveStart(CancellationToken ct)
            {
                await Task.Run(() => this.BecomeActive());
                tasks.Add(Task.Run(() => this.UpdateIAmAlive()));
            }

            async Task OnBecomeActiveStop(CancellationToken ct)
            {
                this.iAmAliveTimer.Dispose();
                this.cancellation.Cancel(throwOnFirstException: false);
                var cancellationTask = ct.WhenCancelled();
                if (ct.IsCancellationRequested)
                {
                    await Task.Run(() => this.BecomeStopping());
                }
                else
                {
                    // Allow some minimum time for graceful shutdown.
                    var gracePeriod = Task.WhenAll(Task.Delay(ClusterMembershipOptions.ClusteringShutdownGracePeriod), cancellationTask);
                    var task = await Task.WhenAny(gracePeriod, this.BecomeShuttingDown());
                    if (ReferenceEquals(task, gracePeriod))
                    {
                        this.log.LogWarning("Graceful shutdown aborted: starting ungraceful shutdown");
                        await Task.Run(() => this.BecomeStopping());
                    }
                    else
                    {
                        await Task.WhenAny(gracePeriod, Task.WhenAll(tasks));
                    }
                }
            }

            lifecycle.Subscribe(
                nameof(MembershipAgent),
                ServiceLifecycleStage.BecomeActive,
                OnBecomeActiveStart,
                OnBecomeActiveStop);
        }
    }

    public void Dispose()
    {
        this.iAmAliveTimer.Dispose();
    }

    // Health is delegated to the liveness timer: unhealthy when it has stalled.
    bool IHealthCheckable.CheckHealth(DateTime lastCheckTime)
    {
        var ok = this.iAmAliveTimer.CheckHealth(lastCheckTime);
        return ok;
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Versioning;
using Xunit;
using Xunit.Extensions;
namespace NuGet.Test
{
public class VersionUtilityTest
{
[Theory]
[InlineData("boo\\foo.dll", "foo.dll")]
[InlineData("far\\sub\\sub2\\foo.dll", "sub\\sub2\\foo.dll")]
[InlineData("netum\\who\\bar.dll", "who\\bar.dll")]
public void ParseFrameworkFolderNameStrictReturnsUnsupportedFxIfParsingFails(string path, string expectedEffectivePath)
{
    // With strict parsing, an unrecognizable leading folder yields the
    // "unsupported" sentinel framework name, and the effective path is the
    // remainder after that folder is stripped.
    // Act
    string effectivePath;
    var frameworkName = VersionUtility.ParseFrameworkFolderName(
        path, strictParsing: true, effectivePath: out effectivePath);
    // Assert
    Assert.Equal(VersionUtility.UnsupportedFrameworkName, frameworkName);
    Assert.Equal(expectedEffectivePath, effectivePath);
}
[Theory]
[InlineData("lib\\net40\\foo.dll", "4.0", ".NETFramework", "foo.dll")]
[InlineData("lib\\net40\\sub\\foo.dll", "4.0", ".NETFramework", "sub\\foo.dll")]
[InlineData("lib\\foo.dll", null, null, "foo.dll")]
[InlineData("content\\sl35\\javascript\\jQuery.js", "3.5", "Silverlight", "javascript\\jQuery.js")]
[InlineData("content\\netmf\\CSS\\jQuery.css", "0.0", ".NETMicroFramework", "CSS\\jQuery.css")]
[InlineData("tools\\winrt45\\install.ps1", "4.5", ".NETCore", "install.ps1")]
[InlineData("tools\\winrt10\\uninstall.ps1", "1.0", ".NETCore", "uninstall.ps1")]
[InlineData("tools\\winkt10\\uninstall.ps1", null, null, "winkt10\\uninstall.ps1")]
[InlineData("tools\\init.ps1", null, null, "init.ps1")]
[InlineData("random\\foo.txt", null, null, "random\\foo.txt")]
public void TestParseFrameworkFolderNameFromFilePath(
    string filePath, string expectedVersion, string expectedIdentifier, string expectedEffectivePath)
{
    // Full package-relative paths (lib/content/tools prefix) resolve the
    // framework from the second folder; a null expectedVersion means the path
    // has no recognizable framework folder and the parser should return null.
    // Act
    string effectivePath;
    var frameworkName = VersionUtility.ParseFrameworkNameFromFilePath(filePath, out effectivePath);
    // Assert
    if (expectedVersion == null)
    {
        Assert.Null(frameworkName);
    }
    else
    {
        Assert.NotNull(frameworkName);
        Assert.Equal(expectedIdentifier, frameworkName.Identifier);
        Assert.Equal(expectedVersion, frameworkName.Version.ToString());
    }
    Assert.Equal(expectedEffectivePath, effectivePath);
}
[Theory]
[InlineData("net40\\foo.dll", "4.0", ".NETFramework", "foo.dll")]
[InlineData("netmu40\\sub\\foo.dll", "0.0", "Unsupported", "sub\\foo.dll")]
[InlineData("foo.dll", null, null, "foo.dll")]
[InlineData("sl35\\javascript\\jQuery.js", "3.5", "Silverlight", "javascript\\jQuery.js")]
[InlineData("netmf\\CSS\\jQuery.css", "0.0", ".NETMicroFramework", "CSS\\jQuery.css")]
[InlineData("CSS\\jQuery.css", "0.0", "Unsupported", "jQuery.css")]
[InlineData("winrt45\\install.ps1", "4.5", ".NETCore", "install.ps1")]
[InlineData("winrt10\\uninstall.ps1", "1.0", ".NETCore", "uninstall.ps1")]
[InlineData("winkt10\\uninstall.ps1", "0.0", "Unsupported", "uninstall.ps1")]
[InlineData("init.ps1", null, null, "init.ps1")]
[InlineData("random\\foo.txt", "0.0", "Unsupported", "foo.txt")]
public void TestParseFrameworkFolderNameWithStrickParsing(
    string filePath, string expectedVersion, string expectedIdentifier, string expectedEffectivePath)
{
    // Strict parsing maps an unparsable (but present) leading folder to the
    // "Unsupported" identifier instead of returning null; null is only
    // returned when there is no folder at all.
    // Act
    string effectivePath;
    var frameworkName = VersionUtility.ParseFrameworkFolderName(filePath, strictParsing: true, effectivePath: out effectivePath);
    // Assert
    if (expectedVersion == null)
    {
        Assert.Null(frameworkName);
    }
    else
    {
        Assert.NotNull(frameworkName);
        Assert.Equal(expectedIdentifier, frameworkName.Identifier);
        Assert.Equal(expectedVersion, frameworkName.Version.ToString());
    }
    Assert.Equal(expectedEffectivePath, effectivePath);
}
[Theory]
[InlineData("net40\\foo.dll", "4.0", ".NETFramework", "foo.dll")]
[InlineData("net40\\sub\\foo.dll", "4.0", ".NETFramework", "sub\\foo.dll")]
[InlineData("foo.dll", null, null, "foo.dll")]
[InlineData("sl35\\javascript\\jQuery.js", "3.5", "Silverlight", "javascript\\jQuery.js")]
[InlineData("netmf\\CSS\\jQuery.css", "0.0", ".NETMicroFramework", "CSS\\jQuery.css")]
[InlineData("winrt45\\install.ps1", "4.5", ".NETCore", "install.ps1")]
[InlineData("winrt10\\uninstall.ps1", "1.0", ".NETCore", "uninstall.ps1")]
[InlineData("init.ps1", null, null, "init.ps1")]
[InlineData("random\\foo.txt", null, null, "random\\foo.txt")]
public void TestParseFrameworkFolderNameWithNonStrickParsing(
    string filePath, string expectedVersion, string expectedIdentifier, string expectedEffectivePath)
{
    // Non-strict parsing returns null (rather than "Unsupported") for folders
    // that do not parse as a framework; a null expectedVersion encodes that case.
    // NOTE: the original data contained two exact duplicate rows
    // ("netmf\\CSS\\jQuery.css" and "winrt10\\uninstall.ps1"); duplicates add
    // no coverage and trigger xUnit's duplicate-InlineData analyzer, so they
    // have been removed.
    // Act
    string effectivePath;
    var frameworkName = VersionUtility.ParseFrameworkFolderName(filePath, strictParsing: false, effectivePath: out effectivePath);
    // Assert
    if (expectedVersion == null)
    {
        Assert.Null(frameworkName);
    }
    else
    {
        Assert.NotNull(frameworkName);
        Assert.Equal(expectedIdentifier, frameworkName.Identifier);
        Assert.Equal(expectedVersion, frameworkName.Version.ToString());
    }
    Assert.Equal(expectedEffectivePath, effectivePath);
}
[Theory]
[InlineData("content\\-\\wow\\cool.txt", "-\\wow\\cool.txt")]
[InlineData("content\\-world\\x.dll", "-world\\x.dll")]
public void ParseFrameworkNameFromFilePathDoesNotThrowIfPathHasADash(string path, string expectedPath)
{
    // A leading '-' in the framework folder must not crash the parser; it
    // simply yields no framework, leaving the trailing path as the effective path.
    string actualEffectivePath;
    var parsed = VersionUtility.ParseFrameworkNameFromFilePath(path, out actualEffectivePath);

    Assert.Null(parsed);
    Assert.Equal(expectedPath, actualEffectivePath);
}
[Fact]
public void ParseFrameworkNameNormalizesNativeFrameworkNames()
{
    // "native" is kept verbatim as the identifier, with the default 0.0 version.
    var parsed = VersionUtility.ParseFrameworkName("native");

    Assert.Equal("native", parsed.Identifier);
    Assert.Equal(new Version("0.0"), parsed.Version);
}
[Fact]
public void ParseFrameworkNameNormalizesSupportedNetFrameworkNames()
{
    // Every accepted spelling normalizes to ".NETFramework" with the default version.
    var expectedVersion = new Version("0.0");

    foreach (var alias in new[] { ".net", ".netframework", "net", "netframework" })
    {
        var parsed = VersionUtility.ParseFrameworkName(alias);

        Assert.Equal(".NETFramework", parsed.Identifier);
        Assert.Equal(expectedVersion, parsed.Version);
    }
}
[Fact]
public void ParseFrameworkNameNormalizesSupportedPortableNetFrameworkNames()
{
    // All portable spellings normalize to ".NETPortable" with the default version.
    var expectedVersion = new Version("0.0");

    foreach (var alias in new[] { ".netportable-sl3", "netportable-net4", "portable-netcore45" })
    {
        var parsed = VersionUtility.ParseFrameworkName(alias);

        Assert.Equal(".NETPortable", parsed.Identifier);
        Assert.Equal(expectedVersion, parsed.Version);
    }
}
[Fact]
public void ParseFrameworkNameNormalizesSupportedWindowsPhoneNames()
{
    // Both spellings normalize to "WindowsPhone" with the default version.
    var expectedVersion = new Version("0.0");

    foreach (var alias in new[] { "windowsphone", "wp" })
    {
        var parsed = VersionUtility.ParseFrameworkName(alias);

        Assert.Equal("WindowsPhone", parsed.Identifier);
        Assert.Equal(expectedVersion, parsed.Version);
    }
}
[Fact]
public void ParseFrameworkNameNormalizesSupportedWindowsPhoneAppNames()
{
    // Both spellings normalize to "WindowsPhoneApp" with the default version.
    var expectedVersion = new Version("0.0");

    foreach (var alias in new[] { "WindowsPhoneApp", "wpa" })
    {
        var parsed = VersionUtility.ParseFrameworkName(alias);

        Assert.Equal("WindowsPhoneApp", parsed.Identifier);
        Assert.Equal(expectedVersion, parsed.Version);
    }
}
[Fact]
public void ParseFrameworkNameNormalizesSupportedWinRTFrameworkNames()
{
    // WinRT spellings normalize to ".NETCore" with the default version.
    var expectedVersion = new Version("0.0");

    foreach (var alias in new[] { "winrt", ".NETCore", "NetCore" })
    {
        var parsed = VersionUtility.ParseFrameworkName(alias);

        Assert.Equal(".NETCore", parsed.Identifier);
        Assert.Equal(expectedVersion, parsed.Version);
    }
}
[Fact]
public void ParseFrameworkNameNormalizesSupportedWindowsFrameworkNames()
{
    // Both spellings normalize to "Windows" with the default version.
    var expectedVersion = new Version("0.0");

    foreach (var alias in new[] { "Windows", "win" })
    {
        var parsed = VersionUtility.ParseFrameworkName(alias);

        Assert.Equal("Windows", parsed.Identifier);
        Assert.Equal(expectedVersion, parsed.Version);
    }
}
[Fact]
public void ParseFrameworkNameNormalizesSupportedNetMicroFrameworkNames()
{
    // Both spellings normalize to ".NETMicroFramework", carrying the 4.1 version.
    var expectedVersion = new Version("4.1");

    foreach (var alias in new[] { "netmf4.1", ".NETMicroFramework4.1" })
    {
        var parsed = VersionUtility.ParseFrameworkName(alias);

        Assert.Equal(".NETMicroFramework", parsed.Identifier);
        Assert.Equal(expectedVersion, parsed.Version);
    }
}
[Fact]
public void ParseFrameworkNameNormalizesSupportedSilverlightNames()
{
    // Case-insensitive spellings (including one with a trailing space)
    // normalize to "Silverlight" with the default version.
    var expectedVersion = new Version("0.0");

    foreach (var alias in new[] { "sl", "SL", "SilVerLight", "Silverlight", "Silverlight " })
    {
        var parsed = VersionUtility.ParseFrameworkName(alias);

        Assert.Equal("Silverlight", parsed.Identifier);
        Assert.Equal(expectedVersion, parsed.Version);
    }
}
[Fact]
public void ParseFrameworkNameNormalizesSupportedMonoAndroidNames()
{
    // Case-insensitive spellings (one with trailing whitespace) normalize
    // to "MonoAndroid" with the default version.
    var expectedVersion = new Version("0.0");

    foreach (var alias in new[] { "MonoAndroid", "monoandroid", "MONOANDROID " })
    {
        var parsed = VersionUtility.ParseFrameworkName(alias);

        Assert.Equal("MonoAndroid", parsed.Identifier);
        Assert.Equal(expectedVersion, parsed.Version);
    }
}
[Fact]
public void ParseFrameworkNameNormalizesSupportedMonoTouchNames()
{
    // Case-insensitive spellings (one with trailing whitespace) normalize
    // to "MonoTouch" with the default version.
    var expectedVersion = new Version("0.0");

    foreach (var alias in new[] { "MonoTouch", "monotouch", "monoTOUCH " })
    {
        var parsed = VersionUtility.ParseFrameworkName(alias);

        Assert.Equal("MonoTouch", parsed.Identifier);
        Assert.Equal(expectedVersion, parsed.Version);
    }
}
[Fact]
public void ParseFrameworkNameNormalizesSupportedMonoMacNames()
{
    // Case-insensitive spellings (one with trailing whitespace) normalize
    // to "MonoMac" with the default version.
    var expectedVersion = new Version("0.0");

    foreach (var alias in new[] { "MonoMac", "monomac", "mONOmAC " })
    {
        var parsed = VersionUtility.ParseFrameworkName(alias);

        Assert.Equal("MonoMac", parsed.Identifier);
        Assert.Equal(expectedVersion, parsed.Version);
    }
}
[Theory]
[InlineData("aspnet50", "5.0", "ASP.NET")]
[InlineData("aspnetcore50", "5.0", "ASP.NETCore")]
[InlineData("asp.net50", "5.0", "ASP.NET")]
[InlineData("asp.netcore50", "5.0", "ASP.NETCore")]
[InlineData("ASPNET50", "5.0", "ASP.NET")]
[InlineData("ASPNETCORE50", "5.0", "ASP.NETCore")]
[InlineData("ASP.NET50", "5.0", "ASP.NET")]
[InlineData("ASP.NETCORE50", "5.0", "ASP.NETCore")]
// 5.1 doesn't exist (at least at time of writing), just verifying the logic
[InlineData("aspnet51", "5.1", "ASP.NET")]
[InlineData("aspnetcore51", "5.1", "ASP.NETCore")]
public void ParseFrameworkNameNormalizesSupportedASPNetFrameworkNames(string shortName, string version, string identifier)
{
    // ASP.NET short monikers normalize case-insensitively to the canonical
    // identifier, with the embedded digits expanded into the version and no profile.
    // Arrange
    Version expectedVersion = new Version(version);
    // Act
    var expanded = VersionUtility.ParseFrameworkName(shortName);
    // Assert
    Assert.Equal(expectedVersion, expanded.Version);
    Assert.Equal(identifier, expanded.Identifier);
    Assert.True(String.IsNullOrEmpty(expanded.Profile));
}
[Theory]
[InlineData(new[] { "XamarinIOS", "xamarinios", "XAMARINIOS " }, "0.0", "Xamarin.iOS")]
[InlineData(new[] { "Xamarin.iOS", "xamarin.ios", "XAMARIN.IOS " }, "0.0", "Xamarin.iOS")]
[InlineData(new[] { "XamarinMac", "xamarinmac", "XAMARINMAC " }, "0.0", "Xamarin.Mac")]
[InlineData(new[] { "Xamarin.Mac", "xamarin.mac", "XAMARIN.MAC " }, "0.0", "Xamarin.Mac")]
[InlineData(new[] { "XamarinPlayStationThree", "xamarinplaystationthree", "XAMARINPLAYSTATIONthree " }, "0.0","Xamarin.PlayStation3")]
[InlineData(new[] { "Xamarin.PlayStationThree", "xamarin.playstationthree", "XAMARIN.PLAYSTATIONTHREE " }, "0.0", "Xamarin.PlayStation3")]
[InlineData(new[] { "XamarinPSThree", "xamarinpsthree", "XAMARINPSTHREE " }, "0.0", "Xamarin.PlayStation3")]
[InlineData(new[] { "XamarinPlayStationFour", "xamarinplaystationfour", "XAMARINPLAYSTATIONFOUR " }, "0.0", "Xamarin.PlayStation4")]
[InlineData(new[] { "Xamarin.PlayStationFour", "xamarin.playstationfour", "XAMARIN.PLAYSTATIONFOUR " }, "0.0", "Xamarin.PlayStation4")]
[InlineData(new[] { "XamarinPSFour", "xamarinpsfour", "XAMARINPSFOUR " }, "0.0", "Xamarin.PlayStation4")]
[InlineData(new[] { "XamarinPlayStationVita", "xamarinplaystationvita", "XAMARINPLAYSTATIONVITA " }, "0.0", "Xamarin.PlayStationVita")]
[InlineData(new[] { "Xamarin.PlayStationVita", "xamarin.playstationvita", "XAMARIN.PLAYSTATIONVITA " }, "0.0", "Xamarin.PlayStationVita")]
[InlineData(new[] { "XamarinPSVita", "xamarinpsvita", "XAMARINPSVITA " }, "0.0", "Xamarin.PlayStationVita")]
[InlineData(new[] { "Xamarin.XboxThreeSixty", "xamarin.xboxthreesixty", "XAMARIN.XBOXTHREESIXTY " }, "0.0", "Xamarin.Xbox360")]
[InlineData(new[] { "XamarinXboxThreeSixty", "xamarinxboxthreesixty", "XAMARINXBOXTHREESIXTY " }, "0.0", "Xamarin.Xbox360")]
[InlineData(new[] { "XamarinXboxOne", "xamarinxboxone", "XAMARINXBOXONE " }, "0.0", "Xamarin.XboxOne")]
[InlineData(new[] { "Xamarin.XboxOne", "xamarin.xboxone", "XAMARIN.XBOXONE " }, "0.0", "Xamarin.XboxOne")]
public void ParseFrameworkNameNormalizesSupportedXamarinFrameworkNames(string[] knownNameFormats, string version, string expectedIdentifier)
{
    // Each row supplies a group of case/spelling variants (some with trailing
    // whitespace) that must all normalize to the same canonical Xamarin identifier.
    // Arrange
    Version defaultVersion = new Version(version);
    // Act
    var frameworkNames = knownNameFormats.Select(VersionUtility.ParseFrameworkName);
    // Assert
    foreach (var frameworkName in frameworkNames)
    {
        Assert.Equal(expectedIdentifier, frameworkName.Identifier);
        Assert.Equal(defaultVersion, frameworkName.Version);
    }
}
[Fact]
public void ParseFrameworkNameReturnsUnsupportedFrameworkNameIfUnrecognized()
{
    // Unparsable framework monikers map to the "Unsupported" sentinel identifier.
    foreach (var moniker in new[] { "NETCF20", "NET40ClientProfile", "NET40Foo" })
    {
        var parsed = VersionUtility.ParseFrameworkName(moniker);

        Assert.Equal("Unsupported", parsed.Identifier);
    }
}
[Fact]
public void ParseFrameworkNameUsesNetFrameworkIfOnlyVersionSpecified()
{
    // A bare version string implies the .NET Framework identifier.
    var parsed = VersionUtility.ParseFrameworkName("20");

    Assert.Equal(".NETFramework", parsed.Identifier);
    Assert.Equal(new Version("2.0"), parsed.Version);
}
[Fact]
public void ParseFrameworkNameVersionFormats()
{
    // "4.0", "40" and "4" are all accepted spellings of version 4.0.
    var expectedVersion = new Version("4.0");

    foreach (var spelling in new[] { "4.0", "40", "4" })
    {
        var parsed = VersionUtility.ParseFrameworkName(spelling);

        Assert.Equal(".NETFramework", parsed.Identifier);
        Assert.Equal(expectedVersion, parsed.Version);
    }
}
[Fact]
public void ParseFrameworkNameVersionIntegerLongerThan4CharsTrimsExccess()
{
    // A digit run longer than four characters keeps only the first four,
    // each digit becoming one version component (4.1.2.3 from "41235").
    var parsed = VersionUtility.ParseFrameworkName("NET41235");

    Assert.Equal(".NETFramework", parsed.Identifier);
    Assert.Equal(new Version("4.1.2.3"), parsed.Version);
}
[Fact]
public void ParseFrameworkNameInvalidVersionFormatUsesDefaultVersion()
{
    // A version with more than four dotted components is unparsable, so the
    // whole moniker falls back to the "Unsupported" identifier.
    var parsed = VersionUtility.ParseFrameworkName("NET4.1.4.5.5");

    Assert.Equal("Unsupported", parsed.Identifier);
}
[Fact]
public void ParseFrameworkNameWithProfile()
{
    // The "-client" suffix normalizes to the "Client" profile.
    var parsed = VersionUtility.ParseFrameworkName("net40-client");

    Assert.Equal(".NETFramework", parsed.Identifier);
    Assert.Equal(new Version("4.0"), parsed.Version);
    Assert.Equal("Client", parsed.Profile);
}
[Fact]
public void ParseFrameworkNameWithUnknownProfileUsesProfileAsIs()
{
    // Profiles without a known normalization are preserved verbatim.
    var parsed = VersionUtility.ParseFrameworkName("net40-other");

    Assert.Equal(".NETFramework", parsed.Identifier);
    Assert.Equal(new Version("4.0"), parsed.Version);
    Assert.Equal("other", parsed.Profile);
}
[Fact]
public void ParseFrameworkNameWithFullProfileNoamlizesToEmptyProfile()
{
    // "-full" means the default (full) profile, which is represented as empty.
    var parsed = VersionUtility.ParseFrameworkName("net40-full");

    Assert.Equal(".NETFramework", parsed.Identifier);
    Assert.Equal(new Version("4.0"), parsed.Version);
    Assert.Equal(String.Empty, parsed.Profile);
}
[Fact]
public void ParseFrameworkNameWithWPProfileGetNormalizedToWindowsPhone()
{
    // The "wp" profile shorthand expands to "WindowsPhone".
    var parsed = VersionUtility.ParseFrameworkName("sl4-wp");

    Assert.Equal("Silverlight", parsed.Identifier);
    Assert.Equal(new Version("4.0"), parsed.Version);
    Assert.Equal("WindowsPhone", parsed.Profile);
}
[Fact]
public void ParseFrameworkNameWithCFProfileGetNormalizedToCompactFramework()
{
    // The "cf" profile shorthand expands to "CompactFramework".
    var parsed = VersionUtility.ParseFrameworkName("net20-cf");

    Assert.Equal(".NETFramework", parsed.Identifier);
    Assert.Equal(new Version("2.0"), parsed.Version);
    Assert.Equal("CompactFramework", parsed.Profile);
}
[Fact]
public void ParseFrameworkNameWithEmptyProfile()
{
    // A trailing dash with nothing after it yields an empty profile, not an error.
    var parsed = VersionUtility.ParseFrameworkName("sl4-");

    Assert.Equal("Silverlight", parsed.Identifier);
    Assert.Equal(new Version("4.0"), parsed.Version);
    Assert.Equal(String.Empty, parsed.Profile);
}
[Fact]
public void ParseFrameworkNameWithInvalidFrameworkNameThrows()
{
    // A missing framework portion (empty string, or dash with only a profile)
    // throws ArgumentException with "Framework name is missing."; a string of
    // dashes fails further along with the format-error message instead.
    // Act
    ExceptionAssert.ThrowsArgumentException(() => VersionUtility.ParseFrameworkName("-"), "frameworkName", "Framework name is missing.");
    ExceptionAssert.ThrowsArgumentException(() => VersionUtility.ParseFrameworkName("-client"), "frameworkName", "Framework name is missing.");
    ExceptionAssert.ThrowsArgumentException(() => VersionUtility.ParseFrameworkName(""), "frameworkName", "Framework name is missing.");
    ExceptionAssert.ThrowsArgumentException(() => VersionUtility.ParseFrameworkName("---"), "frameworkName", "Invalid framework name format. Expected {framework}{version}-{profile}.");
}
[Fact]
public void ParseFrameworkFolderNameWithoutFramework()
{
    // A bare file name contains no framework folder, so parsing yields null.
    var framework = VersionUtility.ParseFrameworkFolderName(@"foo.dll");

    Assert.Null(framework);
}
[Theory]
[InlineData(@"sub\foo.dll", "Unsupported", "0.0")]
[InlineData(@"SL4\foo.dll", "Silverlight", "4.0")]
[InlineData(@"SL3\sub1\foo.dll", "Silverlight", "3.0")]
[InlineData(@"SL20\sub1\sub2\foo.dll", "Silverlight", "2.0")]
[InlineData(@"net\foo.dll", ".NETFramework", "")]
[InlineData(@"winrt45\foo.dll", ".NETCore", "4.5")]
[InlineData(@"aspnet50\foo.dll", "ASP.NET", "5.0")]
[InlineData(@"aspnetcore50\foo.dll", "ASP.NETCore", "5.0")]
public void ParseFrameworkFolderName(string path, string identifier, string version)
{
    // The single-argument overload reads only the first folder of the path;
    // an empty version string in the data means "no version digits present",
    // which the parser represents as the default new Version().
    // Arrange
    Version expectedVersion = String.IsNullOrEmpty(version) ?
        new Version() :
        new Version(version);
    // Act
    var actual = VersionUtility.ParseFrameworkFolderName(path);
    // Assert
    Assert.Equal(identifier, actual.Identifier);
    Assert.Equal(expectedVersion, actual.Version);
}
[Fact]
public void GetFrameworkStringFromFrameworkName()
{
    // Each FrameworkName must serialize as {Identifier}{Version}[-{Profile}],
    // matching the folder-name convention used inside packages.
    var cases = new[]
    {
        new { Input = new FrameworkName(".NETFramework", new Version(4, 0)), Expected = ".NETFramework4.0" },
        new { Input = new FrameworkName(".NETFramework", new Version(4, 0), "Client"), Expected = ".NETFramework4.0-Client" },
        new { Input = new FrameworkName("Silverlight", new Version(3, 0)), Expected = "Silverlight3.0" },
        new { Input = new FrameworkName("Silverlight", new Version(4, 0)), Expected = "Silverlight4.0" },
        new { Input = new FrameworkName("Silverlight", new Version(4, 0), "WindowsPhone"), Expected = "Silverlight4.0-WindowsPhone" },
        new { Input = new FrameworkName("Silverlight", new Version(4, 0), "WindowsPhone71"), Expected = "Silverlight4.0-WindowsPhone71" },
        new { Input = new FrameworkName(".NETMicroFramework", new Version(4, 1)), Expected = ".NETMicroFramework4.1" },
        new { Input = new FrameworkName(".NETCore", new Version(4, 5)), Expected = ".NETCore4.5" },
    };

    foreach (var testCase in cases)
    {
        var actual = VersionUtility.GetFrameworkString(testCase.Input);

        Assert.Equal(testCase.Expected, actual);
    }
}
[Fact]
public void ParseVersionSpecWithNullThrows()
{
    // Null input is rejected with an ArgumentNullException naming the "value" parameter.
    // Act & Assert
    ExceptionAssert.ThrowsArgNull(() => VersionUtility.ParseVersionSpec(null), "value");
}
[Fact]
public void ParseVersionSpecSimpleVersionNoBrackets()
{
    // A bare version is an inclusive lower bound with no upper bound.
    var spec = VersionUtility.ParseVersionSpec("1.2");

    Assert.Equal("1.2", spec.MinVersion.ToString());
    Assert.True(spec.IsMinInclusive);
    Assert.Null(spec.MaxVersion);
    Assert.False(spec.IsMaxInclusive);
}
[Fact]
public void ParseVersionSpecSimpleVersionNoBracketsExtraSpaces()
{
    // Whitespace inside a bare version string is ignored by the parser.

    // Act
    var versionInfo = VersionUtility.ParseVersionSpec("  1  .   2  ");

    // Assert
    Assert.Equal("1.2", versionInfo.MinVersion.ToString());
    Assert.True(versionInfo.IsMinInclusive);
    Assert.Null(versionInfo.MaxVersion); // Assert.Null over Assert.Equal(null, ...) per xUnit convention (xUnit2003)
    Assert.False(versionInfo.IsMaxInclusive);
}
[Fact]
public void ParseVersionSpecMaxOnlyInclusive()
{
    // "(,1.2]" means: no lower bound, inclusive upper bound of 1.2.

    // Act
    var versionInfo = VersionUtility.ParseVersionSpec("(,1.2]");

    // Assert
    Assert.Null(versionInfo.MinVersion); // Assert.Null over Assert.Equal(null, ...) per xUnit convention (xUnit2003)
    Assert.False(versionInfo.IsMinInclusive);
    Assert.Equal("1.2", versionInfo.MaxVersion.ToString());
    Assert.True(versionInfo.IsMaxInclusive);
}
[Fact]
public void ParseVersionSpecMaxOnlyExclusive()
{
    // "(,1.2)" means: no lower bound, exclusive upper bound of 1.2.

    // Act
    var versionInfo = VersionUtility.ParseVersionSpec("(,1.2)");

    // Assert
    Assert.Null(versionInfo.MinVersion); // Assert.Null over Assert.Equal(null, ...) per xUnit convention (xUnit2003)
    Assert.False(versionInfo.IsMinInclusive);
    Assert.Equal("1.2", versionInfo.MaxVersion.ToString());
    Assert.False(versionInfo.IsMaxInclusive);
}
[Fact]
public void ParseVersionSpecExactVersion()
{
    // "[1.2]" pins both bounds to 1.2, inclusive on each side.

    // Act
    var spec = VersionUtility.ParseVersionSpec("[1.2]");

    // Assert
    Assert.True(spec.IsMinInclusive);
    Assert.True(spec.IsMaxInclusive);
    Assert.Equal("1.2", spec.MinVersion.ToString());
    Assert.Equal("1.2", spec.MaxVersion.ToString());
}
[Fact]
public void ParseVersionSpecMinOnlyExclusive()
{
    // "(1.2,)" means: exclusive lower bound of 1.2, no upper bound.

    // Act
    var versionInfo = VersionUtility.ParseVersionSpec("(1.2,)");

    // Assert
    Assert.Equal("1.2", versionInfo.MinVersion.ToString());
    Assert.False(versionInfo.IsMinInclusive);
    Assert.Null(versionInfo.MaxVersion); // Assert.Null over Assert.Equal(null, ...) per xUnit convention (xUnit2003)
    Assert.False(versionInfo.IsMaxInclusive);
}
[Fact]
public void ParseVersionSpecRangeExclusiveExclusive()
{
    // "(1.2,2.3)" — both bounds present, both exclusive.

    // Act
    var spec = VersionUtility.ParseVersionSpec("(1.2,2.3)");

    // Assert
    Assert.False(spec.IsMinInclusive);
    Assert.False(spec.IsMaxInclusive);
    Assert.Equal("1.2", spec.MinVersion.ToString());
    Assert.Equal("2.3", spec.MaxVersion.ToString());
}
[Fact]
public void ParseVersionSpecRangeExclusiveInclusive()
{
    // "(1.2,2.3]" — exclusive lower bound, inclusive upper bound.

    // Act
    var spec = VersionUtility.ParseVersionSpec("(1.2,2.3]");

    // Assert
    Assert.False(spec.IsMinInclusive);
    Assert.True(spec.IsMaxInclusive);
    Assert.Equal("1.2", spec.MinVersion.ToString());
    Assert.Equal("2.3", spec.MaxVersion.ToString());
}
[Fact]
public void ParseVersionSpecRangeInclusiveExclusive()
{
    // "[1.2,2.3)" — inclusive lower bound, exclusive upper bound.

    // Act
    var spec = VersionUtility.ParseVersionSpec("[1.2,2.3)");

    // Assert
    Assert.True(spec.IsMinInclusive);
    Assert.False(spec.IsMaxInclusive);
    Assert.Equal("1.2", spec.MinVersion.ToString());
    Assert.Equal("2.3", spec.MaxVersion.ToString());
}
[Fact]
public void ParseVersionSpecRangeInclusiveInclusive()
{
    // "[1.2,2.3]" — both bounds present and inclusive.

    // Act
    var spec = VersionUtility.ParseVersionSpec("[1.2,2.3]");

    // Assert
    Assert.True(spec.IsMinInclusive);
    Assert.True(spec.IsMaxInclusive);
    Assert.Equal("1.2", spec.MinVersion.ToString());
    Assert.Equal("2.3", spec.MaxVersion.ToString());
}
[Fact]
public void ParseVersionSpecRangeInclusiveInclusiveExtraSpaces()
{
    // Embedded whitespace anywhere inside the range spec is ignored by the parser.

    // Act
    var spec = VersionUtility.ParseVersionSpec("   [  1 .2   , 2  .3   ]  ");

    // Assert
    Assert.True(spec.IsMinInclusive);
    Assert.True(spec.IsMaxInclusive);
    Assert.Equal("1.2", spec.MinVersion.ToString());
    Assert.Equal("2.3", spec.MaxVersion.ToString());
}
[Fact]
public void NormalizeVersionFillsInZerosForUnsetVersionParts()
{
    // NormalizeVersion expands "1.5" to the full four-part form "1.5.0.0".

    // Act
    Version normalized = VersionUtility.NormalizeVersion(new Version("1.5"));

    // Assert
    Assert.Equal(new Version(1, 5, 0, 0), normalized);
}
[Fact]
public void ParseVersionSpecRangeIntegerRanges()
{
    // Single-integer components are parsed as "<n>.0" versions.

    // Act
    var spec = VersionUtility.ParseVersionSpec(" [1, 2] ");

    // Assert
    Assert.True(spec.IsMinInclusive);
    Assert.True(spec.IsMaxInclusive);
    Assert.Equal("1.0", spec.MinVersion.ToString());
    Assert.Equal("2.0", spec.MaxVersion.ToString());
}
[Fact]
public void ParseVersionSpecRangeNegativeIntegerRanges()
{
    // A negative version component makes the spec unparseable; TryParse must fail cleanly.

    // Act
    IVersionSpec spec;
    bool parsed = VersionUtility.TryParseVersionSpec(" [-1, 2] ", out spec);

    // Assert
    Assert.False(parsed);
    Assert.Null(spec);
}
// Theory data for TrimVersionTrimsRevisionIfZero: { input version, expected trimmed version }.
public static IEnumerable<object[]> TrimVersionData
{
    get
    {
        return new[]
        {
            new object[] { new Version(1, 2, 3, 0), new Version(1, 2, 3) },
            new object[] { new Version("1.2.3.0"), new Version("1.2.3") },
            new object[] { new Version(1, 2, 0, 0), new Version(1, 2) },
            new object[] { new Version("1.2.0.0"), new Version("1.2") },
            // A non-zero revision is preserved even when the build component is zero.
            new object[] { new Version(1, 2, 0, 5), new Version(1, 2, 0, 5) }
        };
    }
}
[Theory]
[PropertyData("TrimVersionData")]
public void TrimVersionTrimsRevisionIfZero(Version version, Version expected)
{
    // TrimVersion drops trailing zero revision/build components (see TrimVersionData).
    Assert.Equal(expected, VersionUtility.TrimVersion(version));
}
[Fact]
public void GetAllPossibleVersionsTwoDigits()
{
    // A two-digit version expands to itself plus the zero-padded 3- and 4-part forms.

    // Act
    var actual = VersionUtility.GetPossibleVersions(new SemanticVersion("1.1")).ToList();

    // Assert
    var expected = new[]
    {
        new SemanticVersion("1.1"),
        new SemanticVersion("1.1.0"),
        new SemanticVersion("1.1.0.0")
    };
    Assert.Equal(expected, actual);
}
[Fact]
public void GetAllPossibleVersionsThreeDigits()
{
    // "1.0.0" expands to the shorter "1.0" as well as the longer "1.0.0.0".

    // Act
    var actual = VersionUtility.GetPossibleVersions(new SemanticVersion("1.0.0")).ToList();

    // Assert
    var expected = new[]
    {
        new SemanticVersion("1.0"),
        new SemanticVersion("1.0.0"),
        new SemanticVersion("1.0.0.0")
    };
    Assert.Equal(expected, actual);
}
[Fact]
public void GetAllPossibleVersionsFourDigits()
{
    // "1.0.0.0" expands to all shorter zero forms; string forms must round-trip too.

    // Act
    var actual = VersionUtility.GetPossibleVersions(new SemanticVersion("1.0.0.0")).ToList();

    // Assert: both value equality and the rendered strings.
    var expected = new[]
    {
        new SemanticVersion("1.0"),
        new SemanticVersion("1.0.0"),
        new SemanticVersion("1.0.0.0")
    };
    Assert.Equal(expected, actual);
    Assert.Equal(new[] { "1.0", "1.0.0", "1.0.0.0" }, actual.Select(v => v.ToString()));
}
[Fact]
public void GetAllPossibleVersionsThreeDigitsWithZeroBetween()
{
    // A zero in the middle ("1.0.1") cannot be trimmed, so no two-part form appears.

    // Act
    var actual = VersionUtility.GetPossibleVersions(new SemanticVersion("1.0.1")).ToList();

    // Assert: both value equality and the rendered strings.
    var expected = new[]
    {
        new SemanticVersion("1.0.1"),
        new SemanticVersion("1.0.1.0")
    };
    Assert.Equal(expected, actual);
    Assert.Equal(new[] { "1.0.1", "1.0.1.0" }, actual.Select(v => v.ToString()));
}
[Fact]
public void GetAllPossibleVersionsFourDigitsWithTrailingZeros()
{
    // "1.1.0.0" has trailing zeros, so the shorter "1.1" and "1.1.0" forms appear too.

    // Act
    var actual = VersionUtility.GetPossibleVersions(new SemanticVersion("1.1.0.0")).ToList();

    // Assert: both value equality and the rendered strings.
    var expected = new[]
    {
        new SemanticVersion("1.1"),
        new SemanticVersion("1.1.0"),
        new SemanticVersion("1.1.0.0")
    };
    Assert.Equal(expected, actual);
    Assert.Equal(new[] { "1.1", "1.1.0", "1.1.0.0" }, actual.Select(v => v.ToString()));
}
[Fact]
public void GetSafeVersions()
{
    // GetSafeRange produces [given, next-minor) — AssertSafeVersion checks the bound flags.

    // Act & Assert
    AssertSafeVersion(VersionUtility.GetSafeRange(new SemanticVersion("1.3")),
                      new SemanticVersion("1.3"), new SemanticVersion("1.4"));
    AssertSafeVersion(VersionUtility.GetSafeRange(new SemanticVersion("0.9")),
                      new SemanticVersion("0.9"), new SemanticVersion("0.10"));
    AssertSafeVersion(VersionUtility.GetSafeRange(new SemanticVersion("2.9.45.6")),
                      new SemanticVersion("2.9.45.6"), new SemanticVersion("2.10"));
}
// Asserts that versionSpec is the "safe" range [minVer, maxVer): inclusive lower bound,
// exclusive upper bound, with exactly the expected boundary versions.
private void AssertSafeVersion(IVersionSpec versionSpec, SemanticVersion minVer, SemanticVersion maxVer)
{
    Assert.True(versionSpec.IsMinInclusive);
    Assert.False(versionSpec.IsMaxInclusive);
    // Expected value goes first in xUnit's Assert.Equal; the original had the arguments
    // reversed, which produces misleading expected/actual labels on failure.
    Assert.Equal(minVer, versionSpec.MinVersion);
    Assert.Equal(maxVer, versionSpec.MaxVersion);
}
[Fact]
public void TrimVersionThrowsIfVersionNull()
{
    // A null version must raise ArgumentNullException for parameter "version".
    Version nullVersion = null;
    ExceptionAssert.ThrowsArgNull(() => VersionUtility.TrimVersion(nullVersion), "version");
}
[Fact]
public void IsCompatibleReturnsFalseForSlAndWindowsPhoneFrameworks()
{
    // Arrange: plain Silverlight 3 vs. the WindowsPhone-profiled flavor.
    FrameworkName silverlight = VersionUtility.ParseFrameworkName("sl3");
    FrameworkName windowsPhone = VersionUtility.ParseFrameworkName("sl3-wp");

    // Act & Assert: incompatible in both directions.
    Assert.False(VersionUtility.IsCompatible(silverlight, windowsPhone));
    Assert.False(VersionUtility.IsCompatible(windowsPhone, silverlight));
}
[Fact]
public void IsCompatibleWindowsPhoneVersions()
{
    // Arrange: the full ladder of Windows Phone framework flavors.
    FrameworkName wp7 = VersionUtility.ParseFrameworkName("sl3-wp");
    FrameworkName wp7Mango = VersionUtility.ParseFrameworkName("sl4-wp71");
    FrameworkName wp8 = new FrameworkName("WindowsPhone, Version=v8.0");
    FrameworkName wp81 = new FrameworkName("WindowsPhone, Version=v8.1");
    FrameworkName wpa81 = VersionUtility.ParseFrameworkName("wpa81");

    // Act & Assert: IsCompatible(project, package) — older packages install into
    // newer projects, never the reverse.
    Assert.False(VersionUtility.IsCompatible(wp7, wp7Mango));   // Mango package in WP7 project
    Assert.True(VersionUtility.IsCompatible(wp7Mango, wp7));    // WP7 package in Mango project
    Assert.True(VersionUtility.IsCompatible(wp8, wp7));
    Assert.True(VersionUtility.IsCompatible(wp8, wp7Mango));
    Assert.False(VersionUtility.IsCompatible(wp7, wp8));
    Assert.False(VersionUtility.IsCompatible(wp7Mango, wp8));
    Assert.True(VersionUtility.IsCompatible(wp81, wp8));
    Assert.False(VersionUtility.IsCompatible(wpa81, wp81));
}
[Theory]
[InlineData("wp")]
[InlineData("wp7")]
[InlineData("wp70")]
[InlineData("windowsphone")]
[InlineData("windowsphone7")]
[InlineData("windowsphone70")]
[InlineData("sl3-wp")]
public void WindowsPhone7IdentifierCompatibleWithAllWPProjects(string wp7Identifier)
{
    // Arrange: every WP7 spelling should be accepted by all WP project generations.
    var package = VersionUtility.ParseFrameworkName(wp7Identifier);
    var wp7Project = new FrameworkName("Silverlight, Version=v3.0, Profile=WindowsPhone");
    var mangoProject = new FrameworkName("Silverlight, Version=v4.0, Profile=WindowsPhone71");
    var apolloProject = new FrameworkName("WindowsPhone, Version=v8.0");

    // Act & Assert
    Assert.True(VersionUtility.IsCompatible(wp7Project, package));
    Assert.True(VersionUtility.IsCompatible(mangoProject, package));
    Assert.True(VersionUtility.IsCompatible(apolloProject, package));
}
[Theory]
[InlineData("wp71")]
[InlineData("windowsphone71")]
[InlineData("sl4-wp71")]
public void WindowsPhoneMangoIdentifierCompatibleWithAllWPProjects(string mangoIdentifier)
{
    // Arrange: Mango (WP7.1) packages work in Mango and later projects, but not WP7.
    var package = VersionUtility.ParseFrameworkName(mangoIdentifier);
    var wp7Project = new FrameworkName("Silverlight, Version=v3.0, Profile=WindowsPhone");
    var mangoProject = new FrameworkName("Silverlight, Version=v4.0, Profile=WindowsPhone71");
    var apolloProject = new FrameworkName("WindowsPhone, Version=v8.0");

    // Act & Assert
    Assert.False(VersionUtility.IsCompatible(wp7Project, package));
    Assert.True(VersionUtility.IsCompatible(mangoProject, package));
    Assert.True(VersionUtility.IsCompatible(apolloProject, package));
}
[Theory]
[InlineData("wp8")]
[InlineData("wp80")]
[InlineData("windowsphone8")]
[InlineData("windowsphone80")]
public void WindowsPhoneApolloIdentifierCompatibleWithAllWPProjects(string apolloIdentifier)
{
    // Arrange: Apollo (WP8) packages only work in WP8 projects, not earlier ones.
    var package = VersionUtility.ParseFrameworkName(apolloIdentifier);
    var wp7Project = new FrameworkName("Silverlight, Version=v3.0, Profile=WindowsPhone");
    var mangoProject = new FrameworkName("Silverlight, Version=v4.0, Profile=WindowsPhone71");
    var apolloProject = new FrameworkName("WindowsPhone, Version=v8.0");

    // Act & Assert
    Assert.False(VersionUtility.IsCompatible(wp7Project, package));
    Assert.False(VersionUtility.IsCompatible(mangoProject, package));
    Assert.True(VersionUtility.IsCompatible(apolloProject, package));
}
[Theory]
[InlineData("windows")]
[InlineData("windows8")]
[InlineData("win")]
[InlineData("win8")]
public void WindowsIdentifierCompatibleWithWindowsStoreAppProjects(string identifier)
{
    // Arrange: each "windows"/"win" spelling maps onto the .NETCore 4.5 project framework.
    var package = VersionUtility.ParseFrameworkName(identifier);
    var storeAppProject = new FrameworkName(".NETCore, Version=4.5");

    // Act & Assert
    Assert.True(VersionUtility.IsCompatible(storeAppProject, package));
}
[Theory]
[InlineData("windows9")]
[InlineData("win9")]
[InlineData("win10")]
[InlineData("windows81")]
[InlineData("windows45")]
[InlineData("windows1")]
public void WindowsIdentifierWithUnsupportedVersionNotCompatibleWithWindowsStoreAppProjects(string identifier)
{
    // Arrange: versions above what a .NETCore 4.5 project supports must be rejected.
    var package = VersionUtility.ParseFrameworkName(identifier);
    var storeAppProject = new FrameworkName(".NETCore, Version=4.5");

    // Act & Assert
    Assert.False(VersionUtility.IsCompatible(storeAppProject, package));
}
// NOTE(review): method name contains a typo ("Compatibiility"); kept as-is since test
// names may be referenced by tooling/filters.
[Fact]
public void NetFrameworkCompatibiilityIsCompatibleReturns()
{
    // Arrange: full .NET 4.0 vs. its Client profile.
    FrameworkName fullProfile = VersionUtility.ParseFrameworkName("net40");
    FrameworkName clientProfile = VersionUtility.ParseFrameworkName("net40-client");

    // Act & Assert: the two profiles are interchangeable in either direction.
    Assert.True(VersionUtility.IsCompatible(fullProfile, clientProfile));
    Assert.True(VersionUtility.IsCompatible(clientProfile, fullProfile));
}
[Fact]
public void LowerFrameworkVersionsAreNotCompatibleWithHigherFrameworkVersionsWithSameFrameworkName()
{
    // Arrange
    FrameworkName net40 = VersionUtility.ParseFrameworkName("net40");
    FrameworkName net20 = VersionUtility.ParseFrameworkName("net20");

    // Act & Assert: a net40 package cannot go into a net20 project, but the
    // reverse direction works.
    Assert.False(VersionUtility.IsCompatible(net20, net40));
    Assert.True(VersionUtility.IsCompatible(net40, net20));
}
[Fact]
public void IsCompatibleReturnsTrueIfSupportedFrameworkListIsEmpty()
{
    // A package that declares no supported frameworks is treated as universal.

    // Arrange
    FrameworkName projectFramework = VersionUtility.ParseFrameworkName("net40-client");

    // Act
    bool compatible = VersionUtility.IsCompatible(projectFramework, Enumerable.Empty<FrameworkName>());

    // Assert
    Assert.True(compatible);
}
// NOTE(review): method name typo ("Nully") kept to avoid renaming a public test.
[Fact]
public void IsCompatibleReturnsTrueIfProjectFrameworkIsNully()
{
    // A null project framework means "no constraint" — everything is compatible.

    // Arrange
    FrameworkName packageFramework = VersionUtility.ParseFrameworkName("net40-client");

    // Act
    bool compatible = VersionUtility.IsCompatible(null, packageFramework);

    // Assert
    Assert.True(compatible);
}
[Fact]
public void ParseVersionThrowsIfExclusiveMinAndMaxVersionSpecContainsNoValues()
{
    // "(,)" supplies neither bound, which is invalid.

    // Act & Assert
    var exception = Assert.Throws<ArgumentException>(() => VersionUtility.ParseVersionSpec("(,)"));
    Assert.Equal("'(,)' is not a valid version string.", exception.Message);
}
[Fact]
public void ParseVersionThrowsIfInclusiveMinAndMaxVersionSpecContainsNoValues()
{
    // "[,]" supplies neither bound, which is invalid.

    // Act & Assert
    var exception = Assert.Throws<ArgumentException>(() => VersionUtility.ParseVersionSpec("[,]"));
    Assert.Equal("'[,]' is not a valid version string.", exception.Message);
}
[Fact]
public void ParseVersionThrowsIfInclusiveMinAndExclusiveMaxVersionSpecContainsNoValues()
{
    // "[,)" supplies neither bound, which is invalid.

    // Act & Assert
    var exception = Assert.Throws<ArgumentException>(() => VersionUtility.ParseVersionSpec("[,)"));
    Assert.Equal("'[,)' is not a valid version string.", exception.Message);
}
[Fact]
public void ParseVersionThrowsIfExclusiveMinAndInclusiveMaxVersionSpecContainsNoValues()
{
    // "(,]" supplies neither bound, which is invalid.

    // Act & Assert
    var exception = Assert.Throws<ArgumentException>(() => VersionUtility.ParseVersionSpec("(,]"));
    Assert.Equal("'(,]' is not a valid version string.", exception.Message);
}
[Fact]
public void ParseVersionThrowsIfVersionSpecIsMissingVersionComponent()
{
    // "1.3..2" has an empty component between the double dots, which is invalid.

    // Act & Assert
    var exception = Assert.Throws<ArgumentException>(() => VersionUtility.ParseVersionSpec("(,1.3..2]"));
    Assert.Equal("'(,1.3..2]' is not a valid version string.", exception.Message);
}
[Fact]
public void ParseVersionThrowsIfVersionSpecContainsMoreThen4VersionComponents()
{
    // Versions are limited to 4 dotted components; "1.2.3.4.5" has 5.

    // Act & Assert
    var exception = Assert.Throws<ArgumentException>(() => VersionUtility.ParseVersionSpec("(1.2.3.4.5,1.2]"));
    Assert.Equal("'(1.2.3.4.5,1.2]' is not a valid version string.", exception.Message);
}
[Theory]
[PropertyData("VersionSpecData")]
public void ParseVersionParsesTokensVersionsCorrectly(string versionString, VersionSpec versionSpec)
{
    // Act
    var parsed = VersionUtility.ParseVersionSpec(versionString);

    // Assert: compare field-by-field against the expected spec (see VersionSpecData).
    Assert.Equal(versionSpec.MinVersion, parsed.MinVersion);
    Assert.Equal(versionSpec.MaxVersion, parsed.MaxVersion);
    Assert.Equal(versionSpec.IsMinInclusive, parsed.IsMinInclusive);
    Assert.Equal(versionSpec.IsMaxInclusive, parsed.IsMaxInclusive);
}
// Theory data for ParseVersionParsesTokensVersionsCorrectly: { spec string, expected parse }.
public static IEnumerable<object[]> VersionSpecData
{
    get
    {
        return new[]
        {
            new object[] { "(1.2.3.4, 3.2)", new VersionSpec { MinVersion = new SemanticVersion("1.2.3.4"), MaxVersion = new SemanticVersion("3.2"), IsMinInclusive = false, IsMaxInclusive = false } },
            new object[] { "(1.2.3.4, 3.2]", new VersionSpec { MinVersion = new SemanticVersion("1.2.3.4"), MaxVersion = new SemanticVersion("3.2"), IsMinInclusive = false, IsMaxInclusive = true } },
            new object[] { "[1.2, 3.2.5)", new VersionSpec { MinVersion = new SemanticVersion("1.2"), MaxVersion = new SemanticVersion("3.2.5"), IsMinInclusive = true, IsMaxInclusive = false } },
            new object[] { "[2.3.7, 3.2.4.5]", new VersionSpec { MinVersion = new SemanticVersion("2.3.7"), MaxVersion = new SemanticVersion("3.2.4.5"), IsMinInclusive = true, IsMaxInclusive = true } },
            new object[] { "(, 3.2.4.5]", new VersionSpec { MinVersion = null, MaxVersion = new SemanticVersion("3.2.4.5"), IsMinInclusive = false, IsMaxInclusive = true } },
            new object[] { "(1.6, ]", new VersionSpec { MinVersion = new SemanticVersion("1.6"), MaxVersion = null, IsMinInclusive = false, IsMaxInclusive = true } },
            // A single version in brackets pins both bounds to that version.
            new object[] { "(1.6)", new VersionSpec { MinVersion = new SemanticVersion("1.6"), MaxVersion = new SemanticVersion("1.6"), IsMinInclusive = false, IsMaxInclusive = false } },
            new object[] { "[2.7]", new VersionSpec { MinVersion = new SemanticVersion("2.7"), MaxVersion = new SemanticVersion("2.7"), IsMinInclusive = true, IsMaxInclusive = true } }
        };
    }
}
[Fact]
public void ParsePortableFrameworkNameThrowsIfProfileIsEmpty()
{
    // "portable45" has a version but no "-<profile>" suffix, which is required.

    // Act & Assert
    ExceptionAssert.ThrowsArgumentException(
        () => VersionUtility.ParseFrameworkName("portable45"),
        "profilePart",
        "Portable target framework must not have an empty profile part.");
}
[Fact]
public void ParsePortableFrameworkNameThrowsIfProfileContainsASpace()
{
    // Profile components are '+'-separated; a space is not a valid separator.

    // Act & Assert
    ExceptionAssert.ThrowsArgumentException(
        () => VersionUtility.ParseFrameworkName("portable45-sl4 net45"),
        "profilePart",
        "The profile part of a portable target framework must not contain empty space.");
}
[Fact]
public void ParsePortableFrameworkNameThrowsIfProfileContainsEmptyComponent()
{
    // "sl4++net45" has an empty component between the two '+' separators.

    // Act & Assert
    ExceptionAssert.ThrowsArgumentException(
        () => VersionUtility.ParseFrameworkName("portable45-sl4++net45"),
        "profilePart",
        "The profile part of a portable target framework must not contain empty component.");
}
[Fact]
public void ParsePortableFrameworkNameThrowsIfProfileContainsPortableFramework()
{
    // Nesting "portable" inside a portable profile is not allowed.

    // Act & Assert
    ExceptionAssert.ThrowsArgumentException(
        () => VersionUtility.ParseFrameworkName("portable-net45+portable"),
        "profilePart",
        "The profile part of a portable target framework must not contain a portable framework component.");
}
[Theory]
[InlineData(".NETPortable, Version=4.0, Profile=Profile1", "portable-net45+sl40+wp71")]
[InlineData(".NETPortable, Version=4.0, Profile=Profile2", "portable-win+sl30+wp71")]
[InlineData(".NETPortable, Version=4.0, Profile=Profile4", "portable-sl20+wp")]
public void TestGetShortNameForPortableFramework(string longName, string expectedShortName)
{
    // Arrange: profile definitions come from the shared test profile table.
    var profileTable = BuildTestProfileTable();

    // Act
    string actual = VersionUtility.GetShortFrameworkName(new FrameworkName(longName), profileTable);

    // Assert
    Assert.Equal(expectedShortName, actual);
}
[Fact]
public void GetShortNameDoesNotIncludeVersionIfVersionIs00()
{
    // Version 0.0 is omitted from the short name entirely.

    // Act
    string actual = VersionUtility.GetShortFrameworkName(new FrameworkName("Silverlight, Version=v0.0"));

    // Assert
    Assert.Equal("sl", actual);
}
[Theory]
[InlineData("ASP.Net, Version=5.0", "aspnet50")]
[InlineData("ASP.NetCore, Version=5.0", "aspnetcore50")]
// No such thing as ASP.Net, Version=5.1 at time of writing, just testing the logic
[InlineData("ASP.Net, Version=5.1", "aspnet51")]
[InlineData("ASP.NetCore, Version=5.1", "aspnetcore51")]
public void GetShortNameForASPNetAndASPNetCoreWorks(string longName, string expectedShortName)
{
    // Act
    string actual = VersionUtility.GetShortFrameworkName(new FrameworkName(longName));

    // Assert
    Assert.Equal(expectedShortName, actual);
}
[Fact]
public void GetShortNameForNetCore45ReturnsWindows()
{
    // .NETCore 4.5 (Windows 8 store apps) short-names to plain "win".

    // Act
    string actual = VersionUtility.GetShortFrameworkName(new FrameworkName(".NETCore, Version=v4.5"));

    // Assert
    Assert.Equal("win", actual);
}
[Fact]
public void GetShortNameForNetCore451ReturnsWindows81()
{
    // .NETCore 4.5.1 (Windows 8.1 store apps) short-names to "win81".

    // Act
    string actual = VersionUtility.GetShortFrameworkName(new FrameworkName(".NETCore, Version=v4.5.1"));

    // Assert
    Assert.Equal("win81", actual);
}
[Fact]
public void GetShortNameForWindowsPhoneReturnsWP()
{
    // The Silverlight 3.0 WindowsPhone profile short-names to "wp".

    // Act
    string actual = VersionUtility.GetShortFrameworkName(new FrameworkName("Silverlight, Version=v3.0, Profile=WindowsPhone"));

    // Assert
    Assert.Equal("wp", actual);
}
[Fact]
public void GetShortNameForMangoReturnsWP71()
{
    // The Silverlight 4.0 WindowsPhone71 (Mango) profile short-names to "wp71".

    // Act
    string actual = VersionUtility.GetShortFrameworkName(new FrameworkName("Silverlight, Version=v4.0, Profile=WindowsPhone71"));

    // Assert
    Assert.Equal("wp71", actual);
}
[Theory]
[InlineData("Xamarin.Mac, Version=v1.0", "xamarinmac10")]
[InlineData("Xamarin.iOS, Version=v1.0", "xamarinios10")]
[InlineData("Xamarin.PlayStation3, Version=v1.0", "xamarinpsthree10")]
[InlineData("Xamarin.PlayStation4, Version=v1.0", "xamarinpsfour10")]
[InlineData("Xamarin.PlayStationVita, Version=v1.0", "xamarinpsvita10")]
[InlineData("Xamarin.Xbox360, Version=v1.0", "xamarinxboxthreesixty10")]
[InlineData("Xamarin.XboxOne, Version=v1.0", "xamarinxboxone10")]
public void GetShortNameForXamarinFrameworks(string frameworkIdentifier, string expectedShortName)
{
    // Xamarin identifiers spell out digits in platform names (e.g. "psthree").

    // Act
    string actual = VersionUtility.GetShortFrameworkName(new FrameworkName(frameworkIdentifier));

    // Assert
    Assert.Equal(expectedShortName, actual);
}
[Theory]
[InlineData(".NETPortable, Version=4.0, Profile=Profile1", "portable-net45+xamarinmac10+xamarinios10")]
[InlineData(".NETPortable, Version=4.0, Profile=Profile2", "portable-net40+win+xamarinpsthree10+xamarinpsfour10+xamarinpsvita10")]
[InlineData(".NETPortable, Version=4.0, Profile=Profile3", "portable-net40+xamarinxboxthreesixty10+xamarinxboxone10")]
public void TestGetShortNameForPortableXamarinFrameworks(string frameworkIdentifier, string expectedShortName)
{
    // Arrange: build a profile table with three portable profiles spanning Xamarin platforms.
    var profiles = new NetPortableProfileCollection
    {
        new NetPortableProfile(
            "Profile1",
            new[]
            {
                new FrameworkName(".NETFramework, Version=4.5"),
                new FrameworkName("Xamarin.Mac, Version=1.0"),
                new FrameworkName("Xamarin.iOS, Version=1.0"),
            }),
        new NetPortableProfile(
            "Profile2",
            new[]
            {
                new FrameworkName(".NETFramework, Version=4.0"),
                new FrameworkName(".NetCore, Version=4.5"),
                new FrameworkName("Xamarin.PlayStation3, Version=1.0"),
                new FrameworkName("Xamarin.PlayStation4, Version=1.0"),
                new FrameworkName("Xamarin.PlayStationVita, Version=1.0"),
            }),
        new NetPortableProfile(
            "Profile3",
            new[]
            {
                new FrameworkName(".NETFramework, Version=4.0"),
                new FrameworkName("Xamarin.Xbox360, Version=1.0"),
                new FrameworkName("Xamarin.XboxOne, Version=1.0"),
            }),
    };
    var profileTable = new NetPortableProfileTable(profiles);

    // Act
    string actual = VersionUtility.GetShortFrameworkName(new FrameworkName(frameworkIdentifier), profileTable);

    // Assert
    Assert.Equal(expectedShortName, actual);
}
[Theory]
[InlineData("portable-netcore45+sl4", "silverlight45")]
[InlineData("portable-net40+win8+sl4+wp71+wpa81", "wp81")]
public void IsCompatibleReturnsTrueForPortableFrameworkAndNormalFramework(string packageFramework, string projectFramework)
{
    // Arrange: a portable package whose profile covers the (non-portable) project framework.
    var package = VersionUtility.ParseFrameworkName(packageFramework);
    var project = VersionUtility.ParseFrameworkName(projectFramework);

    // Act & Assert
    Assert.True(VersionUtility.IsCompatible(project, package));
}
[Fact]
public void IsCompatibleReturnsFalseForPortableFrameworkAndNormalFramework()
{
    // Arrange: the portable profile requires sl4, so an sl3 project is not covered.
    var package = VersionUtility.ParseFrameworkName("portable-netcore45+sl4");
    var project = VersionUtility.ParseFrameworkName("silverlight3");

    // Act & Assert
    Assert.False(VersionUtility.IsCompatible(project, package));
}
[Fact]
public void IsCompatibleReturnsFalseForPortableFrameworkAndNormalFramework2()
{
    // Arrange: the portable profile does not include Windows Phone at all.
    var package = VersionUtility.ParseFrameworkName("portable-netcore45+sl4");
    var project = VersionUtility.ParseFrameworkName("wp7");

    // Act & Assert
    Assert.False(VersionUtility.IsCompatible(project, package));
}
[Theory]
// COMPATIBLE: Same framework, easy first case
[InlineData("aspnet50", "aspnet50", true)]
[InlineData("aspnetcore50", "aspnetcore50", true)]
// COMPATIBLE: Project targeting later framework
[InlineData("aspnet51", "aspnet50", true)]
[InlineData("aspnet51", "net451", true)]
[InlineData("aspnet51", "net40", true)]
[InlineData("aspnet51", "net20", true)]
[InlineData("aspnetcore51", "aspnetcore50", true)]
// NOT COMPATIBLE: aspnet into aspnetcore and vice-versa
[InlineData("aspnet50", "aspnetcore50", false)]
[InlineData("aspnetcore50", "aspnet50", false)]
// COMPATIBLE: aspnet project, net package (any version)
// Don't get excited by version numbers here. I'm just randomly guessing higher version numbers :)
[InlineData("aspnet50", "net451", true)]
[InlineData("aspnet50", "net40", true)]
[InlineData("aspnet50", "net20", true)]
[InlineData("aspnet50", "net50", true)]
[InlineData("aspnet50", "net60", true)]
[InlineData("aspnet50", "net70", true)]
// NOT COMPATIBLE: Package targeting later framework
[InlineData("aspnet50", "aspnet51", false)]
[InlineData("aspnetcore50", "aspnetcore51", false)]
// NOT COMPATIBLE: aspnetcore project, netcore/win package (any version)
// Don't get excited by version numbers here. I'm just randomly guessing higher version numbers :)
[InlineData("aspnetcore50", "netcore70", false)]
[InlineData("aspnetcore50", "netcore60", false)]
[InlineData("aspnetcore50", "netcore50", false)]
[InlineData("aspnetcore50", "netcore451", false)]
[InlineData("aspnetcore50", "netcore45", false)]
[InlineData("aspnetcore50", "win81", false)]
[InlineData("aspnetcore50", "win80", false)]
// COMPATIBLE: Portable Packages
[InlineData("aspnet50", "portable-net45+win81", true)]
// NOT COMPATIBLE: Portable Packages
[InlineData("aspnet50", "portable-sl50+win81", false)]
[InlineData("aspnetcore50", "portable-net45+win81", false)]
[InlineData("aspnetcore50", "portable-net45+sl40", false)]
public void IsCompatibleMatrixForASPNetFrameworks(string projectFramework, string packageFramework, bool compatible)
{
    // Act
    bool actual = VersionUtility.IsCompatible(
        VersionUtility.ParseFrameworkName(projectFramework),
        VersionUtility.ParseFrameworkName(packageFramework));

    // Assert — expected value first per xUnit convention; the original call had the
    // arguments reversed, producing misleading expected/actual labels on failure.
    Assert.Equal(compatible, actual);
}
[Theory]
[InlineData("silverlight4")]
[InlineData("silverlight3")]
[InlineData("silverlight5")]
[InlineData("netcore45")]
[InlineData("netcore5")]
public void IsCompatibleReturnsFalseForNormalFrameworkAndPortableFramework(string frameworkValue)
{
    // Arrange: a non-portable package can never satisfy a portable project target.
    var portableProject = VersionUtility.ParseFrameworkName("portable-netcore45+sl4");
    var normalPackage = VersionUtility.ParseFrameworkName(frameworkValue);

    // Act & Assert
    Assert.False(VersionUtility.IsCompatible(portableProject, normalPackage));
}
[Theory]
[InlineData("portable-netcore45+sl4+wp", "portable-netcore45+sl4")]
[InlineData("portable-netcore45+sl4+wp", "portable-netcore5+wp7")]
[InlineData("portable-netcore45+sl4+wp+net", "portable-wp7")]
[InlineData("portable-net40+win8+sl4+wp71+wpa81", "portable-wpa81+wp81")]
[InlineData("portable-wp8+wpa81", "portable-wpa81+wp81")]
[InlineData("portable-wp81+wpa81", "portable-wpa81+wp81")]
[InlineData("portable-wpa81+wp81", "portable-wpa81+wp81")]
public void IsCompatibleReturnsTrueForPortableFrameworkAndPortableFramework(string packageFramework, string projectFramework)
{
    // Arrange: the package's portable profile covers every platform of the project's profile.
    var package = VersionUtility.ParseFrameworkName(packageFramework);
    var project = VersionUtility.ParseFrameworkName(projectFramework);

    // Act & Assert
    Assert.True(VersionUtility.IsCompatible(project, package));
}
[Theory]
[InlineData("portable-netcore45+sl4+wp", "portable-netcore4+sl4")]
[InlineData("portable-netcore45+sl4+wp", "portable-netcore5+wp7+net")]
[InlineData("portable-netcore45+sl4+wp+net", "portable-wp7+netcore4")]
[InlineData("portable-netcore45+sl4", "portable-net4+wp7")]
[InlineData("portable-net40+win8+sl4+wp71", "portable-wpa81+wp81")]
public void IsCompatibleReturnsFalseForPortableFrameworkAndPortableFramework(string packageFramework, string projectFramework)
{
    // Arrange: at least one platform of the project's profile is missing from the package's.
    var package = VersionUtility.ParseFrameworkName(packageFramework);
    var project = VersionUtility.ParseFrameworkName(projectFramework);

    // Act & Assert
    Assert.False(VersionUtility.IsCompatible(project, package));
}
[Theory]
[InlineData("portable-net45+sl5+wp71", "portable-net45+sl5+wp71", -3)]
[InlineData("portable-net45+sl5+wp71", "portable-net45+sl5+wp71+win8", -4)]
[InlineData("portable-net45+sl5+wp71", "portable-net45+sl4+wp71+win8", -54)]
[InlineData("portable-net45+sl5+wp71", "portable-net4+sl4+wp71+win8", -104)]
[InlineData("portable-net45+sl5+wp71", "portable-net4+sl4+wp7+win8", -154)]
[InlineData("portable-win8+wp8", "portable-win8+wp7", -52)]
[InlineData("portable-win8+wp8", "portable-win8+wp7+silverlight4", -53)]
public void TestGetCompatibilityBetweenPortableLibraryAndPortableLibrary(string frameworkName, string targetFrameworkName, int expectedScore)
{
    // Arrange
    var projectProfile = VersionUtility.ParseFrameworkName(frameworkName);
    var packageProfile = VersionUtility.ParseFrameworkName(targetFrameworkName);

    // Act: more negative scores indicate a worse profile match.
    int actualScore = VersionUtility.GetCompatibilityBetweenPortableLibraryAndPortableLibrary(projectProfile, packageProfile);

    // Assert
    Assert.Equal(expectedScore, actualScore);
}
/// <summary>
/// The following example is used in the comments provided in the product code too including how the computation takes place
/// Refer VersionUtility.GetCompatibilityBetweenPortableLibraryAndPortableLibrary for more details
/// For example, Let Project target net45+sl5+monotouch+monoandroid. And, Package has 4 profiles
/// A: net45+sl5, B: net40+sl5+monotouch, C: net40+sl4+monotouch+monoandroid, D: net40+sl4+monotouch+monoandroid+wp71
/// </summary>
[Theory]
[InlineData("portable-net45+sl50+MonoTouch+MonoAndroid", "portable-net45+sl5", -502)]
// NOTE(review): the three rows below are deliberately disabled; confirm with history whether
// they represent expected-but-unimplemented scores before re-enabling.
//[InlineData("portable-net45+sl50+MonoTouch+MonoAndroid", "portable-net40+sl5+MonoTouch", -303)]
//[InlineData("portable-net45+sl50+MonoTouch+MonoAndroid", "portable-net40+sl4+MonoTouch+MonoAndroid", -104)]
//[InlineData("portable-net45+sl50+MonoTouch+MonoAndroid", "portable-net40+sl4+MonoTouch+MonoAndroid+wp71", -105)]
public void TestGetCompatibilityBetweenPortableLibraryAndPortableLibraryWithOptionalFx(string frameworkName, string targetFrameworkName, int expectedScore)
{
    // Profile1 declares net45+sl5 as required platforms and MonoTouch/MonoAndroid
    // (version 0.0) as optional ones.
    var profile1 = new NetPortableProfile(
        "Profile1",
        new[] {
               new FrameworkName(".NETFramework, Version=4.5"),
               new FrameworkName("Silverlight, Version=5.0"),
            },
        new[] {
               new FrameworkName("MonoTouch, Version=0.0"),
               new FrameworkName("MonoAndroid, Version=0.0"),
            });
    NetPortableProfileCollection profileCollection = new NetPortableProfileCollection();
    profileCollection.Add(profile1);
    var portableProfileTable = new NetPortableProfileTable(profileCollection);

    // Arrange
    var framework = VersionUtility.ParseFrameworkName(frameworkName);
    var targetFramework = VersionUtility.ParseFrameworkName(targetFrameworkName);

    // Act: score the package profile against the project profile using the custom table.
    int score = VersionUtility.GetCompatibilityBetweenPortableLibraryAndPortableLibrary(framework, targetFramework, portableProfileTable);

    // Assert
    Assert.Equal(expectedScore, score);
}
/// <summary>
/// Ensures that when the package target framework profile is already available in the
/// NetPortableProfileCollection, the compatibility score is still computed as expected.
/// NOTE(review): the original summary was truncated after "Still the" — presumably it meant
/// "still the score computation works against the pre-loaded profile"; confirm intent.
/// </summary>
[Theory]
[InlineData("portable-net40+sl40+MonoTouch+MonoAndroid", "portable-net40+sl40+MonoTouch+MonoAndroid", -4)]
[InlineData("portable-net45+MonoTouch+MonoAndroid", "portable-net40+sl40+MonoTouch+MonoAndroid", -54)]
public void TestGetCompatibilityBetweenPortableLibraryAndPortableLibraryWithPreLoadedPackageProfile(string frameworkName, string targetFrameworkName, int expectedScore)
{
    // Pre-load a profile matching the package's target framework (net40+sl40 supported,
    // MonoTouch/MonoAndroid optional).
    var profile1 = new NetPortableProfile(
        "Profile1",
        new[] {
            new FrameworkName(".NETFramework, Version=4.0"),
            new FrameworkName("Silverlight, Version=4.0"),
        },
        new[] {
            new FrameworkName("MonoTouch, Version=0.0"),
            new FrameworkName("MonoAndroid, Version=0.0"),
        });
    NetPortableProfileCollection profileCollection = new NetPortableProfileCollection();
    profileCollection.Add(profile1);
    var portableProfileTable = new NetPortableProfileTable(profileCollection);
    // Arrange
    var framework = VersionUtility.ParseFrameworkName(frameworkName);
    var targetFramework = VersionUtility.ParseFrameworkName(targetFrameworkName);
    // Act
    int score = VersionUtility.GetCompatibilityBetweenPortableLibraryAndPortableLibrary(framework, targetFramework, portableProfileTable);
    // Assert
    Assert.Equal(expectedScore, score);
}
/// <summary>
/// (a) First case is when projectFrameworkName is not compatible with packageTargetFrameworkName and returns long.MinValue
/// (b) Second case is where there is a framework in portable packageFramework compatible with the Mono projectFramework
/// (c) The last cases are when there is no framework in portable packageFramework that is compatible with the Mono projectFramework
///     (i) Check if there is an *installed* portable profile which has the aforementioned project framework as an optional framework
///     (ii) And, check if the project framework version >= found optional framework and that the supported frameworks are compatible with the ones in packageTargetFramework
///     (iii) In the source code, this is the else part in method GetCompatibilityBetweenPortableLibraryAndNonPortableLibrary()
/// </summary>
[Theory]
[InlineData("MonoAndroid10", "portable-net45+sl5", long.MinValue)]
// 180388626433 below = (1L << 32 + 5) + 1 + (10 * (1L << 32)). And, this is the score accumulated
// across methods like CalculateVersionDistance and GetProfileCompatibility
[InlineData("MonoAndroid10", "portable-net40+sl4+wp71+win8+MonoAndroid10", (180388626433 - 5 * 2))]
[InlineData("MonoAndroid10", "portable-net40+sl4+wp71+win8", -4*2)]
[InlineData("MonoAndroid10", "portable-net45+wp8+win8", -3*2)]
[InlineData("MonoAndroid10", "portable-net40+sl4+wp71+win8+MonoTouch", -5*2)]
[InlineData("MonoAndroid20", "portable-net40+sl4+wp71+win8+MonoTouch", -5 * 2)]
[InlineData("MonoAndroid", "portable-net40+sl4+wp71+win8+MonoTouch", long.MinValue)]
public void TestGetCompatibilityBetweenPortableLibraryAndNonPortableLibraryForMono(string projectFrameworkName, string packageTargetFrameworkName, long expectedScore)
{
    // Arrange
    // Profile1: four supported frameworks plus MonoTouch/MonoAndroid/MonoMac v1.0 as optional.
    var profile1 = new NetPortableProfile(
        "Profile1",
        new[] {
            new FrameworkName(".NETFramework, Version=4.5"),
            new FrameworkName("Silverlight, Version=4.0"),
            new FrameworkName("WindowsPhone, Version=7.1"),
            new FrameworkName("Windows, Version=8.0"),
        },
        new[] {
            new FrameworkName("MonoTouch, Version=1.0"),
            new FrameworkName("MonoAndroid, Version=1.0"),
            new FrameworkName("MonoMac, Version=1.0"),
        });
    // Profile2: narrower supported set, same Mono optional frameworks (minus MonoMac).
    var profile2 = new NetPortableProfile(
        "Profile2",
        new[] {
            new FrameworkName(".NETFramework, Version=4.5"),
            new FrameworkName("WindowsPhone, Version=8.0"),
            new FrameworkName("Windows, Version=8.0"),
        },
        new[] {
            new FrameworkName("MonoTouch, Version=1.0"),
            new FrameworkName("MonoAndroid, Version=1.0"),
        });
    NetPortableProfileCollection profileCollection = new NetPortableProfileCollection();
    profileCollection.Add(profile1);
    profileCollection.Add(profile2);
    var portableProfileTable = new NetPortableProfileTable(profileCollection);
    // Arrange
    var framework = VersionUtility.ParseFrameworkName(projectFrameworkName);
    var targetFramework = VersionUtility.ParseFrameworkName(packageTargetFrameworkName);
    // Act
    long score = VersionUtility.GetCompatibilityBetweenPortableLibraryAndNonPortableLibrary(framework, targetFramework, portableProfileTable);
    // Assert
    Assert.Equal(expectedScore, score);
}
/// <summary>
/// Builds a fixed, machine-independent profile table containing four synthetic
/// portable profiles used by the tests in this class.
/// </summary>
private NetPortableProfileTable BuildTestProfileTable()
{
    // The four profiles cover desktop, store, legacy-net and Silverlight-only combinations.
    var profiles = new[]
    {
        new NetPortableProfile(
            "Profile1",
            new[] {
                new FrameworkName(".NETFramework, Version=4.5"),
                new FrameworkName("Silverlight, Version=4.0"),
                new FrameworkName("WindowsPhone, Version=7.1"),
            }),
        new NetPortableProfile(
            "Profile2",
            new[] {
                new FrameworkName(".NetCore, Version=4.5"),
                new FrameworkName("Silverlight, Version=3.0"),
                new FrameworkName("WindowsPhone, Version=7.1"),
            }),
        new NetPortableProfile(
            "Profile3",
            new[] {
                new FrameworkName(".NetCore, Version=4.5"),
                new FrameworkName(".NETFramework, Version=2.0"),
            }),
        new NetPortableProfile(
            "Profile4",
            new[] {
                new FrameworkName("Silverlight, Version=2.0"),
                new FrameworkName("Silverlight, Version=3.0, Profile=WindowsPhone"),
            }),
    };

    var collection = new NetPortableProfileCollection();

    foreach (var profile in profiles)
        collection.Add(profile);

    return new NetPortableProfileTable(collection);
}
}
}
| |
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using osu.Framework.Allocation;
using osu.Framework.Bindables;
using osu.Framework.Extensions.Color4Extensions;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Cursor;
using osu.Framework.Graphics.Shapes;
using osu.Framework.Graphics.UserInterface;
using osu.Framework.Input.Events;
using osu.Game.Graphics;
using osu.Game.Graphics.UserInterface;
using osu.Game.Tournament.Components;
using osu.Game.Tournament.Models;
using osu.Game.Tournament.Screens.Editors;
using osuTK;
using osuTK.Graphics;
using osuTK.Input;
namespace osu.Game.Tournament.Screens.Ladder.Components
{
/// <summary>
/// Displays one team slot within a ladder match: flag, acronym and score.
/// Left/right clicks adjust the score outside the editor; the ladder editor
/// exposes join/remove actions via a context menu.
/// </summary>
public class DrawableMatchTeam : DrawableTournamentTeam, IHasContextMenu
{
    private readonly TournamentMatch match;
    private readonly bool losers;
    private TournamentSpriteText scoreText;
    private Box background;
    private Box backgroundRight;

    // Bound to this team's score on the match (Team1Score or Team2Score).
    private readonly Bindable<int?> score = new Bindable<int?>();
    private readonly BindableBool completed = new BindableBool();

    // Highlight colour applied to the score area when this team has won.
    private Color4 colourWinner;

    private readonly Func<bool> isWinner;
    private LadderEditorScreen ladderEditor;

    // Fixed: previously used the constructor-argument form ([Resolved(canBeNull: true)])
    // while editorInfo below used the named-property form; unified for consistency.
    [Resolved(CanBeNull = true)]
    private LadderInfo ladderInfo { get; set; }

    /// <summary>
    /// Makes this team's match the tournament's current match,
    /// clearing the flag from the previously current match (if any).
    /// </summary>
    private void setCurrent()
    {
        if (ladderInfo == null) return;

        //todo: tournamentgamebase?
        if (ladderInfo.CurrentMatch.Value != null)
            ladderInfo.CurrentMatch.Value.Current.Value = false;

        ladderInfo.CurrentMatch.Value = match;
        ladderInfo.CurrentMatch.Value.Current.Value = true;
    }

    [Resolved(CanBeNull = true)]
    private LadderEditorInfo editorInfo { get; set; }

    /// <param name="team">The team to display; may be null for an undecided slot.</param>
    /// <param name="match">The match this slot belongs to; may be null.</param>
    /// <param name="losers">Whether this match sits in the losers bracket (affects winner colour).</param>
    public DrawableMatchTeam(TournamentTeam team, TournamentMatch match, bool losers)
        : base(team)
    {
        this.match = match;
        this.losers = losers;

        Size = new Vector2(150, 40);

        Flag.Scale = new Vector2(0.54f);
        Flag.Anchor = Flag.Origin = Anchor.CentreLeft;

        AcronymText.Anchor = AcronymText.Origin = Anchor.CentreLeft;
        AcronymText.Padding = new MarginPadding { Left = 50 };
        AcronymText.Font = OsuFont.Torus.With(size: 22, weight: FontWeight.Bold);

        if (match != null)
        {
            isWinner = () => match.Winner == Team;

            completed.BindTo(match.Completed);

            // Bind to whichever side of the match this team occupies.
            if (team != null)
                score.BindTo(team == match.Team1.Value ? match.Team1Score : match.Team2Score);
        }
    }

    [BackgroundDependencyLoader(true)]
    private void load(OsuColour colours, LadderEditorScreen ladderEditor)
    {
        this.ladderEditor = ladderEditor;

        // Losers bracket uses a gold tint; winners bracket uses blue.
        colourWinner = losers
            ? Color4Extensions.FromHex("#8E7F48")
            : Color4Extensions.FromHex("#1462AA");

        InternalChildren = new Drawable[]
        {
            background = new Box
            {
                RelativeSizeAxes = Axes.Both,
            },
            new Container
            {
                Padding = new MarginPadding(5),
                RelativeSizeAxes = Axes.Both,
                Children = new Drawable[]
                {
                    AcronymText,
                    Flag,
                }
            },
            new Container
            {
                Masking = true,
                Width = 0.3f,
                Anchor = Anchor.CentreRight,
                Origin = Anchor.CentreRight,
                RelativeSizeAxes = Axes.Both,
                Children = new Drawable[]
                {
                    backgroundRight = new Box
                    {
                        Colour = OsuColour.Gray(0.1f),
                        Alpha = 0.8f,
                        RelativeSizeAxes = Axes.Both,
                    },
                    scoreText = new TournamentSpriteText
                    {
                        Anchor = Anchor.Centre,
                        Origin = Anchor.Centre,
                        Font = OsuFont.Torus.With(size: 22),
                    }
                }
            }
        };

        completed.BindValueChanged(_ => updateWinStyle());

        // `true` runs the callback immediately so the initial score is displayed.
        score.BindValueChanged(val =>
        {
            scoreText.Text = val.NewValue?.ToString() ?? string.Empty;
            updateWinStyle();
        }, true);
    }

    protected override bool OnClick(ClickEvent e)
    {
        // Clicks only manipulate scores outside the editor, and only for decided teams.
        if (Team == null || editorInfo != null) return false;

        if (!match.Current.Value)
        {
            setCurrent();
            return true;
        }

        if (e.Button == MouseButton.Left)
        {
            // Left click: start the match, or increment while it is still in progress.
            if (score.Value == null)
            {
                match.StartMatch();
            }
            else if (!match.Completed.Value)
                score.Value++;
        }
        else
        {
            if (match.Progression.Value?.Completed.Value == true)
                // don't allow changing scores if the match has a progression. can cause large data loss
                return false;

            if (match.Completed.Value && match.Winner != Team)
                // don't allow changing scores from the non-winner
                return false;

            // Right click: decrement, or cancel the match start when already at zero.
            if (score.Value > 0)
                score.Value--;
            else
                match.CancelMatchStart();
        }

        return false;
    }

    /// <summary>
    /// Fades the backgrounds and restyles the text depending on whether this team
    /// has won the (completed) match.
    /// </summary>
    private void updateWinStyle()
    {
        bool winner = completed.Value && isWinner?.Invoke() == true;

        background.FadeColour(winner ? Color4.White : Color4Extensions.FromHex("#444"), winner ? 500 : 0, Easing.OutQuint);
        backgroundRight.FadeColour(winner ? colourWinner : Color4Extensions.FromHex("#333"), winner ? 500 : 0, Easing.OutQuint);

        AcronymText.Colour = winner ? Color4.Black : Color4.White;

        scoreText.Font = scoreText.Font.With(weight: winner ? FontWeight.Bold : FontWeight.Regular);
    }

    // Context menu is only populated while the ladder editor is active.
    public MenuItem[] ContextMenuItems
    {
        get
        {
            if (editorInfo == null)
                return Array.Empty<MenuItem>();

            return new MenuItem[]
            {
                new OsuMenuItem("Set as current", MenuItemType.Standard, setCurrent),
                new OsuMenuItem("Join with", MenuItemType.Standard, () => ladderEditor.BeginJoin(match, false)),
                new OsuMenuItem("Join with (loser)", MenuItemType.Standard, () => ladderEditor.BeginJoin(match, true)),
                new OsuMenuItem("Remove", MenuItemType.Destructive, () => ladderEditor.Remove(match)),
            };
        }
    }
}
}
| |
using System;
using AutoMapper;
using FizzWare.NBuilder;
using Moq;
using NUnit.Framework;
using ReMi.Common.Utils.Repository;
using ReMi.TestUtils.UnitTests;
using ReMi.DataAccess.BusinessEntityGateways.ProductRequests;
using ReMi.DataAccess.Exceptions;
using ReMi.DataEntities.ProductRequests;
namespace ReMi.DataAccess.Tests.ProductRequests
{
/// <summary>
/// Unit tests for ProductRequestGateway covering CRUD operations on product request
/// types, groups and tasks, plus disposal of the gateway's dependencies.
/// Repositories and the mapping engine are mocked; entities are built with NBuilder.
/// </summary>
public class ProductRequestGatewayTests : TestClassFor<ProductRequestGateway>
{
    private Mock<IRepository<ProductRequestType>> _productRequestTypeRepositoryMock;
    private Mock<IRepository<ProductRequestGroup>> _productRequestGroupRepositoryMock;
    private Mock<IRepository<ProductRequestTask>> _productRequestTaskRepositoryMock;
    private Mock<IMappingEngine> _mappingEngineMock;

    // Wires the mocked repositories and mapping engine into the system under test.
    protected override ProductRequestGateway ConstructSystemUnderTest()
    {
        return new ProductRequestGateway
        {
            ProductRequestTypeRepository = _productRequestTypeRepositoryMock.Object,
            ProductRequestGroupRepository = _productRequestGroupRepositoryMock.Object,
            ProductRequestTaskRepository = _productRequestTaskRepositoryMock.Object,
            MappingEngine = _mappingEngineMock.Object
        };
    }

    // Sets up fresh mocks per test; the mapping engine is stubbed to copy
    // ExternalId plus the entity-specific payload field (Name/Question).
    protected override void TestInitialize()
    {
        _productRequestTypeRepositoryMock = new Mock<IRepository<ProductRequestType>>();
        _productRequestGroupRepositoryMock = new Mock<IRepository<ProductRequestGroup>>();
        _productRequestTaskRepositoryMock = new Mock<IRepository<ProductRequestTask>>();
        _mappingEngineMock = new Mock<IMappingEngine>();
        _mappingEngineMock
            .Setup(m => m.Map<BusinessEntities.ProductRequests.ProductRequestType, ProductRequestType>(It.IsAny<BusinessEntities.ProductRequests.ProductRequestType>()))
            .Returns<BusinessEntities.ProductRequests.ProductRequestType>(r => new ProductRequestType
            {
                ExternalId = r.ExternalId,
                Name = r.Name
            });
        _mappingEngineMock
            .Setup(m => m.Map<BusinessEntities.ProductRequests.ProductRequestGroup, ProductRequestGroup>(It.IsAny<BusinessEntities.ProductRequests.ProductRequestGroup>()))
            .Returns<BusinessEntities.ProductRequests.ProductRequestGroup>(r => new ProductRequestGroup
            {
                ExternalId = r.ExternalId,
                Name = r.Name
            });
        _mappingEngineMock
            .Setup(m => m.Map<BusinessEntities.ProductRequests.ProductRequestTask, ProductRequestTask>(It.IsAny<BusinessEntities.ProductRequests.ProductRequestTask>()))
            .Returns<BusinessEntities.ProductRequests.ProductRequestTask>(r => new ProductRequestTask
            {
                ExternalId = r.ExternalId,
                Question = r.Question
            });
        base.TestInitialize();
    }

    [Test]
    public void GetRequestTypes_ShouldGetEntities_WhenInvoked()
    {
        Sut.GetRequestTypes();
        _productRequestTypeRepositoryMock.VerifyGet(o => o.Entities);
    }

    [Test]
    [ExpectedException(typeof(EntityAlreadyExistsException), ExpectedMessage = "Entity '2d9a05af-605e-42d5-a1b0-5403306f3069' with type 'ProductRequestType' already exists")]
    public void CreateProductRequestType_ShouldThrowException_WhenExternalIdAlreadyExists()
    {
        var id = Guid.Parse("{2D9A05AF-605E-42D5-A1B0-5403306F3069}");
        var type = Builder<BusinessEntities.ProductRequests.ProductRequestType>.CreateNew()
            .With(o => o.ExternalId, id)
            .Build();
        // Pre-populating the repository with the same id triggers the duplicate check.
        SetupProductRequestTypeRepository(id);
        Sut.CreateProductRequestType(type);
    }

    [Test]
    public void CreateProductRequestType_ShouldMapInputData_WhenInvoked()
    {
        var type = Builder<BusinessEntities.ProductRequests.ProductRequestType>.CreateNew()
            .With(o => o.ExternalId, Guid.NewGuid())
            .Build();
        Sut.CreateProductRequestType(type);
        _mappingEngineMock.Verify(o => o.Map<BusinessEntities.ProductRequests.ProductRequestType, ProductRequestType>(type));
    }

    [Test]
    public void CreateProductRequestType_ShouldInsertNewRecord_WhenInvoked()
    {
        var type = Builder<BusinessEntities.ProductRequests.ProductRequestType>.CreateNew()
            .With(o => o.ExternalId, Guid.NewGuid())
            .Build();
        Sut.CreateProductRequestType(type);
        _productRequestTypeRepositoryMock.Verify(o =>
            o.Insert(It.Is<ProductRequestType>(x =>
                x.ExternalId == type.ExternalId)));
    }

    [Test]
    [ExpectedException(typeof(EntityNotFoundException), ExpectedMessage = "Could not find entity '2d9a05af-605e-42d5-a1b0-5403306f3069' of type 'ProductRequestType'")]
    public void UpdateProductRequestType_ShouldThrowException_WhenExternalIdAlreadyExists()
    {
        var id = Guid.Parse("{2D9A05AF-605E-42D5-A1B0-5403306F3069}");
        var type = Builder<BusinessEntities.ProductRequests.ProductRequestType>.CreateNew()
            .With(o => o.ExternalId, id)
            .Build();
        // No repository setup: the entity is absent, so the update must fail.
        Sut.UpdateProductRequestType(type);
    }

    [Test]
    public void UpdateProductRequestType_ShouldInsertNewRecord_WhenInvoked()
    {
        var type = Builder<BusinessEntities.ProductRequests.ProductRequestType>.CreateNew()
            .With(o => o.ExternalId, Guid.NewGuid())
            .Build();
        SetupProductRequestTypeRepository(type.ExternalId);
        var newName = RandomData.RandomString(1, 100);
        Sut.UpdateProductRequestType(new BusinessEntities.ProductRequests.ProductRequestType
        {
            ExternalId = type.ExternalId,
            Name = newName
        });
        _productRequestTypeRepositoryMock.Verify(o =>
            o.Update(It.Is<ProductRequestType>(x =>
                x.ExternalId == type.ExternalId
                && x.Name == newName)));
    }

    [Test]
    [ExpectedException(typeof(EntityNotFoundException), ExpectedMessage = "Could not find entity '2d9a05af-605e-42d5-a1b0-5403306f3069' of type 'ProductRequestType'")]
    public void DeleteProductRequestType_ShouldThrowException_WhenExternalIdNotExists()
    {
        var id = Guid.Parse("{2D9A05AF-605E-42D5-A1B0-5403306F3069}");
        var type = Builder<BusinessEntities.ProductRequests.ProductRequestType>.CreateNew()
            .With(o => o.ExternalId, id)
            .Build();
        Sut.DeleteProductRequestType(type.ExternalId);
    }

    [Test]
    public void DeleteProductRequestType_ShouldInsertNewRecord_WhenInvoked()
    {
        var type = Builder<BusinessEntities.ProductRequests.ProductRequestType>.CreateNew()
            .With(o => o.ExternalId, Guid.NewGuid())
            .Build();
        SetupProductRequestTypeRepository(type.ExternalId);
        Sut.DeleteProductRequestType(type.ExternalId);
        _productRequestTypeRepositoryMock.Verify(o =>
            o.Delete(It.Is<ProductRequestType>(x =>
                x.ExternalId == type.ExternalId)));
    }

    [Test]
    [ExpectedException(typeof(EntityNotFoundException), ExpectedMessage = "Could not find entity '2d9a05af-605e-42d5-a1b0-5403306f3069' of type 'ProductRequestType'")]
    public void CreateProductRequestGroup_ShouldThrowException_WhenRelatedTypeNotExists()
    {
        var id = Guid.Parse("{2D9A05AF-605E-42D5-A1B0-5403306F3069}");
        var type = Builder<BusinessEntities.ProductRequests.ProductRequestGroup>.CreateNew()
            .With(o => o.ExternalId, Guid.NewGuid())
            .With(o => o.ProductRequestTypeId, id)
            .Build();
        Sut.CreateProductRequestGroup(type);
    }

    [Test]
    [ExpectedException(typeof(EntityAlreadyExistsException), ExpectedMessage = "Entity '2d9a05af-605e-42d5-a1b0-5403306f3069' with type 'ProductRequestGroup' already exists")]
    public void CreateProductRequestGroup_ShouldThrowException_WhenExternalIdAlreadyExists()
    {
        var id = Guid.Parse("{2D9A05AF-605E-42D5-A1B0-5403306F3069}");
        var typeId = Guid.Parse("{922E2940-6F6B-4406-B7BC-4A0FD89F112E}");
        var group = Builder<BusinessEntities.ProductRequests.ProductRequestGroup>.CreateNew()
            .With(o => o.ExternalId, id)
            .With(o => o.ProductRequestTypeId, typeId)
            .Build();
        SetupProductRequestTypeRepository(typeId);
        SetupProductRequestGroupRepository(id);
        Sut.CreateProductRequestGroup(group);
    }

    [Test]
    public void CreateProductRequestGroup_ShouldMapInputData_WhenInvoked()
    {
        var typeId = Guid.NewGuid();
        var group = Builder<BusinessEntities.ProductRequests.ProductRequestGroup>.CreateNew()
            .With(o => o.ExternalId, Guid.NewGuid())
            .With(o => o.ProductRequestTypeId, typeId)
            .Build();
        SetupProductRequestTypeRepository(typeId);
        Sut.CreateProductRequestGroup(group);
        _mappingEngineMock.Verify(o => o.Map<BusinessEntities.ProductRequests.ProductRequestGroup, ProductRequestGroup>(group));
    }

    [Test]
    public void CreateProductRequestGroup_ShouldInsertNewRecord_WhenInvoked()
    {
        var typeExternalId = Guid.NewGuid();
        var typeId = RandomData.RandomInt(1, int.MaxValue);
        var group = Builder<BusinessEntities.ProductRequests.ProductRequestGroup>.CreateNew()
            .With(o => o.ExternalId, Guid.NewGuid())
            .With(o => o.ProductRequestTypeId, typeExternalId)
            .Build();
        SetupProductRequestTypeRepository(typeExternalId, typeId);
        Sut.CreateProductRequestGroup(group);
        // The gateway must translate the type's external id into its internal key.
        _productRequestGroupRepositoryMock.Verify(o =>
            o.Insert(It.Is<ProductRequestGroup>(x =>
                x.ExternalId == group.ExternalId
                && x.ProductRequestTypeId == typeId)));
    }

    [Test]
    [ExpectedException(typeof(EntityNotFoundException), ExpectedMessage = "Could not find entity '2d9a05af-605e-42d5-a1b0-5403306f3069' of type 'ProductRequestGroup'")]
    public void UpdateProductRequestGroup_ShouldThrowException_WhenExternalIdAlreadyExists()
    {
        var id = Guid.Parse("{2D9A05AF-605E-42D5-A1B0-5403306F3069}");
        var group = Builder<BusinessEntities.ProductRequests.ProductRequestGroup>.CreateNew()
            .With(o => o.ExternalId, id)
            .Build();
        Sut.UpdateProductRequestGroup(group);
    }

    [Test]
    public void UpdateProductRequestGroup_ShouldInsertNewRecord_WhenInvoked()
    {
        var group = Builder<BusinessEntities.ProductRequests.ProductRequestGroup>.CreateNew()
            .With(o => o.ExternalId, Guid.NewGuid())
            .Build();
        SetupProductRequestGroupRepository(group.ExternalId);
        var newName = RandomData.RandomString(1, 100);
        Sut.UpdateProductRequestGroup(new BusinessEntities.ProductRequests.ProductRequestGroup
        {
            ExternalId = group.ExternalId,
            Name = newName
        });
        _productRequestGroupRepositoryMock.Verify(o =>
            o.Update(It.Is<ProductRequestGroup>(x =>
                x.ExternalId == group.ExternalId
                && x.Name == newName)));
    }

    [Test]
    [ExpectedException(typeof(EntityNotFoundException), ExpectedMessage = "Could not find entity '2d9a05af-605e-42d5-a1b0-5403306f3069' of type 'ProductRequestGroup'")]
    public void DeleteProductRequestGroup_ShouldThrowException_WhenRelatedTypeNotExists()
    {
        var id = Guid.Parse("{2D9A05AF-605E-42D5-A1B0-5403306F3069}");
        var group = Builder<BusinessEntities.ProductRequests.ProductRequestGroup>.CreateNew()
            .With(o => o.ExternalId, id )
            .With(o => o.ProductRequestTypeId, Guid.NewGuid())
            .Build();
        Sut.DeleteProductRequestGroup(group.ExternalId);
    }

    [Test]
    public void DeleteProductRequestGroup_ShouldInsertNewRecord_WhenInvoked()
    {
        var typeExternalId = Guid.NewGuid();
        var typeId = RandomData.RandomInt(1, int.MaxValue);
        var group = Builder<BusinessEntities.ProductRequests.ProductRequestGroup>.CreateNew()
            .With(o => o.ExternalId, Guid.NewGuid())
            .With(o => o.ProductRequestTypeId, typeExternalId)
            .Build();
        SetupProductRequestTypeRepository(typeExternalId, typeId);
        SetupProductRequestGroupRepository(group.ExternalId);
        Sut.DeleteProductRequestGroup(group.ExternalId);
        _productRequestGroupRepositoryMock.Verify(o =>
            o.Delete(It.Is<ProductRequestGroup>(x =>
                x.ExternalId == group.ExternalId)));
    }

    [Test]
    [ExpectedException(typeof(EntityNotFoundException), ExpectedMessage = "Could not find entity '2d9a05af-605e-42d5-a1b0-5403306f3069' of type 'ProductRequestGroup'")]
    public void CreateProductRequestTask_ShouldThrowException_WhenRelatedTypeNotExists()
    {
        var id = Guid.Parse("{2D9A05AF-605E-42D5-A1B0-5403306F3069}");
        var task = Builder<BusinessEntities.ProductRequests.ProductRequestTask>.CreateNew()
            .With(o => o.ExternalId, Guid.NewGuid())
            .With(o => o.ProductRequestGroupId, id)
            .Build();
        Sut.CreateProductRequestTask(task);
    }

    [Test]
    [ExpectedException(typeof(EntityAlreadyExistsException), ExpectedMessage = "Entity '2d9a05af-605e-42d5-a1b0-5403306f3069' with type 'ProductRequestTask' already exists")]
    public void CreateProductRequestTask_ShouldThrowException_WhenExternalIdAlreadyExists()
    {
        var id = Guid.Parse("{2D9A05AF-605E-42D5-A1B0-5403306F3069}");
        var groupId = Guid.Parse("{922E2940-6F6B-4406-B7BC-4A0FD89F112E}");
        var task = Builder<BusinessEntities.ProductRequests.ProductRequestTask>.CreateNew()
            .With(o => o.ExternalId, id)
            .With(o => o.ProductRequestGroupId, groupId)
            .Build();
        SetupProductRequestGroupRepository(groupId);
        SetupProductRequestTaskRepository(id);
        Sut.CreateProductRequestTask(task);
    }

    [Test]
    public void CreateProductRequestTask_ShouldMapInputData_WhenInvoked()
    {
        var groupId = Guid.NewGuid();
        var task = Builder<BusinessEntities.ProductRequests.ProductRequestTask>.CreateNew()
            .With(o => o.ExternalId, Guid.NewGuid())
            .With(o => o.ProductRequestGroupId, groupId)
            .Build();
        SetupProductRequestGroupRepository(groupId);
        Sut.CreateProductRequestTask(task);
        _mappingEngineMock.Verify(o => o.Map<BusinessEntities.ProductRequests.ProductRequestTask, ProductRequestTask>(task));
    }

    [Test]
    public void CreateProductRequestTask_ShouldInsertNewRecord_WhenInvoked()
    {
        var groupId = Guid.NewGuid();
        var task = Builder<BusinessEntities.ProductRequests.ProductRequestTask>.CreateNew()
            .With(o => o.ExternalId, Guid.NewGuid())
            .With(o => o.ProductRequestGroupId, groupId)
            .Build();
        SetupProductRequestGroupRepository(groupId);
        Sut.CreateProductRequestTask(task);
        _productRequestTaskRepositoryMock.Verify(o =>
            o.Insert(It.Is<ProductRequestTask>(x =>
                x.ExternalId == task.ExternalId)));
    }

    [Test]
    [ExpectedException(typeof(EntityNotFoundException), ExpectedMessage = "Could not find entity '2d9a05af-605e-42d5-a1b0-5403306f3069' of type 'ProductRequestTask'")]
    public void UpdateProductRequestTask_ShouldThrowException_WhenExternalIdAlreadyExists()
    {
        var id = Guid.Parse("{2D9A05AF-605E-42D5-A1B0-5403306F3069}");
        var task = Builder<BusinessEntities.ProductRequests.ProductRequestTask>.CreateNew()
            .With(o => o.ExternalId, id)
            .Build();
        Sut.UpdateProductRequestTask(task);
    }

    [Test]
    public void UpdateProductRequestTask_ShouldInsertNewRecord_WhenInvoked()
    {
        var task = Builder<BusinessEntities.ProductRequests.ProductRequestTask>.CreateNew()
            .With(o => o.ExternalId, Guid.NewGuid())
            .Build();
        SetupProductRequestTaskRepository(task.ExternalId);
        var newQuestion = RandomData.RandomString(1, 100);
        Sut.UpdateProductRequestTask(new BusinessEntities.ProductRequests.ProductRequestTask
        {
            ExternalId = task.ExternalId,
            Question = newQuestion
        });
        _productRequestTaskRepositoryMock.Verify(o =>
            o.Update(It.Is<ProductRequestTask>(x =>
                x.ExternalId == task.ExternalId
                && x.Question == newQuestion)));
    }

    [Test]
    [ExpectedException(typeof(EntityNotFoundException), ExpectedMessage = "Could not find entity '2d9a05af-605e-42d5-a1b0-5403306f3069' of type 'ProductRequestTask'")]
    public void DeleteProductRequestTask_ShouldThrowException_WhenRelatedTypeNotExists()
    {
        var id = Guid.Parse("{2D9A05AF-605E-42D5-A1B0-5403306F3069}");
        var task = Builder<BusinessEntities.ProductRequests.ProductRequestTask>.CreateNew()
            .With(o => o.ExternalId, id)
            .Build();
        Sut.DeleteProductRequestTask(task.ExternalId);
    }

    [Test]
    [ExpectedException(typeof(EntityHasRelatedData), ExpectedMessage = "Entity '2d9a05af-605e-42d5-a1b0-5403306f3069' with type 'ProductRequestTask' has related data")]
    public void DeleteProductRequestTask_ShouldThrowException_WhenRelatedRegistrationsNotExists()
    {
        var id = Guid.Parse("{2D9A05AF-605E-42D5-A1B0-5403306F3069}");
        var task = Builder<BusinessEntities.ProductRequests.ProductRequestTask>.CreateNew()
            .With(o => o.ExternalId, id)
            .Build();
        // `true` attaches a registration task, which must block deletion.
        SetupProductRequestTaskRepository(task.ExternalId, true);
        Sut.DeleteProductRequestTask(task.ExternalId);
    }

    [Test]
    public void DeleteProductRequestTask_ShouldInsertNewRecord_WhenInvoked()
    {
        var taskId = Guid.NewGuid();
        var task = Builder<BusinessEntities.ProductRequests.ProductRequestTask>.CreateNew()
            .With(o => o.ExternalId, taskId)
            .Build();
        SetupProductRequestTaskRepository(task.ExternalId);
        Sut.DeleteProductRequestTask(task.ExternalId);
        _productRequestTaskRepositoryMock.Verify(o =>
            o.Delete(It.Is<ProductRequestTask>(x =>
                x.ExternalId == task.ExternalId)));
    }

    [Test]
    public void Dispose_ShouldDisposeAllInternalObjects_WhenInvoked()
    {
        Sut.OnDisposing();
        _productRequestTaskRepositoryMock.Verify(o => o.Dispose());
        _productRequestTypeRepositoryMock.Verify(o => o.Dispose());
        _productRequestGroupRepositoryMock.Verify(o => o.Dispose());
        _mappingEngineMock.Verify(o => o.Dispose());
    }

    // Seeds the type repository with a single entity carrying the given external id
    // and (optionally) a specific internal key.
    private void SetupProductRequestTypeRepository(Guid? externalId, int? id = null)
    {
        var type = Builder<ProductRequestType>.CreateNew()
            .With(o => o.ExternalId, externalId ?? Guid.NewGuid())
            .With(o => o.ProductRequestTypeId, id ?? RandomData.RandomInt(1, int.MaxValue))
            .Build();
        _productRequestTypeRepositoryMock.SetupEntities(new[] { type });
    }

    // Seeds the group repository with a single entity carrying the given external id.
    private void SetupProductRequestGroupRepository(Guid? externalId)
    {
        var group = Builder<ProductRequestGroup>.CreateNew()
            .With(o => o.ExternalId, externalId ?? Guid.NewGuid())
            .Build();
        _productRequestGroupRepositoryMock.SetupEntities(new[] { group });
    }

    // Seeds the task repository with a single entity; when relatedRegistrations is true,
    // one registration task is attached so delete-protection paths can be exercised.
    private void SetupProductRequestTaskRepository(Guid? externalId, bool relatedRegistrations = false)
    {
        var task = Builder<ProductRequestTask>.CreateNew()
            .With(o => o.ExternalId, externalId ?? Guid.NewGuid())
            .With(o => o.RegistrationTasks, !relatedRegistrations ? null : Builder<ProductRequestRegistrationTask>.CreateListOfSize(1).Build())
            .Build();
        _productRequestTaskRepositoryMock.SetupEntities(new[] { task });
    }
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/iam/v1/iam_policy.proto
// Original file comments:
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#region Designer generated code
using System;
using System.Threading;
using System.Threading.Tasks;
using grpc = global::Grpc.Core;
namespace Google.Cloud.Iam.V1 {
/// <summary>
/// ## API Overview
///
/// Manages Identity and Access Management (IAM) policies.
///
/// Any implementation of an API that offers access control features
/// implements the google.iam.v1.IAMPolicy interface.
///
/// ## Data model
///
/// Access control is applied when a principal (user or service account), takes
/// some action on a resource exposed by a service. Resources, identified by
/// URI-like names, are the unit of access control specification. Service
/// implementations can choose the granularity of access control and the
/// supported permissions for their resources.
/// For example one database service may allow access control to be
/// specified only at the Table level, whereas another might allow access control
/// to also be specified at the Column level.
///
/// ## Policy Structure
///
/// See google.iam.v1.Policy
///
/// This is intentionally not a CRUD style API because access control policies
/// are created and deleted implicitly with the resources to which they are
/// attached.
/// </summary>
// NOTE: tool-generated gRPC binding for the google.iam.v1.IAMPolicy service.
// Do not hand-edit the logic; changes are lost when the code is regenerated from the .proto.
public static partial class IAMPolicy
{
    static readonly string __ServiceName = "google.iam.v1.IAMPolicy";

    // Marshallers serialize each protobuf message type to/from the gRPC wire format.
    static readonly grpc::Marshaller<global::Google.Cloud.Iam.V1.SetIamPolicyRequest> __Marshaller_SetIamPolicyRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Iam.V1.SetIamPolicyRequest.Parser.ParseFrom);
    static readonly grpc::Marshaller<global::Google.Cloud.Iam.V1.Policy> __Marshaller_Policy = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Iam.V1.Policy.Parser.ParseFrom);
    static readonly grpc::Marshaller<global::Google.Cloud.Iam.V1.GetIamPolicyRequest> __Marshaller_GetIamPolicyRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Iam.V1.GetIamPolicyRequest.Parser.ParseFrom);
    static readonly grpc::Marshaller<global::Google.Cloud.Iam.V1.TestIamPermissionsRequest> __Marshaller_TestIamPermissionsRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Iam.V1.TestIamPermissionsRequest.Parser.ParseFrom);
    static readonly grpc::Marshaller<global::Google.Cloud.Iam.V1.TestIamPermissionsResponse> __Marshaller_TestIamPermissionsResponse = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::Google.Cloud.Iam.V1.TestIamPermissionsResponse.Parser.ParseFrom);

    // Unary method descriptors; each pairs a request/response marshaller with the RPC name.
    static readonly grpc::Method<global::Google.Cloud.Iam.V1.SetIamPolicyRequest, global::Google.Cloud.Iam.V1.Policy> __Method_SetIamPolicy = new grpc::Method<global::Google.Cloud.Iam.V1.SetIamPolicyRequest, global::Google.Cloud.Iam.V1.Policy>(
        grpc::MethodType.Unary,
        __ServiceName,
        "SetIamPolicy",
        __Marshaller_SetIamPolicyRequest,
        __Marshaller_Policy);
    static readonly grpc::Method<global::Google.Cloud.Iam.V1.GetIamPolicyRequest, global::Google.Cloud.Iam.V1.Policy> __Method_GetIamPolicy = new grpc::Method<global::Google.Cloud.Iam.V1.GetIamPolicyRequest, global::Google.Cloud.Iam.V1.Policy>(
        grpc::MethodType.Unary,
        __ServiceName,
        "GetIamPolicy",
        __Marshaller_GetIamPolicyRequest,
        __Marshaller_Policy);
    static readonly grpc::Method<global::Google.Cloud.Iam.V1.TestIamPermissionsRequest, global::Google.Cloud.Iam.V1.TestIamPermissionsResponse> __Method_TestIamPermissions = new grpc::Method<global::Google.Cloud.Iam.V1.TestIamPermissionsRequest, global::Google.Cloud.Iam.V1.TestIamPermissionsResponse>(
        grpc::MethodType.Unary,
        __ServiceName,
        "TestIamPermissions",
        __Marshaller_TestIamPermissionsRequest,
        __Marshaller_TestIamPermissionsResponse);

    /// <summary>Service descriptor</summary>
    public static global::Google.Protobuf.Reflection.ServiceDescriptor Descriptor
    {
        get { return global::Google.Cloud.Iam.V1.IamPolicyReflection.Descriptor.Services[0]; }
    }

    /// <summary>Base class for server-side implementations of IAMPolicy</summary>
    public abstract partial class IAMPolicyBase
    {
        /// <summary>
        /// Sets the access control policy on the specified resource. Replaces any
        /// existing policy.
        /// </summary>
        /// <param name="request">The request received from the client.</param>
        /// <param name="context">The context of the server-side call handler being invoked.</param>
        /// <returns>The response to send back to the client (wrapped by a task).</returns>
        public virtual global::System.Threading.Tasks.Task<global::Google.Cloud.Iam.V1.Policy> SetIamPolicy(global::Google.Cloud.Iam.V1.SetIamPolicyRequest request, grpc::ServerCallContext context)
        {
            // Not implemented by default; a concrete service subclass must override this.
            throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
        }

        /// <summary>
        /// Gets the access control policy for a resource.
        /// Returns an empty policy if the resource exists and does not have a policy
        /// set.
        /// </summary>
        /// <param name="request">The request received from the client.</param>
        /// <param name="context">The context of the server-side call handler being invoked.</param>
        /// <returns>The response to send back to the client (wrapped by a task).</returns>
        public virtual global::System.Threading.Tasks.Task<global::Google.Cloud.Iam.V1.Policy> GetIamPolicy(global::Google.Cloud.Iam.V1.GetIamPolicyRequest request, grpc::ServerCallContext context)
        {
            // Not implemented by default; a concrete service subclass must override this.
            throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
        }

        /// <summary>
        /// Returns permissions that a caller has on the specified resource.
        /// If the resource does not exist, this will return an empty set of
        /// permissions, not a NOT_FOUND error.
        /// </summary>
        /// <param name="request">The request received from the client.</param>
        /// <param name="context">The context of the server-side call handler being invoked.</param>
        /// <returns>The response to send back to the client (wrapped by a task).</returns>
        public virtual global::System.Threading.Tasks.Task<global::Google.Cloud.Iam.V1.TestIamPermissionsResponse> TestIamPermissions(global::Google.Cloud.Iam.V1.TestIamPermissionsRequest request, grpc::ServerCallContext context)
        {
            // Not implemented by default; a concrete service subclass must override this.
            throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, ""));
        }
    }

    /// <summary>Client for IAMPolicy</summary>
    public partial class IAMPolicyClient : grpc::ClientBase<IAMPolicyClient>
    {
        /// <summary>Creates a new client for IAMPolicy</summary>
        /// <param name="channel">The channel to use to make remote calls.</param>
        public IAMPolicyClient(grpc::Channel channel) : base(channel)
        {
        }
        /// <summary>Creates a new client for IAMPolicy that uses a custom <c>CallInvoker</c>.</summary>
        /// <param name="callInvoker">The callInvoker to use to make remote calls.</param>
        public IAMPolicyClient(grpc::CallInvoker callInvoker) : base(callInvoker)
        {
        }
        /// <summary>Protected parameterless constructor to allow creation of test doubles.</summary>
        protected IAMPolicyClient() : base()
        {
        }
        /// <summary>Protected constructor to allow creation of configured clients.</summary>
        /// <param name="configuration">The client configuration.</param>
        protected IAMPolicyClient(ClientBaseConfiguration configuration) : base(configuration)
        {
        }

        /// <summary>
        /// Sets the access control policy on the specified resource. Replaces any
        /// existing policy.
        /// </summary>
        /// <param name="request">The request to send to the server.</param>
        /// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
        /// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
        /// <param name="cancellationToken">An optional token for canceling the call.</param>
        /// <returns>The response received from the server.</returns>
        public virtual global::Google.Cloud.Iam.V1.Policy SetIamPolicy(global::Google.Cloud.Iam.V1.SetIamPolicyRequest request, grpc::Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            // Convenience overload: wraps the individual call arguments into CallOptions.
            return SetIamPolicy(request, new grpc::CallOptions(headers, deadline, cancellationToken));
        }
        /// <summary>
        /// Sets the access control policy on the specified resource. Replaces any
        /// existing policy.
        /// </summary>
        /// <param name="request">The request to send to the server.</param>
        /// <param name="options">The options for the call.</param>
        /// <returns>The response received from the server.</returns>
        public virtual global::Google.Cloud.Iam.V1.Policy SetIamPolicy(global::Google.Cloud.Iam.V1.SetIamPolicyRequest request, grpc::CallOptions options)
        {
            return CallInvoker.BlockingUnaryCall(__Method_SetIamPolicy, null, options, request);
        }
        /// <summary>
        /// Sets the access control policy on the specified resource. Replaces any
        /// existing policy.
        /// </summary>
        /// <param name="request">The request to send to the server.</param>
        /// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
        /// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
        /// <param name="cancellationToken">An optional token for canceling the call.</param>
        /// <returns>The call object.</returns>
        public virtual grpc::AsyncUnaryCall<global::Google.Cloud.Iam.V1.Policy> SetIamPolicyAsync(global::Google.Cloud.Iam.V1.SetIamPolicyRequest request, grpc::Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            return SetIamPolicyAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken));
        }
        /// <summary>
        /// Sets the access control policy on the specified resource. Replaces any
        /// existing policy.
        /// </summary>
        /// <param name="request">The request to send to the server.</param>
        /// <param name="options">The options for the call.</param>
        /// <returns>The call object.</returns>
        public virtual grpc::AsyncUnaryCall<global::Google.Cloud.Iam.V1.Policy> SetIamPolicyAsync(global::Google.Cloud.Iam.V1.SetIamPolicyRequest request, grpc::CallOptions options)
        {
            return CallInvoker.AsyncUnaryCall(__Method_SetIamPolicy, null, options, request);
        }

        /// <summary>
        /// Gets the access control policy for a resource.
        /// Returns an empty policy if the resource exists and does not have a policy
        /// set.
        /// </summary>
        /// <param name="request">The request to send to the server.</param>
        /// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
        /// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
        /// <param name="cancellationToken">An optional token for canceling the call.</param>
        /// <returns>The response received from the server.</returns>
        public virtual global::Google.Cloud.Iam.V1.Policy GetIamPolicy(global::Google.Cloud.Iam.V1.GetIamPolicyRequest request, grpc::Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            return GetIamPolicy(request, new grpc::CallOptions(headers, deadline, cancellationToken));
        }
        /// <summary>
        /// Gets the access control policy for a resource.
        /// Returns an empty policy if the resource exists and does not have a policy
        /// set.
        /// </summary>
        /// <param name="request">The request to send to the server.</param>
        /// <param name="options">The options for the call.</param>
        /// <returns>The response received from the server.</returns>
        public virtual global::Google.Cloud.Iam.V1.Policy GetIamPolicy(global::Google.Cloud.Iam.V1.GetIamPolicyRequest request, grpc::CallOptions options)
        {
            return CallInvoker.BlockingUnaryCall(__Method_GetIamPolicy, null, options, request);
        }
        /// <summary>
        /// Gets the access control policy for a resource.
        /// Returns an empty policy if the resource exists and does not have a policy
        /// set.
        /// </summary>
        /// <param name="request">The request to send to the server.</param>
        /// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
        /// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
        /// <param name="cancellationToken">An optional token for canceling the call.</param>
        /// <returns>The call object.</returns>
        public virtual grpc::AsyncUnaryCall<global::Google.Cloud.Iam.V1.Policy> GetIamPolicyAsync(global::Google.Cloud.Iam.V1.GetIamPolicyRequest request, grpc::Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            return GetIamPolicyAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken));
        }
        /// <summary>
        /// Gets the access control policy for a resource.
        /// Returns an empty policy if the resource exists and does not have a policy
        /// set.
        /// </summary>
        /// <param name="request">The request to send to the server.</param>
        /// <param name="options">The options for the call.</param>
        /// <returns>The call object.</returns>
        public virtual grpc::AsyncUnaryCall<global::Google.Cloud.Iam.V1.Policy> GetIamPolicyAsync(global::Google.Cloud.Iam.V1.GetIamPolicyRequest request, grpc::CallOptions options)
        {
            return CallInvoker.AsyncUnaryCall(__Method_GetIamPolicy, null, options, request);
        }

        /// <summary>
        /// Returns permissions that a caller has on the specified resource.
        /// If the resource does not exist, this will return an empty set of
        /// permissions, not a NOT_FOUND error.
        /// </summary>
        /// <param name="request">The request to send to the server.</param>
        /// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
        /// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
        /// <param name="cancellationToken">An optional token for canceling the call.</param>
        /// <returns>The response received from the server.</returns>
        public virtual global::Google.Cloud.Iam.V1.TestIamPermissionsResponse TestIamPermissions(global::Google.Cloud.Iam.V1.TestIamPermissionsRequest request, grpc::Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            return TestIamPermissions(request, new grpc::CallOptions(headers, deadline, cancellationToken));
        }
        /// <summary>
        /// Returns permissions that a caller has on the specified resource.
        /// If the resource does not exist, this will return an empty set of
        /// permissions, not a NOT_FOUND error.
        /// </summary>
        /// <param name="request">The request to send to the server.</param>
        /// <param name="options">The options for the call.</param>
        /// <returns>The response received from the server.</returns>
        public virtual global::Google.Cloud.Iam.V1.TestIamPermissionsResponse TestIamPermissions(global::Google.Cloud.Iam.V1.TestIamPermissionsRequest request, grpc::CallOptions options)
        {
            return CallInvoker.BlockingUnaryCall(__Method_TestIamPermissions, null, options, request);
        }
        /// <summary>
        /// Returns permissions that a caller has on the specified resource.
        /// If the resource does not exist, this will return an empty set of
        /// permissions, not a NOT_FOUND error.
        /// </summary>
        /// <param name="request">The request to send to the server.</param>
        /// <param name="headers">The initial metadata to send with the call. This parameter is optional.</param>
        /// <param name="deadline">An optional deadline for the call. The call will be cancelled if deadline is hit.</param>
        /// <param name="cancellationToken">An optional token for canceling the call.</param>
        /// <returns>The call object.</returns>
        public virtual grpc::AsyncUnaryCall<global::Google.Cloud.Iam.V1.TestIamPermissionsResponse> TestIamPermissionsAsync(global::Google.Cloud.Iam.V1.TestIamPermissionsRequest request, grpc::Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            return TestIamPermissionsAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken));
        }
        /// <summary>
        /// Returns permissions that a caller has on the specified resource.
        /// If the resource does not exist, this will return an empty set of
        /// permissions, not a NOT_FOUND error.
        /// </summary>
        /// <param name="request">The request to send to the server.</param>
        /// <param name="options">The options for the call.</param>
        /// <returns>The call object.</returns>
        public virtual grpc::AsyncUnaryCall<global::Google.Cloud.Iam.V1.TestIamPermissionsResponse> TestIamPermissionsAsync(global::Google.Cloud.Iam.V1.TestIamPermissionsRequest request, grpc::CallOptions options)
        {
            return CallInvoker.AsyncUnaryCall(__Method_TestIamPermissions, null, options, request);
        }

        /// <summary>Creates a new instance of client from given <c>ClientBaseConfiguration</c>.</summary>
        protected override IAMPolicyClient NewInstance(ClientBaseConfiguration configuration)
        {
            return new IAMPolicyClient(configuration);
        }
    }

    /// <summary>Creates service definition that can be registered with a server</summary>
    /// <param name="serviceImpl">An object implementing the server-side handling logic.</param>
    public static grpc::ServerServiceDefinition BindService(IAMPolicyBase serviceImpl)
    {
        return grpc::ServerServiceDefinition.CreateBuilder()
            .AddMethod(__Method_SetIamPolicy, serviceImpl.SetIamPolicy)
            .AddMethod(__Method_GetIamPolicy, serviceImpl.GetIamPolicy)
            .AddMethod(__Method_TestIamPermissions, serviceImpl.TestIamPermissions).Build();
    }
}
}
#endregion
| |
/****************************************************************************
Copyright (c) 2010-2012 cocos2d-x.org
Copyright (c) 2008-2010 Ricardo Quesada
Copyright (c) 2011 Zynga Inc.
Copyright (c) 2011-2012 openxlive.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
using System;
using Microsoft.Xna.Framework;
namespace CocosSharp
{
/** CCLayerGradient is a subclass of CCLayerColor that draws gradients across
the background.
All features from CCLayerColor are valid, plus the following new features:
- direction
- final color
- interpolation mode
Color is interpolated between the startColor and endColor along the given
vector (starting at the origin, ending at the terminus). If no vector is
supplied, it defaults to (0, -1) -- a fade from top to bottom.
If 'compressedInterpolation' is disabled, you will not see either the start or end color for
non-cardinal vectors; a smooth gradient implying both end points will still
be drawn, however.
If 'compressedInterpolation' is enabled (default mode) you will see both the start and end colors of the gradient.
@since v0.99.5
*/
public class CCLayerGradient : CCLayerColor
{
    // Whether the interpolation is compressed so both end colors are visible
    // even for non-cardinal (diagonal) gradient vectors.
    bool compressedInterpolation;
    // Opacity (0-255) applied at the end of the gradient.
    byte endOpacity;
    // Opacity (0-255) applied at the start of the gradient.
    byte startOpacity;
    // Direction of the gradient; colors are interpolated along this vector.
    CCPoint alongVector;
    // Color at the end of the gradient (start color is the layer's own color).
    CCColor3B endColor;

    #region Properties

    /// <summary>Color at the start of the gradient (stored as the layer's base color).</summary>
    public CCColor3B StartColor
    {
        get { return RealColor; }
        set
        {
            base.Color = value;
            UpdateColor();
        }
    }

    /// <summary>Color at the end of the gradient.</summary>
    public CCColor3B EndColor
    {
        get { return endColor; }
        set
        {
            endColor = value;
            UpdateColor();
        }
    }

    /// <summary>Opacity at the start of the gradient (0-255).</summary>
    public byte StartOpacity
    {
        get { return startOpacity; }
        set
        {
            startOpacity = value;
            UpdateColor();
        }
    }

    /// <summary>Opacity at the end of the gradient (0-255).</summary>
    public byte EndOpacity
    {
        get { return endOpacity; }
        set
        {
            endOpacity = value;
            UpdateColor();
        }
    }

    /// <summary>Gradient direction vector; (0, -1) is a top-to-bottom fade.</summary>
    public CCPoint Vector
    {
        get { return alongVector; }
        set
        {
            alongVector = value;
            UpdateColor();
        }
    }

    /// <summary>
    /// When true (the default), interpolation is compressed so both end colors are
    /// reached even when the gradient vector is not axis-aligned.
    /// </summary>
    public bool IsCompressedInterpolation
    {
        get { return compressedInterpolation; }
        set
        {
            compressedInterpolation = value;
            UpdateColor();
        }
    }

    #endregion Properties

    #region Constructors

    /// <summary>
    /// Creates a full-screen CCLayer with a gradient between start and end.
    /// </summary>
    public CCLayerGradient(CCColor4B start, CCColor4B end) : this(start, end, new CCPoint(0, -1))
    {
    }

    /// <summary>Creates an opaque black full-screen gradient layer.</summary>
    public CCLayerGradient() : this(new CCColor4B(0, 0, 0, 255), new CCColor4B(0, 0, 0, 255))
    {
    }

    /// <summary>Creates a gradient layer specifying only the start and end opacities.</summary>
    public CCLayerGradient(byte startOpacity, byte endOpacity) : base()
    {
        StartOpacity = startOpacity;
        EndOpacity = endOpacity;
    }

    /// <summary>
    /// Creates a full-screen CCLayer with a gradient between start and end in the direction of v.
    /// </summary>
    public CCLayerGradient(CCColor4B start, CCColor4B end, CCPoint gradientDirection)
        : base(new CCColor4B(start.R, start.G, start.B, 255))
    {
        EndColor = new CCColor3B(end.R, end.G, end.B);
        StartOpacity = start.A;
        EndOpacity = end.A;
        IsCompressedInterpolation = true;
        // NOTE(review): assigns the field directly, so UpdateColor is not re-run with the
        // final direction here; presumably a later layout/draw pass triggers it — confirm.
        alongVector = gradientDirection;
    }

    #endregion Constructors

    /// <summary>
    /// Recomputes the four vertex colors by interpolating between the start and end
    /// colors along the gradient vector.
    /// </summary>
    public override void UpdateColor()
    {
        base.UpdateColor();

        float h = alongVector.Length;
        if (h == 0)
            return;

        double c = Math.Sqrt(2.0);
        var u = new CCPoint(alongVector.X / h, alongVector.Y / h);

        // Compressed Interpolation mode: stretch the unit vector so the gradient
        // reaches both end colors even for diagonal directions.
        if (IsCompressedInterpolation)
        {
            float h2 = 1 / (Math.Abs(u.X) + Math.Abs(u.Y));
            u = u * (h2 * (float) c);
        }

        float opacityf = DisplayedOpacity / 255.0f;

        var S = new CCColor4B
        {
            R = DisplayedColor.R,
            G = DisplayedColor.G,
            B = DisplayedColor.B,
            A = (byte) (StartOpacity * opacityf)
        };

        var E = new CCColor4B
        {
            R = EndColor.R,
            G = EndColor.G,
            B = EndColor.B,
            A = (byte) (EndOpacity * opacityf)
        };

        // Per-corner interpolation factor t in [0, 1]; corner order matches the
        // vertex layout: (-1,-1), (1,-1), (-1,1), (1,1).
        SquareVertices[0].Color = CornerColor(S, E, (c + u.X + u.Y) / (2.0f * c));
        SquareVertices[1].Color = CornerColor(S, E, (c - u.X + u.Y) / (2.0f * c));
        SquareVertices[2].Color = CornerColor(S, E, (c + u.X - u.Y) / (2.0f * c));
        SquareVertices[3].Color = CornerColor(S, E, (c - u.X - u.Y) / (2.0f * c));
    }

    // Interpolates all four channels between the end color E (t == 0) and the
    // start color S (t == 1), matching the per-channel arithmetic of the
    // original expanded form exactly (int subtraction, double multiply, byte cast).
    static Color CornerColor(CCColor4B S, CCColor4B E, double t)
    {
        return new Color(
            LerpComponent(E.R, S.R, t),
            LerpComponent(E.G, S.G, t),
            LerpComponent(E.B, S.B, t),
            LerpComponent(E.A, S.A, t));
    }

    // Linear interpolation of a single byte channel: e + (s - e) * t, truncated to byte.
    static byte LerpComponent(byte e, byte s, double t)
    {
        return (byte) (e + (s - e) * t);
    }
}
}
| |
// Copyright 2021 Esri.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
// You may obtain a copy of the License at: http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
// language governing permissions and limitations under the License.
using Android.App;
using Android.OS;
using Android.Widget;
using ArcGISRuntime.Samples.GenerateOfflineMapWithOverrides;
using Esri.ArcGISRuntime.Geometry;
using Esri.ArcGISRuntime.Mapping;
using Esri.ArcGISRuntime.Portal;
using Esri.ArcGISRuntime.Symbology;
using Esri.ArcGISRuntime.Tasks;
using Esri.ArcGISRuntime.Tasks.Offline;
using Esri.ArcGISRuntime.UI;
using Esri.ArcGISRuntime.UI.Controls;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using AlertDialog = Android.App.AlertDialog;
namespace ArcGISRuntimeXamarin.Samples.GenerateOfflineMapWithOverrides
{
[Activity(ConfigurationChanges = Android.Content.PM.ConfigChanges.Orientation | Android.Content.PM.ConfigChanges.ScreenSize)]
[ArcGISRuntime.Samples.Shared.Attributes.Sample(
    name: "Generate offline map (overrides)",
    category: "Map",
    description: "Take a web map offline with additional options for each layer.",
    instructions: "Modify the overrides parameters:",
    tags: new[] { "LOD", "adjust", "download", "extent", "filter", "offline", "override", "parameters", "reduce", "scale range", "setting" })]
// Android sample activity: loads a web map, lets the user tweak per-layer override
// parameters in a dialog, then runs a GenerateOfflineMapJob and displays the result.
public class GenerateOfflineMapWithOverrides : Activity
{
    // Mapview.
    private MapView _mapView;

    // Generate Button.
    private Button _takeMapOfflineButton;

    // Progress indicator (shown in a modal dialog while the job runs).
    private AlertDialog _alertDialog;
    private ProgressBar _progressIndicator;

    // The job to generate an offline map.
    private GenerateOfflineMapJob _generateOfflineMapJob;

    // The extent of the data to take offline (WGS84 lon/lat envelope around Naperville, IL).
    private Envelope _areaOfInterest = new Envelope(-88.1541, 41.7690, -88.1471, 41.7720, SpatialReferences.Wgs84);

    // The ID for a web map item hosted on the server (water network map of Naperville IL).
    private const string WebMapId = "acc027394bc84c2fb04d1ed317aac674";

    // Values for taking things the map offline; populated in TakeMapOfflineButton_Click
    // and consumed later in ConfigurationContinuation.
    private string _packagePath;
    private OfflineMapTask _takeMapOfflineTask;
    private GenerateOfflineMapParameters _parameters;
    private GenerateOfflineMapParameterOverrides _overrides;

    // Activity entry point: builds the UI and kicks off async initialization.
    protected override void OnCreate(Bundle bundle)
    {
        base.OnCreate(bundle);

        Title = "Generate offline map (overrides)";

        // Create the UI, setup the control references and execute initialization.
        CreateLayout();
        Initialize();
    }

    // Builds the view hierarchy programmatically and prepares the progress dialog.
    private void CreateLayout()
    {
        // Create the layout.
        LinearLayout layout = new LinearLayout(this)
        {
            Orientation = Orientation.Vertical
        };

        // Add the generate button.
        _takeMapOfflineButton = new Button(this)
        {
            Text = "Take map offline"
        };
        _takeMapOfflineButton.Click += TakeMapOfflineButton_Click;
        layout.AddView(_takeMapOfflineButton);

        // Add the mapview.
        _mapView = new MapView(this);
        layout.AddView(_mapView);

        // Add the layout to the view.
        SetContentView(layout);

        // Create the progress dialog display.
        _progressIndicator = new ProgressBar(this);
        // NOTE(review): seeds the bar at 40% before any work starts; actual progress is
        // applied later in OfflineMapJob_ProgressChanged — confirm this initial value is intended.
        _progressIndicator.SetProgress(40, true);
        AlertDialog.Builder builder = new AlertDialog.Builder(this).SetView(_progressIndicator);
        builder.SetCancelable(true);
        builder.SetMessage("Generating offline map ...");
        _alertDialog = builder.Create();
        // Cancel button aborts the running job (job must exist by the time the dialog shows).
        _alertDialog.SetButton("Cancel", (s, e) => { _generateOfflineMapJob.Cancel(); });
    }

    // Loads the online web map and draws the area-of-interest outline.
    // async void is acceptable here only because it is invoked as an event-style entry point.
    private async void Initialize()
    {
        try
        {
            // Create the ArcGIS Online portal.
            ArcGISPortal portal = await ArcGISPortal.CreateAsync();

            // Get the Naperville water web map item using its ID.
            PortalItem webmapItem = await PortalItem.CreateAsync(portal, WebMapId);

            // Create a map from the web map item.
            Map onlineMap = new Map(webmapItem);

            // Display the map in the MapView.
            _mapView.Map = onlineMap;

            // Disable user interactions on the map (no panning or zooming from the initial extent).
            _mapView.InteractionOptions = new MapViewInteractionOptions
            {
                IsEnabled = false
            };

            // Create a graphics overlay for the extent graphic and apply a renderer.
            SimpleLineSymbol aoiOutlineSymbol = new SimpleLineSymbol(SimpleLineSymbolStyle.Solid, System.Drawing.Color.Red, 3);
            GraphicsOverlay extentOverlay = new GraphicsOverlay
            {
                Renderer = new SimpleRenderer(aoiOutlineSymbol)
            };
            _mapView.GraphicsOverlays.Add(extentOverlay);

            // Add a graphic to show the area of interest (extent) that will be taken offline.
            Graphic aoiGraphic = new Graphic(_areaOfInterest);
            extentOverlay.Graphics.Add(aoiGraphic);
        }
        catch (Exception ex)
        {
            // Show the exception message to the user.
            ShowStatusMessage(ex.Message);
        }
    }

    // Prepares the output folder and task parameters, then shows the overrides dialog.
    // The actual job is started from ConfigurationContinuation once the dialog closes.
    private async void TakeMapOfflineButton_Click(object sender, EventArgs e)
    {
        // Create a path for the output mobile map.
        string tempPath = $"{Path.GetTempPath()}";
        string[] outputFolders = Directory.GetDirectories(tempPath, "NapervilleWaterNetwork*");

        // Loop through the folder names and delete them.
        foreach (string dir in outputFolders)
        {
            try
            {
                // Delete the folder.
                Directory.Delete(dir, true);
            }
            catch (Exception)
            {
                // Ignore exceptions (files might be locked, for example).
            }
        }

        // Create a new folder for the output mobile map, appending a counter until the name is unused.
        _packagePath = Path.Combine(tempPath, @"NapervilleWaterNetwork");
        int num = 1;
        while (Directory.Exists(_packagePath))
        {
            _packagePath = Path.Combine(tempPath, @"NapervilleWaterNetwork" + num.ToString());
            num++;
        }

        // Create the output directory.
        Directory.CreateDirectory(_packagePath);

        try
        {
            // Create an offline map task with the current (online) map.
            _takeMapOfflineTask = await OfflineMapTask.CreateAsync(_mapView.Map);

            // Create the default parameters for the task, pass in the area of interest.
            _parameters = await _takeMapOfflineTask.CreateDefaultGenerateOfflineMapParametersAsync(_areaOfInterest);

            // Get the overrides.
            _overrides = await _takeMapOfflineTask.CreateGenerateOfflineMapParameterOverridesAsync(_parameters);

            // Create the overrides UI.
            ParameterOverrideFragment overlayFragment = new ParameterOverrideFragment(_overrides, _mapView.Map);

            // Complete configuration when the dialog is closed.
            overlayFragment.FinishedConfiguring += ConfigurationContinuation;

            // Display the configuration window.
            overlayFragment.Show(FragmentManager, "");
        }
        catch (Exception ex)
        {
            // Exception while taking the map offline.
            ShowStatusMessage(ex.Message);
        }
    }

    // Runs the offline-map job with the configured parameters and shows the result.
    private async void ConfigurationContinuation(object sender, EventArgs e)
    {
        try
        {
            // Show the progress dialog while the job is running.
            _alertDialog.Show();

            // Create the job with the parameters and output location.
            _generateOfflineMapJob = _takeMapOfflineTask.GenerateOfflineMap(_parameters, _packagePath, _overrides);

            // Handle the progress changed event for the job.
            _generateOfflineMapJob.ProgressChanged += OfflineMapJob_ProgressChanged;

            // Await the job to generate geodatabases, export tile packages, and create the mobile map package.
            GenerateOfflineMapResult results = await _generateOfflineMapJob.GetResultAsync();

            // Check for job failure (writing the output was denied, e.g.).
            if (_generateOfflineMapJob.Status != JobStatus.Succeeded)
            {
                // Report failure to the user.
                // NOTE(review): execution falls through and still uses `results` below;
                // presumably GetResultAsync throws on hard failures — confirm.
                ShowStatusMessage("Failed to take the map offline.");
            }

            // Check for errors with individual layers.
            if (results.LayerErrors.Any())
            {
                // Build a string to show all layer errors.
                System.Text.StringBuilder errorBuilder = new System.Text.StringBuilder();
                foreach (KeyValuePair<Layer, Exception> layerError in results.LayerErrors)
                {
                    errorBuilder.AppendLine($"{layerError.Key.Id} : {layerError.Value.Message}");
                }

                // Show layer errors.
                ShowStatusMessage(errorBuilder.ToString());
            }

            // Display the offline map.
            _mapView.Map = results.OfflineMap;

            // Apply the original viewpoint for the offline map.
            _mapView.SetViewpoint(new Viewpoint(_areaOfInterest));

            // Enable map interaction so the user can explore the offline data.
            _mapView.InteractionOptions.IsEnabled = true;

            // Change the title and disable the "Take map offline" button.
            _takeMapOfflineButton.Text = "Map is offline";
            _takeMapOfflineButton.Enabled = false;
        }
        catch (TaskCanceledException)
        {
            // Generate offline map task was canceled.
            ShowStatusMessage("Taking map offline was canceled");
        }
        catch (Exception ex)
        {
            // Exception while taking the map offline.
            ShowStatusMessage(ex.Message);
        }
        finally
        {
            // Always hide the progress dialog, whether the job succeeded, failed or was canceled.
            _alertDialog.Dismiss();
        }
    }

    // Displays a simple modal alert with the given message.
    private void ShowStatusMessage(string message)
    {
        // Display the message to the user.
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.SetMessage(message).SetTitle("Alert").Show();
    }

    // Show changes in job progress.
    private void OfflineMapJob_ProgressChanged(object sender, EventArgs e)
    {
        // Get the job.
        GenerateOfflineMapJob job = sender as GenerateOfflineMapJob;

        // Dispatch to the UI thread (progress events may arrive on a background thread).
        RunOnUiThread(() =>
        {
            // Show the percent complete and update the progress bar.
            string percentText = job.Progress > 0 ? job.Progress.ToString() + " %" : string.Empty;
            _progressIndicator.Progress = job.Progress;
            _alertDialog.SetMessage($"Taking map offline ({percentText}) ...");
        });
    }
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Collections.Generic;
using System.Linq;
using Hyak.Common;
using Microsoft.AzureStack.Management.Models;
namespace Microsoft.AzureStack.Management.Models
{
/// <summary>
/// Gallery item model.
/// </summary>
    public partial class GalleryItem
    {
        // Backing field for AdditionalProperties.
        private IDictionary<string, string> _additionalProperties;

        /// <summary>
        /// Optional. Additional properties.
        /// </summary>
        public IDictionary<string, string> AdditionalProperties
        {
            get { return this._additionalProperties; }
            set { this._additionalProperties = value; }
        }

        private IList<string> _categoryIds;

        /// <summary>
        /// Optional. Category identities.
        /// </summary>
        public IList<string> CategoryIds
        {
            get { return this._categoryIds; }
            set { this._categoryIds = value; }
        }

        private DefinitionTemplates _definitionTemplates;

        /// <summary>
        /// Optional. Definition templates.
        /// </summary>
        public DefinitionTemplates DefinitionTemplates
        {
            get { return this._definitionTemplates; }
            set { this._definitionTemplates = value; }
        }

        private string _description;

        /// <summary>
        /// Optional. Description.
        /// </summary>
        public string Description
        {
            get { return this._description; }
            set { this._description = value; }
        }

        // Keyed by icon kind so a gallery item can carry one URI per icon size/variant.
        private IDictionary<IconKind, string> _iconFileUris;

        /// <summary>
        /// Optional. Icon file Uris.
        /// </summary>
        public IDictionary<IconKind, string> IconFileUris
        {
            get { return this._iconFileUris; }
            set { this._iconFileUris = value; }
        }

        private string _identity;

        /// <summary>
        /// Optional. Identity.
        /// </summary>
        public string Identity
        {
            get { return this._identity; }
            set { this._identity = value; }
        }

        private string _itemDisplayName;

        /// <summary>
        /// Optional. Item display name.
        /// </summary>
        public string ItemDisplayName
        {
            get { return this._itemDisplayName; }
            set { this._itemDisplayName = value; }
        }

        private string _itemName;

        /// <summary>
        /// Optional. Item name.
        /// </summary>
        public string ItemName
        {
            get { return this._itemName; }
            set { this._itemName = value; }
        }

        private IList<LinkProperties> _links;

        /// <summary>
        /// Optional. Links.
        /// </summary>
        public IList<LinkProperties> Links
        {
            get { return this._links; }
            set { this._links = value; }
        }

        private string _longSummary;

        /// <summary>
        /// Optional. Long summary.
        /// </summary>
        public string LongSummary
        {
            get { return this._longSummary; }
            set { this._longSummary = value; }
        }

        private string _publisher;

        /// <summary>
        /// Optional. Publisher.
        /// </summary>
        public string Publisher
        {
            get { return this._publisher; }
            set { this._publisher = value; }
        }

        private string _publisherDisplayName;

        /// <summary>
        /// Optional. Publisher display name.
        /// </summary>
        public string PublisherDisplayName
        {
            get { return this._publisherDisplayName; }
            set { this._publisherDisplayName = value; }
        }

        private string _resourceGroupName;

        /// <summary>
        /// Optional. Resource group name.
        /// </summary>
        public string ResourceGroupName
        {
            get { return this._resourceGroupName; }
            set { this._resourceGroupName = value; }
        }

        private IList<string> _screenshotUris;

        /// <summary>
        /// Optional. Screenshot Uris.
        /// </summary>
        public IList<string> ScreenshotUris
        {
            get { return this._screenshotUris; }
            set { this._screenshotUris = value; }
        }

        private string _summary;

        /// <summary>
        /// Optional. Summary.
        /// </summary>
        public string Summary
        {
            get { return this._summary; }
            set { this._summary = value; }
        }

        private string _version;

        /// <summary>
        /// Optional. Version.
        /// </summary>
        public string Version
        {
            get { return this._version; }
            set { this._version = value; }
        }

        /// <summary>
        /// Initializes a new instance of the GalleryItem class.
        /// All collection-typed properties are pre-populated with Lazy*
        /// implementations from Hyak.Common — presumably to defer allocation
        /// until first use; TODO confirm against Hyak.Common documentation.
        /// Scalar properties are left null.
        /// </summary>
        public GalleryItem()
        {
            this.AdditionalProperties = new LazyDictionary<string, string>();
            this.CategoryIds = new LazyList<string>();
            this.IconFileUris = new LazyDictionary<IconKind, string>();
            this.Links = new LazyList<LinkProperties>();
            this.ScreenshotUris = new LazyList<string>();
        }
    }
}
| |
/*
* DocuSign REST API
*
* The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
*
* OpenAPI spec version: v2
* Contact: devcenter@docusign.com
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
namespace DocuSign.eSign.Model
{
/// <summary>
/// FolderItemResponse
/// </summary>
[DataContract]
public partial class FolderItemResponse : IEquatable<FolderItemResponse>, IValidatableObject
{
public FolderItemResponse()
{
// Empty Constructor
}
/// <summary>
/// Initializes a new instance of the <see cref="FolderItemResponse" /> class.
/// </summary>
/// <param name="EndPosition">The last position in the result set. .</param>
/// <param name="FolderItems">A list of the envelopes in the specified folder or folders. .</param>
/// <param name="NextUri">The URI to the next chunk of records based on the search request. If the endPosition is the entire results of the search, this is null. .</param>
/// <param name="PreviousUri">The postal code for the billing address..</param>
/// <param name="ResultSetSize">The number of results returned in this response. .</param>
/// <param name="StartPosition">Starting position of the current result set..</param>
/// <param name="TotalRows">.</param>
public FolderItemResponse(string EndPosition = default(string), List<FolderItemV2> FolderItems = default(List<FolderItemV2>), string NextUri = default(string), string PreviousUri = default(string), string ResultSetSize = default(string), string StartPosition = default(string), string TotalRows = default(string))
{
this.EndPosition = EndPosition;
this.FolderItems = FolderItems;
this.NextUri = NextUri;
this.PreviousUri = PreviousUri;
this.ResultSetSize = ResultSetSize;
this.StartPosition = StartPosition;
this.TotalRows = TotalRows;
}
/// <summary>
/// The last position in the result set.
/// </summary>
/// <value>The last position in the result set. </value>
[DataMember(Name="endPosition", EmitDefaultValue=false)]
public string EndPosition { get; set; }
/// <summary>
/// A list of the envelopes in the specified folder or folders.
/// </summary>
/// <value>A list of the envelopes in the specified folder or folders. </value>
[DataMember(Name="folderItems", EmitDefaultValue=false)]
public List<FolderItemV2> FolderItems { get; set; }
/// <summary>
/// The URI to the next chunk of records based on the search request. If the endPosition is the entire results of the search, this is null.
/// </summary>
/// <value>The URI to the next chunk of records based on the search request. If the endPosition is the entire results of the search, this is null. </value>
[DataMember(Name="nextUri", EmitDefaultValue=false)]
public string NextUri { get; set; }
/// <summary>
/// The postal code for the billing address.
/// </summary>
/// <value>The postal code for the billing address.</value>
[DataMember(Name="previousUri", EmitDefaultValue=false)]
public string PreviousUri { get; set; }
/// <summary>
/// The number of results returned in this response.
/// </summary>
/// <value>The number of results returned in this response. </value>
[DataMember(Name="resultSetSize", EmitDefaultValue=false)]
public string ResultSetSize { get; set; }
/// <summary>
/// Starting position of the current result set.
/// </summary>
/// <value>Starting position of the current result set.</value>
[DataMember(Name="startPosition", EmitDefaultValue=false)]
public string StartPosition { get; set; }
/// <summary>
///
/// </summary>
/// <value></value>
[DataMember(Name="totalRows", EmitDefaultValue=false)]
public string TotalRows { get; set; }
/// <summary>
/// Returns the string presentation of the object
/// </summary>
/// <returns>String presentation of the object</returns>
public override string ToString()
{
var sb = new StringBuilder();
sb.Append("class FolderItemResponse {\n");
sb.Append(" EndPosition: ").Append(EndPosition).Append("\n");
sb.Append(" FolderItems: ").Append(FolderItems).Append("\n");
sb.Append(" NextUri: ").Append(NextUri).Append("\n");
sb.Append(" PreviousUri: ").Append(PreviousUri).Append("\n");
sb.Append(" ResultSetSize: ").Append(ResultSetSize).Append("\n");
sb.Append(" StartPosition: ").Append(StartPosition).Append("\n");
sb.Append(" TotalRows: ").Append(TotalRows).Append("\n");
sb.Append("}\n");
return sb.ToString();
}
/// <summary>
/// Returns the JSON string presentation of the object
/// </summary>
/// <returns>JSON string presentation of the object</returns>
public string ToJson()
{
return JsonConvert.SerializeObject(this, Formatting.Indented);
}
/// <summary>
/// Returns true if objects are equal
/// </summary>
/// <param name="obj">Object to be compared</param>
/// <returns>Boolean</returns>
public override bool Equals(object obj)
{
// credit: http://stackoverflow.com/a/10454552/677735
return this.Equals(obj as FolderItemResponse);
}
/// <summary>
/// Returns true if FolderItemResponse instances are equal
/// </summary>
/// <param name="other">Instance of FolderItemResponse to be compared</param>
/// <returns>Boolean</returns>
public bool Equals(FolderItemResponse other)
{
// credit: http://stackoverflow.com/a/10454552/677735
if (other == null)
return false;
return
(
this.EndPosition == other.EndPosition ||
this.EndPosition != null &&
this.EndPosition.Equals(other.EndPosition)
) &&
(
this.FolderItems == other.FolderItems ||
this.FolderItems != null &&
this.FolderItems.SequenceEqual(other.FolderItems)
) &&
(
this.NextUri == other.NextUri ||
this.NextUri != null &&
this.NextUri.Equals(other.NextUri)
) &&
(
this.PreviousUri == other.PreviousUri ||
this.PreviousUri != null &&
this.PreviousUri.Equals(other.PreviousUri)
) &&
(
this.ResultSetSize == other.ResultSetSize ||
this.ResultSetSize != null &&
this.ResultSetSize.Equals(other.ResultSetSize)
) &&
(
this.StartPosition == other.StartPosition ||
this.StartPosition != null &&
this.StartPosition.Equals(other.StartPosition)
) &&
(
this.TotalRows == other.TotalRows ||
this.TotalRows != null &&
this.TotalRows.Equals(other.TotalRows)
);
}
/// <summary>
/// Gets the hash code
/// </summary>
/// <returns>Hash code</returns>
public override int GetHashCode()
{
// credit: http://stackoverflow.com/a/263416/677735
unchecked // Overflow is fine, just wrap
{
int hash = 41;
// Suitable nullity checks etc, of course :)
if (this.EndPosition != null)
hash = hash * 59 + this.EndPosition.GetHashCode();
if (this.FolderItems != null)
hash = hash * 59 + this.FolderItems.GetHashCode();
if (this.NextUri != null)
hash = hash * 59 + this.NextUri.GetHashCode();
if (this.PreviousUri != null)
hash = hash * 59 + this.PreviousUri.GetHashCode();
if (this.ResultSetSize != null)
hash = hash * 59 + this.ResultSetSize.GetHashCode();
if (this.StartPosition != null)
hash = hash * 59 + this.StartPosition.GetHashCode();
if (this.TotalRows != null)
hash = hash * 59 + this.TotalRows.GetHashCode();
return hash;
}
}
public IEnumerable<ValidationResult> Validate(ValidationContext validationContext)
{
yield break;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Web;
namespace Umbraco.Core.ObjectResolution
{
/// <summary>
/// The base class for all many-objects resolvers.
/// </summary>
/// <typeparam name="TResolver">The type of the concrete resolver class.</typeparam>
/// <typeparam name="TResolved">The type of the resolved objects.</typeparam>
    public abstract class ManyObjectsResolverBase<TResolver, TResolved> : ResolverBase<TResolver>
        where TResolved : class
        where TResolver : ResolverBase
    {
        // Application-scoped instances; created once on first access in Values and cached.
        private IEnumerable<TResolved> _applicationInstances = null;
        // Guards _instanceTypes and _applicationInstances. NOTE(review): _sortedValues
        // is NOT accessed under this lock — see GetSortedValues.
        private readonly ReaderWriterLockSlim _lock = new ReaderWriterLockSlim();
        // Key under which per-request instances are stored in HttpContext.Items
        // (only set when LifetimeScope is HttpRequest).
        private readonly string _httpContextKey;
        private readonly List<Type> _instanceTypes = new List<Type>();
        // Cache for GetSortedValues; never invalidated once computed (except by ResetCollections).
        private IEnumerable<TResolved> _sortedValues = null;
        private int _defaultPluginWeight = 10;

        #region Constructors

        /// <summary>
        /// Initializes a new instance of the <see cref="ManyObjectsResolverBase{TResolver, TResolved}"/> class with an empty list of objects,
        /// and an optional lifetime scope.
        /// </summary>
        /// <param name="scope">The lifetime scope of instantiated objects, default is per Application.</param>
        /// <remarks>If <paramref name="scope"/> is per HttpRequest then there must be a current HttpContext.</remarks>
        /// <exception cref="InvalidOperationException"><paramref name="scope"/> is per HttpRequest but the current HttpContext is null.</exception>
        protected ManyObjectsResolverBase(ObjectLifetimeScope scope = ObjectLifetimeScope.Application)
        {
            CanResolveBeforeFrozen = false;
            if (scope == ObjectLifetimeScope.HttpRequest)
            {
                if (HttpContext.Current == null)
                    throw new InvalidOperationException("Use alternative constructor accepting a HttpContextBase object in order to set the lifetime scope to HttpRequest when HttpContext.Current is null");
                CurrentHttpContext = new HttpContextWrapper(HttpContext.Current);
            }
            LifetimeScope = scope;
            // note: the HttpRequest scope is checked twice — this second check only
            // assigns the per-request storage key.
            if (scope == ObjectLifetimeScope.HttpRequest)
                _httpContextKey = this.GetType().FullName;
            // note: reassigning the readonly field here is legal inside a constructor;
            // it makes the field initializer above redundant for this path.
            _instanceTypes = new List<Type>();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ManyObjectsResolverBase{TResolver, TResolved}"/> class with an empty list of objects,
        /// with creation of objects based on an HttpRequest lifetime scope.
        /// </summary>
        /// <param name="httpContext">The HttpContextBase corresponding to the HttpRequest.</param>
        /// <exception cref="ArgumentNullException"><paramref name="httpContext"/> is <c>null</c>.</exception>
        protected ManyObjectsResolverBase(HttpContextBase httpContext)
        {
            CanResolveBeforeFrozen = false;
            if (httpContext == null)
                throw new ArgumentNullException("httpContext");
            LifetimeScope = ObjectLifetimeScope.HttpRequest;
            _httpContextKey = this.GetType().FullName;
            CurrentHttpContext = httpContext;
            _instanceTypes = new List<Type>();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ManyObjectsResolverBase{TResolver, TResolved}"/> class with an initial list of object types,
        /// and an optional lifetime scope.
        /// </summary>
        /// <param name="value">The list of object types.</param>
        /// <param name="scope">The lifetime scope of instantiated objects, default is per Application.</param>
        /// <remarks>If <paramref name="scope"/> is per HttpRequest then there must be a current HttpContext.</remarks>
        /// <exception cref="InvalidOperationException"><paramref name="scope"/> is per HttpRequest but the current HttpContext is null.</exception>
        protected ManyObjectsResolverBase(IEnumerable<Type> value, ObjectLifetimeScope scope = ObjectLifetimeScope.Application)
            : this(scope)
        {
            // ToList() snapshots the caller's sequence so later external mutation
            // cannot affect this resolver.
            _instanceTypes = value.ToList();
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="ManyObjectsResolverBase{TResolver, TResolved}"/> class with an initial list of objects,
        /// with creation of objects based on an HttpRequest lifetime scope.
        /// </summary>
        /// <param name="httpContext">The HttpContextBase corresponding to the HttpRequest.</param>
        /// <param name="value">The list of object types.</param>
        /// <exception cref="ArgumentNullException"><paramref name="httpContext"/> is <c>null</c>.</exception>
        protected ManyObjectsResolverBase(HttpContextBase httpContext, IEnumerable<Type> value)
            : this(httpContext)
        {
            _instanceTypes = value.ToList();
        }

        #endregion

        /// <summary>
        /// Gets or sets a value indicating whether the resolver can resolve objects before resolution is frozen.
        /// </summary>
        /// <remarks>This is false by default and is used for some special internal resolvers.</remarks>
        internal bool CanResolveBeforeFrozen { get; set; }

        /// <summary>
        /// Gets the list of types to create instances from.
        /// </summary>
        protected virtual IEnumerable<Type> InstanceTypes
        {
            get { return _instanceTypes; }
        }

        /// <summary>
        /// Gets or sets the <see cref="HttpContextBase"/> used to initialize this object, if any.
        /// </summary>
        /// <remarks>If not null, then <c>LifetimeScope</c> will be <c>ObjectLifetimeScope.HttpRequest</c>.</remarks>
        protected HttpContextBase CurrentHttpContext { get; private set; }

        /// <summary>
        /// Gets or sets the lifetime scope of resolved objects.
        /// </summary>
        protected ObjectLifetimeScope LifetimeScope { get; private set; }

        /// <summary>
        /// Gets the resolved object instances, sorted by weight.
        /// </summary>
        /// <returns>The sorted resolved object instances.</returns>
        /// <remarks>
        /// <para>The order is based upon the <c>WeightedPluginAttribute</c> and <c>DefaultPluginWeight</c>.</para>
        /// <para>Weights are sorted ascendingly (lowest weights come first).</para>
        /// </remarks>
        protected IEnumerable<TResolved> GetSortedValues()
        {
            // NOTE(review): _sortedValues is read and written without taking _lock.
            // Concurrent first calls may each sort once (last writer wins); the
            // sorted cache is never invalidated if weights change. Confirm this is
            // acceptable for all callers.
            if (_sortedValues == null)
            {
                var values = Values.ToList();
                values.Sort((f1, f2) => GetObjectWeight(f1).CompareTo(GetObjectWeight(f2)));
                _sortedValues = values;
            }
            return _sortedValues;
        }

        /// <summary>
        /// Gets or sets the default type weight.
        /// </summary>
        /// <remarks>Determines the weight of types that do not have a <c>WeightedPluginAttribute</c> set on
        /// them, when calling <c>GetSortedValues</c>.</remarks>
        protected virtual int DefaultPluginWeight
        {
            get { return _defaultPluginWeight; }
            set { _defaultPluginWeight = value; }
        }

        /// <summary>
        /// Returns the weight of an object for user with GetSortedValues
        /// </summary>
        /// <param name="o">The resolved object whose weight to determine.</param>
        /// <returns>The weight from the type's <c>WeightedPluginAttribute</c>, or
        /// <c>DefaultPluginWeight</c> when the attribute is absent.</returns>
        protected virtual int GetObjectWeight(object o)
        {
            var type = o.GetType();
            // 'true' — the attribute is also searched on base types.
            var attr = type.GetCustomAttribute<WeightedPluginAttribute>(true);
            return attr == null ? DefaultPluginWeight : attr.Weight;
        }

        /// <summary>
        /// Gets the resolved object instances.
        /// </summary>
        /// <exception cref="InvalidOperationException"><c>CanResolveBeforeFrozen</c> is false, and resolution is not frozen.</exception>
        protected IEnumerable<TResolved> Values
        {
            get
            {
                using (Resolution.Reader(CanResolveBeforeFrozen))
                {
                    // note: we apply .ToArray() to the output of CreateInstance() because that is an IEnumerable that
                    // comes from the PluginManager we want to be _sure_ that it's not a Linq of some sort, but the
                    // instances have actually been instanciated when we return.
                    switch (LifetimeScope)
                    {
                        case ObjectLifetimeScope.HttpRequest:
                            // create new instances per HttpContext
                            // Upgradeable-read pattern: read first, upgrade to write only
                            // when the cache needs populating.
                            using (var l = new UpgradeableReadLock(_lock))
                            {
                                // create if not already there
                                if (CurrentHttpContext.Items[_httpContextKey] == null)
                                {
                                    l.UpgradeToWriteLock();
                                    CurrentHttpContext.Items[_httpContextKey] = CreateInstances().ToArray();
                                }
                                return (TResolved[])CurrentHttpContext.Items[_httpContextKey];
                            }
                        case ObjectLifetimeScope.Application:
                            // create new instances per application
                            using (var l = new UpgradeableReadLock(_lock))
                            {
                                // create if not already there
                                if (_applicationInstances == null)
                                {
                                    l.UpgradeToWriteLock();
                                    _applicationInstances = CreateInstances().ToArray();
                                }
                                return _applicationInstances;
                            }
                        case ObjectLifetimeScope.Transient:
                        default:
                            // create new instances each time
                            return CreateInstances().ToArray();
                    }
                }
            }
        }

        /// <summary>
        /// Creates the object instances for the types contained in the types collection.
        /// </summary>
        /// <returns>A list of objects of type <typeparamref name="TResolved"/>.</returns>
        protected virtual IEnumerable<TResolved> CreateInstances()
        {
            return PluginManager.Current.CreateInstances<TResolved>(InstanceTypes);
        }

        #region Types collection manipulation

        /// <summary>
        /// Removes a type.
        /// </summary>
        /// <param name="value">The type to remove.</param>
        /// <exception cref="InvalidOperationException">the resolver does not support removing types, or
        /// the type is not a valid type for the resolver.</exception>
        public virtual void RemoveType(Type value)
        {
            EnsureSupportsRemove();

            // Resolution.Configuration ensures mutation only happens while
            // resolution is still configurable (not frozen).
            using (Resolution.Configuration)
            using (var l = new UpgradeableReadLock(_lock))
            {
                EnsureCorrectType(value);
                l.UpgradeToWriteLock();
                _instanceTypes.Remove(value);
            }
        }

        /// <summary>
        /// Removes a type.
        /// </summary>
        /// <typeparam name="T">The type to remove.</typeparam>
        /// <exception cref="InvalidOperationException">the resolver does not support removing types, or
        /// the type is not a valid type for the resolver.</exception>
        public void RemoveType<T>()
            where T : TResolved
        {
            RemoveType(typeof(T));
        }

        /// <summary>
        /// Adds types.
        /// </summary>
        /// <param name="types">The types to add.</param>
        /// <remarks>The types are appended at the end of the list.</remarks>
        /// <exception cref="InvalidOperationException">the resolver does not support adding types, or
        /// a type is not a valid type for the resolver, or a type is already in the collection of types.</exception>
        protected void AddTypes(IEnumerable<Type> types)
        {
            EnsureSupportsAdd();

            using (Resolution.Configuration)
            using (new WriteLock(_lock))
            {
                foreach(var t in types)
                {
                    EnsureCorrectType(t);
                    if (_instanceTypes.Contains(t))
                    {
                        throw new InvalidOperationException(string.Format(
                            "Type {0} is already in the collection of types.", t.FullName));
                    }

                    _instanceTypes.Add(t);
                }
            }
        }

        /// <summary>
        /// Adds a type.
        /// </summary>
        /// <param name="value">The type to add.</param>
        /// <remarks>The type is appended at the end of the list.</remarks>
        /// <exception cref="InvalidOperationException">the resolver does not support adding types, or
        /// the type is not a valid type for the resolver, or the type is already in the collection of types.</exception>
        public virtual void AddType(Type value)
        {
            EnsureSupportsAdd();

            using (Resolution.Configuration)
            using (var l = new UpgradeableReadLock(_lock))
            {
                EnsureCorrectType(value);
                if (_instanceTypes.Contains(value))
                {
                    throw new InvalidOperationException(string.Format(
                        "Type {0} is already in the collection of types.", value.FullName));
                }

                l.UpgradeToWriteLock();
                _instanceTypes.Add(value);
            }
        }

        /// <summary>
        /// Adds a type.
        /// </summary>
        /// <typeparam name="T">The type to add.</typeparam>
        /// <remarks>The type is appended at the end of the list.</remarks>
        /// <exception cref="InvalidOperationException">the resolver does not support adding types, or
        /// the type is not a valid type for the resolver, or the type is already in the collection of types.</exception>
        public void AddType<T>()
            where T : TResolved
        {
            AddType(typeof(T));
        }

        /// <summary>
        /// Clears the list of types
        /// </summary>
        /// <exception cref="InvalidOperationException">the resolver does not support clearing types.</exception>
        public virtual void Clear()
        {
            EnsureSupportsClear();

            using (Resolution.Configuration)
            using (new WriteLock(_lock))
            {
                _instanceTypes.Clear();
            }
        }

        /// <summary>
        /// WARNING! Do not use this unless you know what you are doing, clear all types registered and instances
        /// created. Typically only used if a resolver is no longer used in an application and memory is to be GC'd
        /// </summary>
        internal void ResetCollections()
        {
            // note: unlike the public mutators, this does NOT require
            // Resolution.Configuration — it is an internal escape hatch.
            using (new WriteLock(_lock))
            {
                _instanceTypes.Clear();
                _sortedValues = null;
                _applicationInstances = null;
            }
        }

        /// <summary>
        /// Inserts a type at the specified index.
        /// </summary>
        /// <param name="index">The zero-based index at which the type should be inserted.</param>
        /// <param name="value">The type to insert.</param>
        /// <exception cref="InvalidOperationException">the resolver does not support inserting types, or
        /// the type is not a valid type for the resolver, or the type is already in the collection of types.</exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="index"/> is out of range.</exception>
        public virtual void InsertType(int index, Type value)
        {
            EnsureSupportsInsert();

            using (Resolution.Configuration)
            using (var l = new UpgradeableReadLock(_lock))
            {
                EnsureCorrectType(value);
                if (_instanceTypes.Contains(value))
                {
                    throw new InvalidOperationException(string.Format(
                        "Type {0} is already in the collection of types.", value.FullName));
                }

                l.UpgradeToWriteLock();
                _instanceTypes.Insert(index, value);
            }
        }

        /// <summary>
        /// Inserts a type at the beginning of the list.
        /// </summary>
        /// <param name="value">The type to insert.</param>
        /// <exception cref="InvalidOperationException">the resolver does not support inserting types, or
        /// the type is not a valid type for the resolver, or the type is already in the collection of types.</exception>
        public virtual void InsertType(Type value)
        {
            InsertType(0, value);
        }

        /// <summary>
        /// Inserts a type at the specified index.
        /// </summary>
        /// <typeparam name="T">The type to insert.</typeparam>
        /// <param name="index">The zero-based index at which the type should be inserted.</param>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="index"/> is out of range.</exception>
        public void InsertType<T>(int index)
            where T : TResolved
        {
            InsertType(index, typeof(T));
        }

        /// <summary>
        /// Inserts a type at the beginning of the list.
        /// </summary>
        /// <typeparam name="T">The type to insert.</typeparam>
        public void InsertType<T>()
            where T : TResolved
        {
            InsertType(0, typeof(T));
        }

        /// <summary>
        /// Inserts a type before a specified, already existing type.
        /// </summary>
        /// <param name="existingType">The existing type before which to insert.</param>
        /// <param name="value">The type to insert.</param>
        /// <exception cref="InvalidOperationException">the resolver does not support inserting types, or
        /// one of the types is not a valid type for the resolver, or the existing type is not in the collection,
        /// or the new type is already in the collection of types.</exception>
        public virtual void InsertTypeBefore(Type existingType, Type value)
        {
            EnsureSupportsInsert();

            using (Resolution.Configuration)
            using (var l = new UpgradeableReadLock(_lock))
            {
                EnsureCorrectType(existingType);
                EnsureCorrectType(value);
                if (!_instanceTypes.Contains(existingType))
                {
                    throw new InvalidOperationException(string.Format(
                        "Type {0} is not in the collection of types.", existingType.FullName));
                }
                if (_instanceTypes.Contains(value))
                {
                    throw new InvalidOperationException(string.Format(
                        "Type {0} is already in the collection of types.", value.FullName));
                }

                // Find the position while still holding the read lock, then upgrade.
                int index = _instanceTypes.IndexOf(existingType);
                l.UpgradeToWriteLock();
                _instanceTypes.Insert(index, value);
            }
        }

        /// <summary>
        /// Inserts a type before a specified, already existing type.
        /// </summary>
        /// <typeparam name="TExisting">The existing type before which to insert.</typeparam>
        /// <typeparam name="T">The type to insert.</typeparam>
        /// <exception cref="InvalidOperationException">the resolver does not support inserting types, or
        /// one of the types is not a valid type for the resolver, or the existing type is not in the collection,
        /// or the new type is already in the collection of types.</exception>
        public void InsertTypeBefore<TExisting, T>()
            where TExisting : TResolved
            where T : TResolved
        {
            InsertTypeBefore(typeof(TExisting), typeof(T));
        }

        /// <summary>
        /// Returns a value indicating whether the specified type is already in the collection of types.
        /// </summary>
        /// <param name="value">The type to look for.</param>
        /// <returns>A value indicating whether the type is already in the collection of types.</returns>
        public virtual bool ContainsType(Type value)
        {
            using (new ReadLock(_lock))
            {
                return _instanceTypes.Contains(value);
            }
        }

        /// <summary>
        /// Gets the types in the collection of types.
        /// </summary>
        /// <returns>The types in the collection of types.</returns>
        /// <remarks>Returns an enumeration, the list cannot be modified.</remarks>
        public virtual IEnumerable<Type> GetTypes()
        {
            // Snapshot under the read lock so the returned array is stable.
            Type[] types;
            using (new ReadLock(_lock))
            {
                types = _instanceTypes.ToArray();
            }
            return types;
        }

        /// <summary>
        /// Returns a value indicating whether the specified type is already in the collection of types.
        /// </summary>
        /// <typeparam name="T">The type to look for.</typeparam>
        /// <returns>A value indicating whether the type is already in the collection of types.</returns>
        public bool ContainsType<T>()
            where T : TResolved
        {
            return ContainsType(typeof(T));
        }

        #endregion

        /// <summary>
        /// Returns a WriteLock to use when modifying collections
        /// </summary>
        /// <returns>A write lock over this resolver's internal lock; dispose to release.</returns>
        protected WriteLock GetWriteLock()
        {
            return new WriteLock(_lock);
        }

        #region Type utilities

        /// <summary>
        /// Ensures that a type is a valid type for the resolver.
        /// </summary>
        /// <param name="value">The type to test.</param>
        /// <exception cref="InvalidOperationException">the type is not a valid type for the resolver.</exception>
        protected virtual void EnsureCorrectType(Type value)
        {
            if (!TypeHelper.IsTypeAssignableFrom<TResolved>(value))
                throw new InvalidOperationException(string.Format(
                    "Type {0} is not an acceptable type for resolver {1}.", value.FullName, this.GetType().FullName));
        }

        #endregion

        #region Types collection manipulation support

        /// <summary>
        /// Ensures that the resolver supports removing types.
        /// </summary>
        /// <exception cref="InvalidOperationException">The resolver does not support removing types.</exception>
        protected void EnsureSupportsRemove()
        {
            if (!SupportsRemove)
                throw new InvalidOperationException("This resolver does not support removing types");
        }

        /// <summary>
        /// Ensures that the resolver supports clearing types.
        /// </summary>
        /// <exception cref="InvalidOperationException">The resolver does not support clearing types.</exception>
        protected void EnsureSupportsClear() {
            if (!SupportsClear)
                throw new InvalidOperationException("This resolver does not support clearing types");
        }

        /// <summary>
        /// Ensures that the resolver supports adding types.
        /// </summary>
        /// <exception cref="InvalidOperationException">The resolver does not support adding types.</exception>
        protected void EnsureSupportsAdd()
        {
            if (!SupportsAdd)
                throw new InvalidOperationException("This resolver does not support adding new types");
        }

        /// <summary>
        /// Ensures that the resolver supports inserting types.
        /// </summary>
        /// <exception cref="InvalidOperationException">The resolver does not support inserting types.</exception>
        protected void EnsureSupportsInsert()
        {
            if (!SupportsInsert)
                throw new InvalidOperationException("This resolver does not support inserting new types");
        }

        /// <summary>
        /// Gets a value indicating whether the resolver supports adding types.
        /// </summary>
        protected virtual bool SupportsAdd
        {
            get { return true; }
        }

        /// <summary>
        /// Gets a value indicating whether the resolver supports inserting types.
        /// </summary>
        protected virtual bool SupportsInsert
        {
            get { return true; }
        }

        /// <summary>
        /// Gets a value indicating whether the resolver supports clearing types.
        /// </summary>
        protected virtual bool SupportsClear
        {
            get { return true; }
        }

        /// <summary>
        /// Gets a value indicating whether the resolver supports removing types.
        /// </summary>
        protected virtual bool SupportsRemove
        {
            get { return true; }
        }

        #endregion
    }
}
| |
//-----------------------------------------------------------------------
// <copyright file="GraphUnzipSpec.cs" company="Akka.NET Project">
// Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
using System;
using System.Collections.Generic;
using Akka.Streams.Dsl;
using Akka.Streams.TestKit;
using Akka.Streams.TestKit.Tests;
using Xunit;
using Xunit.Abstractions;
// ReSharper disable InvokeAsExtensionMethod
namespace Akka.Streams.Tests.Dsl
{
public class GraphUnzipSpec : AkkaSpec
{
        // Materializer shared by every test in this spec.
        private ActorMaterializer Materializer { get; }

        /// <summary>
        /// Creates the spec; configures a materializer with a small input buffer
        /// (2..16) so backpressure effects are observable in the tests below.
        /// </summary>
        public GraphUnzipSpec(ITestOutputHelper helper) : base (helper)
        {
            var settings = ActorMaterializerSettings.Create(Sys).WithInputBuffer(2, 16);
            Materializer = ActorMaterializer.Create(Sys, settings);
        }
[Fact]
public void A_Unzip_must_unzip_to_two_subscribers()
{
this.AssertAllStagesStopped(() =>
{
var c1 = TestSubscriber.CreateManualProbe<int>(this);
var c2 = TestSubscriber.CreateManualProbe<string>(this);
RunnableGraph.FromGraph(GraphDsl.Create(b =>
{
var unzip = b.Add(new UnZip<int, string>());
var source =
Source.From(new[]
{
new KeyValuePair<int, string>(1, "a"),
new KeyValuePair<int, string>(2, "b"),
new KeyValuePair<int, string>(3, "c")
});
b.From(source).To(unzip.In);
b.From(unzip.Out0)
.Via(Flow.Create<int>().Buffer(16, OverflowStrategy.Backpressure).Select(x => x*2))
.To(Sink.FromSubscriber(c1));
b.From(unzip.Out1)
.Via(Flow.Create<string>().Buffer(16, OverflowStrategy.Backpressure))
.To(Sink.FromSubscriber(c2));
return ClosedShape.Instance;
})).Run(Materializer);
var sub1 = c1.ExpectSubscription();
var sub2 = c2.ExpectSubscription();
sub1.Request(1);
sub2.Request(2);
c1.ExpectNext(1*2);
c1.ExpectNoMsg(TimeSpan.FromMilliseconds(100));
c2.ExpectNext("a", "b");
c2.ExpectNoMsg(TimeSpan.FromMilliseconds(100));
sub1.Request(3);
c1.ExpectNext(2*2, 3*2);
c1.ExpectComplete();
sub2.Request(3);
c2.ExpectNext("c");
c2.ExpectComplete();
}, Materializer);
}
[Fact]
public void A_Unzip_must_produce_to_right_downstream_even_though_left_downstream_cancels()
{
this.AssertAllStagesStopped(() =>
{
var c1 = TestSubscriber.CreateManualProbe<int>(this);
var c2 = TestSubscriber.CreateManualProbe<string>(this);
RunnableGraph.FromGraph(GraphDsl.Create(b =>
{
var unzip = b.Add(new UnZip<int, string>());
var source =
Source.From(new[]
{
new KeyValuePair<int, string>(1, "a"),
new KeyValuePair<int, string>(2, "b"),
new KeyValuePair<int, string>(3, "c")
});
b.From(source).To(unzip.In);
b.From(unzip.Out0).To(Sink.FromSubscriber(c1));
b.From(unzip.Out1).To(Sink.FromSubscriber(c2));
return ClosedShape.Instance;
})).Run(Materializer);
var sub1 = c1.ExpectSubscription();
var sub2 = c2.ExpectSubscription();
sub1.Cancel();
sub2.Request(3);
c2.ExpectNext("a", "b", "c");
c2.ExpectComplete();
}, Materializer);
}
[Fact]
public void A_Unzip_must_produce_to_left_downstream_even_though_right_downstream_cancels()
{
    this.AssertAllStagesStopped(() =>
    {
        // Manual probes so the test can drive demand and cancellation explicitly.
        var leftProbe = TestSubscriber.CreateManualProbe<int>(this);
        var rightProbe = TestSubscriber.CreateManualProbe<string>(this);

        RunnableGraph.FromGraph(GraphDsl.Create(builder =>
        {
            var unzip = builder.Add(new UnZip<int, string>());
            var pairs = Source.From(new[]
            {
                new KeyValuePair<int, string>(1, "a"),
                new KeyValuePair<int, string>(2, "b"),
                new KeyValuePair<int, string>(3, "c")
            });

            builder.From(pairs).To(unzip.In);
            builder.From(unzip.Out0).To(Sink.FromSubscriber(leftProbe));
            builder.From(unzip.Out1).To(Sink.FromSubscriber(rightProbe));
            return ClosedShape.Instance;
        })).Run(Materializer);

        var leftSubscription = leftProbe.ExpectSubscription();
        var rightSubscription = rightProbe.ExpectSubscription();

        // Cancelling the right output must not stop elements flowing to the left one.
        rightSubscription.Cancel();
        leftSubscription.Request(3);
        leftProbe.ExpectNext(1, 2, 3);
        leftProbe.ExpectComplete();
    }, Materializer);
}
[Fact]
public void A_Unzip_must_cancel_upstream_when_downstream_cancel()
{
    this.AssertAllStagesStopped(() =>
    {
        var upstream = TestPublisher.CreateManualProbe<KeyValuePair<int, string>>(this);
        var leftProbe = TestSubscriber.CreateManualProbe<int>(this);
        var rightProbe = TestSubscriber.CreateManualProbe<string>(this);

        RunnableGraph.FromGraph(GraphDsl.Create(builder =>
        {
            var unzip = builder.Add(new UnZip<int, string>());
            builder.From(Source.FromPublisher(upstream.Publisher)).To(unzip.In);
            builder.From(unzip.Out0).To(Sink.FromSubscriber(leftProbe));
            builder.From(unzip.Out1).To(Sink.FromSubscriber(rightProbe));
            return ClosedShape.Instance;
        })).Run(Materializer);

        var upstreamSubscription = upstream.ExpectSubscription();
        var leftSubscription = leftProbe.ExpectSubscription();
        var rightSubscription = rightProbe.ExpectSubscription();

        leftSubscription.Request(3);
        rightSubscription.Request(3);
        // Internal buffering makes the stage request a batch (16) from upstream.
        upstream.ExpectRequest(upstreamSubscription, 16);

        upstreamSubscription.SendNext(new KeyValuePair<int, string>(1, "a"));
        leftProbe.ExpectNext(1);
        rightProbe.ExpectNext("a");

        upstreamSubscription.SendNext(new KeyValuePair<int, string>(2, "b"));
        leftProbe.ExpectNext(2);
        rightProbe.ExpectNext("b");

        // Once both outputs cancel, the stage must cancel its upstream.
        leftSubscription.Cancel();
        rightSubscription.Cancel();
        upstreamSubscription.ExpectCancellation();
    }, Materializer);
}
[Fact]
public void A_Unzip_must_work_with_Zip()
{
    this.AssertAllStagesStopped(() =>
    {
        var probe = TestSubscriber.CreateManualProbe<Tuple<int, string>>(this);

        RunnableGraph.FromGraph(GraphDsl.Create(builder =>
        {
            var zip = builder.Add(new Zip<int, string>());
            var unzip = builder.Add(new UnZip<int, string>());
            var pairs = Source.From(new[]
            {
                new KeyValuePair<int, string>(1, "a"),
                new KeyValuePair<int, string>(2, "b"),
                new KeyValuePair<int, string>(3, "c")
            });

            // Split each pair apart and immediately re-join the halves.
            builder.From(pairs).To(unzip.In);
            builder.From(unzip.Out0).To(zip.In0);
            builder.From(unzip.Out1).To(zip.In1);
            builder.From(zip.Out).To(Sink.FromSubscriber(probe));
            return ClosedShape.Instance;
        })).Run(Materializer);

        var subscription = probe.ExpectSubscription();
        subscription.Request(5);
        probe.ExpectNext(Tuple.Create(1, "a"));
        probe.ExpectNext(Tuple.Create(2, "b"));
        probe.ExpectNext(Tuple.Create(3, "c"));
        probe.ExpectComplete();
    }, Materializer);
}
}
}
| |
// Copyright (c) 2010-2014 SharpDX - Alexandre Mutel
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.IO;
using System.Runtime.InteropServices;
using SharpDX.Mathematics.Interop;
namespace SharpDX.Direct3D9
{
public partial class Texture
{
/// <summary>
/// Initializes a new instance of the <see cref="Texture"/> class by creating the
/// underlying texture resource on the given device.
/// </summary>
/// <param name="device">The device on which the texture is created.</param>
/// <param name="width">The width of the top mip level, in texels.</param>
/// <param name="height">The height of the top mip level, in texels.</param>
/// <param name="levelCount">The number of mip levels to allocate (0 presumably requests a complete mip chain — NOTE(review): behavior is defined by native CreateTexture, confirm).</param>
/// <param name="usage">The usage flags for the resource.</param>
/// <param name="format">The pixel format of the texture.</param>
/// <param name="pool">The memory pool the resource is placed in.</param>
/// <unmanaged>HRESULT IDirect3DDevice9::CreateTexture([In] unsigned int Width,[In] unsigned int Height,[In] unsigned int Levels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[Out, Fast] IDirect3DTexture9** ppTexture,[In] void** pSharedHandle)</unmanaged>
public Texture(Device device, int width, int height, int levelCount, Usage usage, Format format, Pool pool)
    : base(IntPtr.Zero)
{
    // No shared handle: pass IntPtr.Zero for pSharedHandle.
    device.CreateTexture(width, height, levelCount, (int)usage, format, pool, this, IntPtr.Zero);
}
/// <summary>
/// Initializes a new instance of the <see cref="Texture"/> class, passing a shared
/// handle so the resource can be shared between devices.
/// </summary>
/// <param name="device">The device on which the texture is created.</param>
/// <param name="width">The width of the top mip level, in texels.</param>
/// <param name="height">The height of the top mip level, in texels.</param>
/// <param name="levelCount">The number of mip levels to allocate.</param>
/// <param name="usage">The usage flags for the resource.</param>
/// <param name="format">The pixel format of the texture.</param>
/// <param name="pool">The memory pool the resource is placed in.</param>
/// <param name="sharedHandle">The shared handle; the native call may read or update it in place (passed as void**).</param>
/// <unmanaged>HRESULT IDirect3DDevice9::CreateTexture([In] unsigned int Width,[In] unsigned int Height,[In] unsigned int Levels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[Out, Fast] IDirect3DTexture9** ppTexture,[In] void** pSharedHandle)</unmanaged>
public Texture(Device device, int width, int height, int levelCount, Usage usage, Format format, Pool pool, ref IntPtr sharedHandle)
    : base(IntPtr.Zero)
{
    unsafe
    {
        // Pin the managed ref so the native call can access the handle through void**.
        fixed (void* pSharedHandle = &sharedHandle)
            device.CreateTexture(width, height, levelCount, (int)usage, format, pool, this, new IntPtr(pSharedHandle));
    }
}
/// <summary>
/// Checks texture-creation parameters, returning the values the runtime would
/// actually use for a texture created with the given settings.
/// </summary>
/// <param name="device">Device associated with the texture.</param>
/// <param name="width">Requested width; may be adjusted by the runtime.</param>
/// <param name="height">Requested height; may be adjusted by the runtime.</param>
/// <param name="mipLevelCount">Requested number of mipmap levels; may be adjusted by the runtime.</param>
/// <param name="usage">The requested usage for the texture.</param>
/// <param name="format">Requested format; may be adjusted by the runtime.</param>
/// <param name="pool">Memory class where the resource will be placed.</param>
/// <returns>
/// A value type containing the proposed values to pass to the texture creation functions.
/// </returns>
/// <unmanaged>HRESULT D3DXCheckTextureRequirements([In] IDirect3DDevice9* pDevice,[InOut] unsigned int* pWidth,[InOut] unsigned int* pHeight,[InOut] unsigned int* pNumMipLevels,[In] unsigned int Usage,[InOut] D3DFORMAT* pFormat,[In] D3DPOOL Pool)</unmanaged>
public static TextureRequirements CheckRequirements(Device device, int width, int height, int mipLevelCount, Usage usage, Format format, Pool pool)
{
    // D3DXCheckTextureRequirements treats these arguments as [InOut]: seed the
    // structure with the caller's requested values so the native call can adjust
    // them. Previously the inputs were ignored and zero-initialized fields were
    // passed instead, so the caller's width/height/mip/format had no effect.
    var result = new TextureRequirements
    {
        Width = width,
        Height = height,
        MipLevelCount = mipLevelCount,
        Format = format
    };
    D3DX9.CheckTextureRequirements(device, ref result.Width, ref result.Height, ref result.MipLevelCount, (int)usage, ref result.Format, pool);
    return result;
}
/// <summary>
/// Computes a normal map from the source height-map texture into <paramref name="texture"/>.
/// </summary>
/// <param name="texture">The destination texture receiving the normal map.</param>
/// <param name="sourceTexture">The source texture interpreted as a height map.</param>
/// <param name="flags">Flags controlling the normal-map generation.</param>
/// <param name="channel">The channel of the source texture to read heights from.</param>
/// <param name="amplitude">Scale applied to the height values.</param>
/// <returns>A <see cref="SharpDX.Result" /> object describing the result of the operation.</returns>
/// <unmanaged>HRESULT D3DXComputeNormalMap([In] IDirect3DTexture9* pTexture,[In] IDirect3DTexture9* pSrcTexture,[Out, Buffer] const PALETTEENTRY* pSrcPalette,[In] unsigned int Flags,[In] unsigned int Channel,[In] float Amplitude)</unmanaged>
public static void ComputeNormalMap(Texture texture, Texture sourceTexture, NormalMapFlags flags, Channel channel, float amplitude)
{
    // No palette overload: pass null for pSrcPalette.
    D3DX9.ComputeNormalMap(texture, sourceTexture, null, (int)flags, (int)channel, amplitude);
}
/// <summary>
/// Computes a normal map from the source height-map texture into <paramref name="texture"/>,
/// using an explicit source palette.
/// </summary>
/// <param name="texture">The destination texture receiving the normal map.</param>
/// <param name="sourceTexture">The source texture interpreted as a height map.</param>
/// <param name="palette">The palette used to interpret the source texture.</param>
/// <param name="flags">Flags controlling the normal-map generation.</param>
/// <param name="channel">The channel of the source texture to read heights from.</param>
/// <param name="amplitude">Scale applied to the height values.</param>
/// <returns>A <see cref="SharpDX.Result" /> object describing the result of the operation.</returns>
/// <unmanaged>HRESULT D3DXComputeNormalMap([In] IDirect3DTexture9* pTexture,[In] IDirect3DTexture9* pSrcTexture,[Out, Buffer] const PALETTEENTRY* pSrcPalette,[In] unsigned int Flags,[In] unsigned int Channel,[In] float Amplitude)</unmanaged>
public static void ComputeNormalMap(Texture texture, Texture sourceTexture, PaletteEntry[] palette, NormalMapFlags flags, Channel channel, float amplitude)
{
    D3DX9.ComputeNormalMap(texture, sourceTexture, palette, (int)flags, (int)channel, amplitude);
}
/// <summary>
/// Uses a user-provided function to fill each texel of each mip level of this texture.
/// </summary>
/// <param name="callback">The managed function invoked per texel to produce its value.</param>
/// <returns>A <see cref="SharpDX.Result" /> object describing the result of the operation.</returns>
/// <unmanaged>HRESULT D3DXFillTexture([In] IDirect3DTexture9* pTexture,[In] __function__stdcall* pFunction,[In] void* pData)</unmanaged>
public void Fill(Fill2DCallback callback)
{
    // Pin the delegate via a GCHandle so it stays alive (and reachable through
    // the user-data pointer) while native code invokes the static thunk.
    var callbackHandle = GCHandle.Alloc(callback);
    try
    {
        D3DX9.FillTexture(this, FillCallbackHelper.Native2DCallbackPtr, GCHandle.ToIntPtr(callbackHandle));
    }
    finally
    {
        // Always release the handle, even if the native call throws.
        callbackHandle.Free();
    }
}
/// <summary>
/// Uses a compiled high-level shader language (HLSL) function to fill each texel of each mipmap level of this texture.
/// </summary>
/// <param name="shader">A texture shader object that is used to fill the texture.</param>
/// <returns>A <see cref="SharpDX.Result" /> object describing the result of the operation.</returns>
/// <unmanaged>HRESULT D3DXFillTextureTX([In] IDirect3DTexture9* pTexture,[In] ID3DXTextureShader* pTextureShader)</unmanaged>
public void Fill(TextureShader shader)
{
    D3DX9.FillTextureTX(this, shader);
}
/// <summary>
/// Locks an entire mip level of the texture for CPU access.
/// </summary>
/// <param name="level">The mip level to lock.</param>
/// <param name="flags">The lock flags.</param>
/// <returns>
/// A <see cref="DataRectangle"/> describing the region locked.
/// </returns>
/// <unmanaged>HRESULT IDirect3DTexture9::LockRect([In] unsigned int Level,[Out] D3DLOCKED_RECT* pLockedRect,[In] const void* pRect,[In] D3DLOCK Flags)</unmanaged>
public DataRectangle LockRectangle(int level, SharpDX.Direct3D9.LockFlags flags)
{
    LockedRectangle locked;
    // IntPtr.Zero for pRect locks the whole level rather than a sub-rectangle.
    LockRectangle(level, out locked, IntPtr.Zero, flags);
    return new DataRectangle(locked.PBits, locked.Pitch);
}
/// <summary>
/// Locks an entire mip level of the texture for CPU access and exposes it as a stream.
/// </summary>
/// <param name="level">The mip level to lock.</param>
/// <param name="flags">The lock flags.</param>
/// <param name="stream">The stream pointing to the locked region.</param>
/// <returns>
/// A <see cref="DataRectangle"/> describing the region locked.
/// </returns>
/// <unmanaged>HRESULT IDirect3DTexture9::LockRect([In] unsigned int Level,[Out] D3DLOCKED_RECT* pLockedRect,[In] const void* pRect,[In] D3DLOCK Flags)</unmanaged>
public DataRectangle LockRectangle(int level, SharpDX.Direct3D9.LockFlags flags, out DataStream stream)
{
    LockedRectangle locked;
    // IntPtr.Zero for pRect locks the whole level rather than a sub-rectangle.
    LockRectangle(level, out locked, IntPtr.Zero, flags);

    // Stream spans pitch * height bytes; it is writable unless the lock is read-only.
    var sizeInBytes = locked.Pitch * GetLevelDescription(level).Height;
    var canWrite = (flags & LockFlags.ReadOnly) == 0;
    stream = new DataStream(locked.PBits, sizeInBytes, true, canWrite);

    return new DataRectangle(locked.PBits, locked.Pitch);
}
/// <summary>
/// Locks a sub-rectangle of a mip level of the texture for CPU access.
/// </summary>
/// <param name="level">The mip level to lock.</param>
/// <param name="rectangle">The region of the level to lock.</param>
/// <param name="flags">The lock flags.</param>
/// <returns>
/// A <see cref="DataRectangle"/> describing the region locked.
/// </returns>
/// <unmanaged>HRESULT IDirect3DTexture9::LockRect([In] D3DCUBEMAP_FACES FaceType,[In] unsigned int Level,[In] D3DLOCKED_RECT* pLockedRect,[In] const void* pRect,[In] D3DLOCK Flags)</unmanaged>
public DataRectangle LockRectangle(int level, RawRectangle rectangle, SharpDX.Direct3D9.LockFlags flags)
{
    unsafe
    {
        LockedRectangle locked;
        // 'rectangle' is a by-value copy living on this stack frame, so taking
        // its address is safe for the duration of the native call.
        LockRectangle(level, out locked, new IntPtr(&rectangle), flags);
        return new DataRectangle(locked.PBits, locked.Pitch);
    }
}
/// <summary>
/// Locks a sub-rectangle of a mip level of the texture for CPU access and exposes it as a stream.
/// </summary>
/// <param name="level">The mip level to lock.</param>
/// <param name="rectangle">The region of the level to lock.</param>
/// <param name="flags">The lock flags.</param>
/// <param name="stream">The stream pointing to the locked region.</param>
/// <returns>
/// A <see cref="DataRectangle"/> describing the region locked.
/// </returns>
/// <unmanaged>HRESULT IDirect3DTexture9::LockRect([In] D3DCUBEMAP_FACES FaceType,[In] unsigned int Level,[In] D3DLOCKED_RECT* pLockedRect,[In] const void* pRect,[In] D3DLOCK Flags)</unmanaged>
public DataRectangle LockRectangle(int level, RawRectangle rectangle, SharpDX.Direct3D9.LockFlags flags, out DataStream stream)
{
    unsafe
    {
        LockedRectangle locked;
        // 'rectangle' is a by-value copy living on this stack frame, so taking
        // its address is safe for the duration of the native call.
        LockRectangle(level, out locked, new IntPtr(&rectangle), flags);

        // Stream spans pitch * height bytes; it is writable unless the lock is read-only.
        var sizeInBytes = locked.Pitch * GetLevelDescription(level).Height;
        var canWrite = (flags & LockFlags.ReadOnly) == 0;
        stream = new DataStream(locked.PBits, sizeInBytes, true, canWrite);

        return new DataRectangle(locked.PBits, locked.Pitch);
    }
}
/// <summary>
/// Adds a dirty region to the texture resource.
/// </summary>
/// <returns>
/// A <see cref="SharpDX.Result"/> object describing the result of the operation.
/// </returns>
/// <unmanaged>HRESULT IDirect3DTexture9::AddDirtyRect([In] const void* pDirtyRect)</unmanaged>
public void AddDirtyRectangle()
{
    // Passes a null rectangle (IntPtr.Zero) — per D3D9 a NULL pDirtyRect marks
    // the entire texture dirty; NOTE(review): confirm against the native docs.
    AddDirtyRectangle(IntPtr.Zero);
}
/// <summary>
/// Adds a dirty region to the texture resource.
/// </summary>
/// <param name="dirtyRectRef">The rectangle describing the dirty region.</param>
/// <returns>
/// A <see cref="SharpDX.Result"/> object describing the result of the operation.
/// </returns>
/// <unmanaged>HRESULT IDirect3DTexture9::AddDirtyRect([In] const void* pDirtyRect)</unmanaged>
public void AddDirtyRectangle(RawRectangle dirtyRectRef)
{
    unsafe
    {
        // The parameter is a by-value copy on this stack frame, so its address
        // is valid for the duration of the native call.
        AddDirtyRectangle(new IntPtr(&dirtyRectRef));
    }
}
/// <summary>
/// Creates a <see cref="Texture"/> from a file, using the file's own size and format.
/// </summary>
/// <param name="device">The device.</param>
/// <param name="filename">The path of the image file to load.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileExW([In] IDirect3DDevice9* pDevice,[In] const wchar_t* pSrcFile,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[In] void* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static Texture FromFile(Device device, string filename)
{
    Texture texture;
    D3DX9.CreateTextureFromFileW(device, filename, out texture);
    return texture;
}
/// <summary>
/// Creates a <see cref="Texture"/> from a file with the given usage and pool,
/// taking size, format and mip count from the file itself.
/// </summary>
/// <param name="device">The device.</param>
/// <param name="filename">The path of the image file to load.</param>
/// <param name="usage">The usage.</param>
/// <param name="pool">The pool.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileExW([In] IDirect3DDevice9* pDevice,[In] const wchar_t* pSrcFile,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[In] void* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static Texture FromFile(Device device, string filename, Usage usage, Pool pool)
{
    // -1 presumably maps to D3DX_DEFAULT for width/height/mips (use the file's
    // values) and colorKey 0 disables color keying — NOTE(review): confirm.
    return FromFile(device, filename, -1, -1, -1, usage, Format.Unknown, pool, Filter.Default, Filter.Default, 0);
}
/// <summary>
/// Creates a <see cref="Texture"/> from a file with full control over size, format,
/// filtering and color key.
/// </summary>
/// <param name="device">The device.</param>
/// <param name="filename">The path of the image file to load.</param>
/// <param name="width">The width.</param>
/// <param name="height">The height.</param>
/// <param name="levelCount">The level count.</param>
/// <param name="usage">The usage.</param>
/// <param name="format">The format.</param>
/// <param name="pool">The pool.</param>
/// <param name="filter">The filter applied when resizing the image.</param>
/// <param name="mipFilter">The filter applied when generating mip levels.</param>
/// <param name="colorKey">The color key.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileExW([In] IDirect3DDevice9* pDevice,[In] const wchar_t* pSrcFile,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[In] void* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static Texture FromFile(Device device, string filename, int width, int height, int levelCount, Usage usage, Format format, Pool pool, Filter filter, Filter mipFilter, int colorKey)
{
    // No image-info pointer (IntPtr.Zero) and no palette are requested.
    return CreateFromFile(device, filename, width, height, levelCount, usage, format, pool, filter, mipFilter, colorKey, IntPtr.Zero, null);
}
/// <summary>
/// Creates a <see cref="Texture"/> from a file and returns information about the source image.
/// </summary>
/// <param name="device">The device.</param>
/// <param name="filename">The path of the image file to load.</param>
/// <param name="width">The width.</param>
/// <param name="height">The height.</param>
/// <param name="levelCount">The level count.</param>
/// <param name="usage">The usage.</param>
/// <param name="format">The format.</param>
/// <param name="pool">The pool.</param>
/// <param name="filter">The filter applied when resizing the image.</param>
/// <param name="mipFilter">The filter applied when generating mip levels.</param>
/// <param name="colorKey">The color key.</param>
/// <param name="imageInformation">Receives information about the source image.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileExW([In] IDirect3DDevice9* pDevice,[In] const wchar_t* pSrcFile,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[In] void* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static unsafe Texture FromFile(Device device, string filename, int width, int height, int levelCount, Usage usage, Format format, Pool pool, Filter filter, Filter mipFilter, int colorKey, out ImageInformation imageInformation)
{
    // Pin the out parameter so the native call can write the image info into it.
    fixed (void* imageInfoPtr = &imageInformation)
    {
        return CreateFromFile(device, filename, width, height, levelCount, usage, format, pool, filter, mipFilter, colorKey, (IntPtr)imageInfoPtr, null);
    }
}
/// <summary>
/// Creates a <see cref="Texture"/> from a file and returns both the source image
/// information and the image palette.
/// </summary>
/// <param name="device">The device.</param>
/// <param name="filename">The path of the image file to load.</param>
/// <param name="width">The width.</param>
/// <param name="height">The height.</param>
/// <param name="levelCount">The level count.</param>
/// <param name="usage">The usage.</param>
/// <param name="format">The format.</param>
/// <param name="pool">The pool.</param>
/// <param name="filter">The filter applied when resizing the image.</param>
/// <param name="mipFilter">The filter applied when generating mip levels.</param>
/// <param name="colorKey">The color key.</param>
/// <param name="imageInformation">Receives information about the source image.</param>
/// <param name="palette">Receives the 256-entry image palette.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileExW([In] IDirect3DDevice9* pDevice,[In] const wchar_t* pSrcFile,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[In] void* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static unsafe Texture FromFile(Device device, string filename, int width, int height, int levelCount, Usage usage, Format format, Pool pool, Filter filter, Filter mipFilter, int colorKey, out ImageInformation imageInformation, out PaletteEntry[] palette)
{
    // D3DX palettes are fixed at 256 entries.
    palette = new PaletteEntry[256];
    // Pin the out parameter so the native call can write the image info into it.
    fixed (void* imageInfoPtr = &imageInformation)
    {
        return CreateFromFile(device, filename, width, height, levelCount, usage, format, pool, filter, mipFilter, colorKey, (IntPtr)imageInfoPtr, palette);
    }
}
/// <summary>
/// Creates a <see cref="Texture"/> from an in-memory image file.
/// </summary>
/// <param name="device">The device.</param>
/// <param name="buffer">The buffer containing the encoded image file.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileInMemory([In] IDirect3DDevice9* pDevice,[In] const void* pSrcData,[In] unsigned int SrcDataSize,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static Texture FromMemory(Device device, byte[] buffer)
{
    Texture texture;
    unsafe
    {
        // Pin the managed buffer while native code reads from it.
        fixed (void* bufferPtr = buffer)
        {
            D3DX9.CreateTextureFromFileInMemory(device, (IntPtr)bufferPtr, buffer.Length, out texture);
        }
    }
    return texture;
}
/// <summary>
/// Creates a <see cref="Texture"/> from an in-memory image file with the given usage
/// and pool, taking size, format and mip count from the image itself.
/// </summary>
/// <param name="device">The device.</param>
/// <param name="buffer">The buffer containing the encoded image file.</param>
/// <param name="usage">The usage.</param>
/// <param name="pool">The pool.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileInMemoryEx([In] IDirect3DDevice9* pDevice,[In] const void* pSrcData,[In] unsigned int SrcDataSize,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[Out] D3DXIMAGE_INFO* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static Texture FromMemory(Device device, byte[] buffer, Usage usage, Pool pool)
{
    // -1 presumably maps to D3DX_DEFAULT for width/height/mips (use the image's
    // values) and colorKey 0 disables color keying — NOTE(review): confirm.
    return FromMemory(device, buffer, -1, -1, -1, usage, Format.Unknown, pool, Filter.Default, Filter.Default, 0);
}
/// <summary>
/// Creates a <see cref="Texture"/> from an in-memory image file with full control over
/// size, format, filtering and color key.
/// </summary>
/// <param name="device">The device.</param>
/// <param name="buffer">The buffer containing the encoded image file.</param>
/// <param name="width">The width.</param>
/// <param name="height">The height.</param>
/// <param name="levelCount">The level count.</param>
/// <param name="usage">The usage.</param>
/// <param name="format">The format.</param>
/// <param name="pool">The pool.</param>
/// <param name="filter">The filter applied when resizing the image.</param>
/// <param name="mipFilter">The filter applied when generating mip levels.</param>
/// <param name="colorKey">The color key.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileInMemoryEx([In] IDirect3DDevice9* pDevice,[In] const void* pSrcData,[In] unsigned int SrcDataSize,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[Out] D3DXIMAGE_INFO* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static Texture FromMemory(Device device, byte[] buffer, int width, int height, int levelCount, Usage usage, Format format, Pool pool, Filter filter, Filter mipFilter, int colorKey)
{
    // No image-info pointer (IntPtr.Zero) and no palette are requested.
    return CreateFromMemory(device, buffer, width, height, levelCount, usage, format, pool, filter, mipFilter, colorKey, IntPtr.Zero, null);
}
/// <summary>
/// Creates a <see cref="Texture"/> from an in-memory image file and returns information
/// about the source image.
/// </summary>
/// <param name="device">The device.</param>
/// <param name="buffer">The buffer containing the encoded image file.</param>
/// <param name="width">The width.</param>
/// <param name="height">The height.</param>
/// <param name="levelCount">The level count.</param>
/// <param name="usage">The usage.</param>
/// <param name="format">The format.</param>
/// <param name="pool">The pool.</param>
/// <param name="filter">The filter applied when resizing the image.</param>
/// <param name="mipFilter">The filter applied when generating mip levels.</param>
/// <param name="colorKey">The color key.</param>
/// <param name="imageInformation">Receives information about the source image.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileInMemoryEx([In] IDirect3DDevice9* pDevice,[In] const void* pSrcData,[In] unsigned int SrcDataSize,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[Out] D3DXIMAGE_INFO* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static unsafe Texture FromMemory(Device device, byte[] buffer, int width, int height, int levelCount, Usage usage, Format format, Pool pool, Filter filter, Filter mipFilter, int colorKey, out ImageInformation imageInformation)
{
    // Pin the out parameter so the native call can write the image info into it.
    fixed (void* imageInfoPtr = &imageInformation)
    {
        return CreateFromMemory(device, buffer, width, height, levelCount, usage, format, pool, filter, mipFilter, colorKey, (IntPtr)imageInfoPtr, null);
    }
}
/// <summary>
/// Creates a <see cref="Texture"/> from an in-memory image file and returns both the
/// source image information and the image palette.
/// </summary>
/// <param name="device">The device.</param>
/// <param name="buffer">The buffer containing the encoded image file.</param>
/// <param name="width">The width.</param>
/// <param name="height">The height.</param>
/// <param name="levelCount">The level count.</param>
/// <param name="usage">The usage.</param>
/// <param name="format">The format.</param>
/// <param name="pool">The pool.</param>
/// <param name="filter">The filter applied when resizing the image.</param>
/// <param name="mipFilter">The filter applied when generating mip levels.</param>
/// <param name="colorKey">The color key.</param>
/// <param name="imageInformation">Receives information about the source image.</param>
/// <param name="palette">Receives the 256-entry image palette.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileInMemoryEx([In] IDirect3DDevice9* pDevice,[In] const void* pSrcData,[In] unsigned int SrcDataSize,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[Out] D3DXIMAGE_INFO* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static unsafe Texture FromMemory(Device device, byte[] buffer, int width, int height, int levelCount, Usage usage, Format format, Pool pool, Filter filter, Filter mipFilter, int colorKey, out ImageInformation imageInformation, out PaletteEntry[] palette)
{
    // D3DX palettes are fixed at 256 entries.
    palette = new PaletteEntry[256];
    // Pin the out parameter so the native call can write the image info into it.
    fixed (void* imageInfoPtr = &imageInformation)
    {
        return CreateFromMemory(device, buffer, width, height, levelCount, usage, format, pool, filter, mipFilter, colorKey, (IntPtr)imageInfoPtr, palette);
    }
}
/// <summary>
/// Creates a <see cref="Texture"/> from a stream containing an encoded image file.
/// </summary>
/// <param name="device">The device.</param>
/// <param name="stream">The stream to read the encoded image from.</param>
/// <returns>A <see cref="Texture"/></returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileInMemory([In] IDirect3DDevice9* pDevice,[In] const void* pSrcData,[In] unsigned int SrcDataSize,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static Texture FromStream(Device device, Stream stream)
{
    Texture texture;
    var dataStream = stream as DataStream;
    if (dataStream != null)
    {
        // A DataStream exposes its backing memory directly, so no copy is needed:
        // read from the current position to the end of the stream.
        D3DX9.CreateTextureFromFileInMemory(device, dataStream.PositionPointer, (int)(dataStream.Length - dataStream.Position), out texture);
    }
    else
    {
        unsafe
        {
            // Arbitrary streams are materialized into a byte[] first, then pinned.
            var data = Utilities.ReadStream(stream);
            fixed (void* dataPtr = data)
                D3DX9.CreateTextureFromFileInMemory(device, (IntPtr)dataPtr, data.Length, out texture);
        }
    }
    return texture;
}
/// <summary>
/// Creates a <see cref="Texture"/> from a stream with the given usage and pool,
/// taking size, format and mip count from the encoded image itself.
/// </summary>
/// <param name="device">The device.</param>
/// <param name="stream">The stream to read the encoded image from.</param>
/// <param name="usage">The usage.</param>
/// <param name="pool">The pool.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileInMemoryEx([In] IDirect3DDevice9* pDevice,[In] const void* pSrcData,[In] unsigned int SrcDataSize,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[Out] D3DXIMAGE_INFO* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static Texture FromStream(Device device, Stream stream, Usage usage, Pool pool)
{
    // sizeBytes 0 means "use the remaining stream length"; -1 presumably maps to
    // D3DX_DEFAULT for width/height/mips and colorKey 0 disables color keying —
    // NOTE(review): confirm against the D3DX documentation.
    return FromStream(device, stream, 0, -1, -1, usage, Format.Unknown, pool, Filter.Default, Filter.Default, 0);
}
/// <summary>
/// Creates a <see cref="Texture"/> from a stream with full control over size, format,
/// filtering and color key, reading the remaining stream length.
/// </summary>
/// <param name="device">The device.</param>
/// <param name="stream">The stream to read the encoded image from.</param>
/// <param name="width">The width.</param>
/// <param name="height">The height.</param>
/// <param name="levelCount">The level count.</param>
/// <param name="usage">The usage.</param>
/// <param name="format">The format.</param>
/// <param name="pool">The pool.</param>
/// <param name="filter">The filter applied when resizing the image.</param>
/// <param name="mipFilter">The filter applied when generating mip levels.</param>
/// <param name="colorKey">The color key.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileInMemoryEx([In] IDirect3DDevice9* pDevice,[In] const void* pSrcData,[In] unsigned int SrcDataSize,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[Out] D3DXIMAGE_INFO* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static Texture FromStream(Device device, Stream stream, int width, int height, int levelCount, Usage usage, Format format, Pool pool, Filter filter, Filter mipFilter, int colorKey)
{
    // sizeBytes 0 means "use the remaining stream length".
    return FromStream(device, stream, 0, width, height, levelCount, usage, format, pool, filter, mipFilter, colorKey);
}
/// <summary>
/// Creates a <see cref="Texture"/> from a stream, reading an explicit number of bytes
/// and with full control over size, format, filtering and color key.
/// </summary>
/// <param name="device">The device.</param>
/// <param name="stream">The stream to read the encoded image from.</param>
/// <param name="sizeBytes">The number of bytes to read from the stream.</param>
/// <param name="width">The width.</param>
/// <param name="height">The height.</param>
/// <param name="levelCount">The level count.</param>
/// <param name="usage">The usage.</param>
/// <param name="format">The format.</param>
/// <param name="pool">The pool.</param>
/// <param name="filter">The filter applied when resizing the image.</param>
/// <param name="mipFilter">The filter applied when generating mip levels.</param>
/// <param name="colorKey">The color key.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileInMemoryEx([In] IDirect3DDevice9* pDevice,[In] const void* pSrcData,[In] unsigned int SrcDataSize,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[Out] D3DXIMAGE_INFO* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static Texture FromStream(Device device, Stream stream, int sizeBytes, int width, int height, int levelCount, Usage usage, Format format, Pool pool, Filter filter, Filter mipFilter, int colorKey)
{
    // No image-info pointer (IntPtr.Zero) and no palette are requested.
    return CreateFromStream(device, stream, sizeBytes, width, height, levelCount, usage, format, pool, filter, mipFilter, colorKey, IntPtr.Zero, null);
}
/// <summary>
/// Creates a <see cref="Texture"/> from a stream and returns information about the
/// source image.
/// </summary>
/// <param name="device">The device.</param>
/// <param name="stream">The stream to read the encoded image from.</param>
/// <param name="sizeBytes">The number of bytes to read from the stream.</param>
/// <param name="width">The width.</param>
/// <param name="height">The height.</param>
/// <param name="levelCount">The level count.</param>
/// <param name="usage">The usage.</param>
/// <param name="format">The format.</param>
/// <param name="pool">The pool.</param>
/// <param name="filter">The filter applied when resizing the image.</param>
/// <param name="mipFilter">The filter applied when generating mip levels.</param>
/// <param name="colorKey">The color key.</param>
/// <param name="imageInformation">Receives information about the source image.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileInMemoryEx([In] IDirect3DDevice9* pDevice,[In] const void* pSrcData,[In] unsigned int SrcDataSize,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[Out] D3DXIMAGE_INFO* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static unsafe Texture FromStream(Device device, Stream stream, int sizeBytes, int width, int height, int levelCount, Usage usage, Format format, Pool pool, Filter filter, Filter mipFilter, int colorKey, out ImageInformation imageInformation)
{
    // Pin the out parameter so the native call can write the image info into it.
    fixed (void* imageInfoPtr = &imageInformation)
    {
        return CreateFromStream(device, stream, sizeBytes, width, height, levelCount, usage, format, pool, filter, mipFilter, colorKey, (IntPtr)imageInfoPtr, null);
    }
}
/// <summary>
/// Creates a <see cref="Texture"/> from a stream, returning both image information and the palette.
/// </summary>
/// <param name="device">The device the texture is created on.</param>
/// <param name="stream">The stream holding the encoded image data.</param>
/// <param name="sizeBytes">Number of bytes to read, or 0 to read everything remaining in the stream.</param>
/// <param name="width">The requested width.</param>
/// <param name="height">The requested height.</param>
/// <param name="levelCount">The number of mip levels.</param>
/// <param name="usage">The usage.</param>
/// <param name="format">The pixel format.</param>
/// <param name="pool">The memory pool.</param>
/// <param name="filter">The image filter.</param>
/// <param name="mipFilter">The mip filter.</param>
/// <param name="colorKey">The color key.</param>
/// <param name="imageInformation">Receives a description of the source image.</param>
/// <param name="palette">Receives the palette entries (256 entries are allocated).</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileInMemoryEx([In] IDirect3DDevice9* pDevice,[In] const void* pSrcData,[In] unsigned int SrcDataSize,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[Out] D3DXIMAGE_INFO* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
public static unsafe Texture FromStream(Device device, Stream stream, int sizeBytes, int width, int height, int levelCount, Usage usage, Format format, Pool pool, Filter filter, Filter mipFilter, int colorKey, out ImageInformation imageInformation, out PaletteEntry[] palette)
{
    // D3DX fills at most 256 palette entries.
    var paletteEntries = new PaletteEntry[256];
    palette = paletteEntries;
    fixed (void* imageInfoPtr = &imageInformation)
    {
        return CreateFromStream(device, stream, sizeBytes, width, height, levelCount, usage, format, pool, filter, mipFilter, colorKey, new IntPtr(imageInfoPtr), paletteEntries);
    }
}
/// <summary>
/// Creates a <see cref="Texture"/> from an in-memory byte array.
/// </summary>
/// <param name="device">The device the texture is created on.</param>
/// <param name="buffer">The buffer holding the encoded image data; its full length is consumed.</param>
/// <param name="width">The requested width.</param>
/// <param name="height">The requested height.</param>
/// <param name="levelCount">The number of mip levels.</param>
/// <param name="usage">The usage.</param>
/// <param name="format">The pixel format.</param>
/// <param name="pool">The memory pool.</param>
/// <param name="filter">The image filter.</param>
/// <param name="mipFilter">The mip filter.</param>
/// <param name="colorKey">The color key.</param>
/// <param name="imageInformation">Pointer to an image-information structure to fill, or <see cref="IntPtr.Zero"/>.</param>
/// <param name="palette">Palette entries to fill, or <c>null</c>.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileInMemoryEx([In] IDirect3DDevice9* pDevice,[In] const void* pSrcData,[In] unsigned int SrcDataSize,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[Out] D3DXIMAGE_INFO* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
private static unsafe Texture CreateFromMemory(Device device, byte[] buffer, int width, int height, int levelCount, Usage usage, Format format, Pool pool, Filter filter, Filter mipFilter, int colorKey, IntPtr imageInformation, PaletteEntry[] palette)
{
    // Pin the managed buffer so the native loader can read it in place.
    fixed (void* bufferPtr = buffer)
    {
        return CreateFromPointer(device, new IntPtr(bufferPtr), buffer.Length, width, height, levelCount, usage, format, pool, filter, mipFilter, colorKey, imageInformation, palette);
    }
}
/// <summary>
/// Creates a <see cref="Texture"/> from a stream.
/// </summary>
/// <param name="device">The device the texture is created on.</param>
/// <param name="stream">The stream holding the encoded image data, positioned at the start of the data.</param>
/// <param name="sizeBytes">Number of bytes to read, or 0 to read everything remaining in the stream.</param>
/// <param name="width">The requested width.</param>
/// <param name="height">The requested height.</param>
/// <param name="levelCount">The number of mip levels.</param>
/// <param name="usage">The usage.</param>
/// <param name="format">The pixel format.</param>
/// <param name="pool">The memory pool.</param>
/// <param name="filter">The image filter.</param>
/// <param name="mipFilter">The mip filter.</param>
/// <param name="colorKey">The color key.</param>
/// <param name="imageInformation">Pointer to an image-information structure to fill, or <see cref="IntPtr.Zero"/>.</param>
/// <param name="palette">Palette entries to fill, or <c>null</c>.</param>
/// <returns>A <see cref="Texture"/></returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileInMemoryEx([In] IDirect3DDevice9* pDevice,[In] const void* pSrcData,[In] unsigned int SrcDataSize,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[Out] D3DXIMAGE_INFO* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
private static unsafe Texture CreateFromStream(Device device, Stream stream, int sizeBytes, int width, int height, int levelCount, Usage usage, Format format, Pool pool, Filter filter, Filter mipFilter, int colorKey, IntPtr imageInformation, PaletteEntry[] palette)
{
    Texture texture;
    long startPosition = stream.Position;
    // A size of 0 means "consume everything remaining in the stream".
    sizeBytes = sizeBytes == 0 ? (int)(stream.Length - startPosition) : sizeBytes;
    if (stream is DataStream)
    {
        // DataStream exposes its backing memory directly; no managed copy is needed.
        texture = CreateFromPointer(
            device,
            ((DataStream)stream).PositionPointer,
            sizeBytes,
            width,
            height,
            levelCount,
            usage,
            format,
            pool,
            filter,
            mipFilter,
            colorKey,
            imageInformation,
            palette
            );
    }
    else
    {
        // NOTE(review): for non-DataStream inputs the entire remainder of the
        // stream is buffered and passed to the native loader, even when
        // sizeBytes is smaller — this matches the pre-existing behavior.
        var data = Utilities.ReadStream(stream);
        fixed (void* pData = data)
            texture = CreateFromPointer(
                device,
                (IntPtr)pData,
                data.Length,
                width,
                height,
                levelCount,
                usage,
                format,
                pool,
                filter,
                mipFilter,
                colorKey,
                imageInformation,
                palette
                );
    }
    // BUGFIX: advance the stream past the consumed bytes relative to where
    // reading started. The previous code assigned the absolute position
    // "sizeBytes", which is only correct when the stream initially sat at
    // position 0.
    stream.Position = startPosition + sizeBytes;
    return texture;
}
/// <summary>
/// Creates a <see cref="Texture"/> from unmanaged memory.
/// </summary>
/// <param name="device">The device the texture is created on.</param>
/// <param name="pointer">Pointer to the encoded image data.</param>
/// <param name="sizeInBytes">The size of the data, in bytes.</param>
/// <param name="width">The requested width.</param>
/// <param name="height">The requested height.</param>
/// <param name="levelCount">The number of mip levels.</param>
/// <param name="usage">The usage.</param>
/// <param name="format">The pixel format.</param>
/// <param name="pool">The memory pool.</param>
/// <param name="filter">The image filter.</param>
/// <param name="mipFilter">The mip filter.</param>
/// <param name="colorKey">The color key.</param>
/// <param name="imageInformation">Pointer to an image-information structure to fill, or <see cref="IntPtr.Zero"/>.</param>
/// <param name="palette">Palette entries to fill, or <c>null</c>.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileInMemoryEx([In] IDirect3DDevice9* pDevice,[In] const void* pSrcData,[In] unsigned int SrcDataSize,[In] unsigned int Size,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[Out] D3DXIMAGE_INFO* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
private static unsafe Texture CreateFromPointer(Device device, IntPtr pointer, int sizeInBytes, int width, int height, int levelCount, Usage usage, Format format, Pool pool, Filter filter, Filter mipFilter, int colorKey, IntPtr imageInformation, PaletteEntry[] palette)
{
    Texture texture;
    // The color key arrives as an int but the native API takes a BGRA color;
    // reinterpret the same 32 bits in place.
    D3DX9.CreateTextureFromFileInMemoryEx(
        device, pointer, sizeInBytes,
        width, height, levelCount,
        (int)usage, format, pool, (int)filter, (int)mipFilter,
        *(RawColorBGRA*)&colorKey,
        imageInformation, palette,
        out texture);
    return texture;
}
/// <summary>
/// Creates a <see cref="Texture"/> from a file on disk.
/// </summary>
/// <param name="device">The device the texture is created on.</param>
/// <param name="fileName">Path of the image file to load.</param>
/// <param name="width">The requested width.</param>
/// <param name="height">The requested height.</param>
/// <param name="levelCount">The number of mip levels.</param>
/// <param name="usage">The usage.</param>
/// <param name="format">The pixel format.</param>
/// <param name="pool">The memory pool.</param>
/// <param name="filter">The image filter.</param>
/// <param name="mipFilter">The mip filter.</param>
/// <param name="colorKey">The color key.</param>
/// <param name="imageInformation">Pointer to an image-information structure to fill, or <see cref="IntPtr.Zero"/>.</param>
/// <param name="palette">Palette entries to fill, or <c>null</c>.</param>
/// <returns>
/// A <see cref="Texture"/>
/// </returns>
/// <unmanaged>HRESULT D3DXCreateTextureFromFileExW([In] IDirect3DDevice9* pDevice,[In] const wchar_t* pSrcFile,[In] unsigned int Width,[In] unsigned int Height,[In] unsigned int MipLevels,[In] unsigned int Usage,[In] D3DFORMAT Format,[In] D3DPOOL Pool,[In] unsigned int Filter,[In] unsigned int MipFilter,[In] D3DCOLOR ColorKey,[Out] D3DXIMAGE_INFO* pSrcInfo,[Out, Buffer] PALETTEENTRY* pPalette,[In] IDirect3DTexture9** ppTexture)</unmanaged>
private static unsafe Texture CreateFromFile(Device device, string fileName, int width, int height, int levelCount, Usage usage, Format format, Pool pool, Filter filter, Filter mipFilter, int colorKey, IntPtr imageInformation, PaletteEntry[] palette)
{
    Texture texture;
    // Reinterpret the int color key as the BGRA color the native API expects.
    D3DX9.CreateTextureFromFileExW(
        device, fileName,
        width, height, levelCount,
        (int)usage, format, pool, (int)filter, (int)mipFilter,
        *(RawColorBGRA*)&colorKey,
        imageInformation, palette,
        out texture);
    return texture;
}
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gagvr = Google.Ads.GoogleAds.V9.Resources;
using gax = Google.Api.Gax;
using sys = System;
namespace Google.Ads.GoogleAds.V9.Resources
{
/// <summary>Resource name for the <c>CampaignBudget</c> resource.</summary>
// NOTE: generated code ("DO NOT EDIT") — only comments are added here; the code
// must stay in sync with the generator's output.
public sealed partial class CampaignBudgetName : gax::IResourceName, sys::IEquatable<CampaignBudgetName>
{
    /// <summary>The possible contents of <see cref="CampaignBudgetName"/>.</summary>
    public enum ResourceNameType
    {
        /// <summary>An unparsed resource name.</summary>
        Unparsed = 0,
        /// <summary>
        /// A resource name with pattern <c>customers/{customer_id}/campaignBudgets/{campaign_budget_id}</c>.
        /// </summary>
        CustomerCampaignBudget = 1,
    }
    // Template used both to expand IDs into a resource-name string (Format*) and
    // to parse a string back into its component IDs (TryParse).
    private static gax::PathTemplate s_customerCampaignBudget = new gax::PathTemplate("customers/{customer_id}/campaignBudgets/{campaign_budget_id}");
    /// <summary>Creates a <see cref="CampaignBudgetName"/> containing an unparsed resource name.</summary>
    /// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
    /// <returns>
    /// A new instance of <see cref="CampaignBudgetName"/> containing the provided
    /// <paramref name="unparsedResourceName"/>.
    /// </returns>
    public static CampaignBudgetName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
        new CampaignBudgetName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));
    /// <summary>
    /// Creates a <see cref="CampaignBudgetName"/> with the pattern
    /// <c>customers/{customer_id}/campaignBudgets/{campaign_budget_id}</c>.
    /// </summary>
    /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="campaignBudgetId">The <c>CampaignBudget</c> ID. Must not be <c>null</c> or empty.</param>
    /// <returns>A new instance of <see cref="CampaignBudgetName"/> constructed from the provided ids.</returns>
    public static CampaignBudgetName FromCustomerCampaignBudget(string customerId, string campaignBudgetId) =>
        new CampaignBudgetName(ResourceNameType.CustomerCampaignBudget, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), campaignBudgetId: gax::GaxPreconditions.CheckNotNullOrEmpty(campaignBudgetId, nameof(campaignBudgetId)));
    /// <summary>
    /// Formats the IDs into the string representation of this <see cref="CampaignBudgetName"/> with pattern
    /// <c>customers/{customer_id}/campaignBudgets/{campaign_budget_id}</c>.
    /// </summary>
    /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="campaignBudgetId">The <c>CampaignBudget</c> ID. Must not be <c>null</c> or empty.</param>
    /// <returns>
    /// The string representation of this <see cref="CampaignBudgetName"/> with pattern
    /// <c>customers/{customer_id}/campaignBudgets/{campaign_budget_id}</c>.
    /// </returns>
    public static string Format(string customerId, string campaignBudgetId) =>
        FormatCustomerCampaignBudget(customerId, campaignBudgetId);
    /// <summary>
    /// Formats the IDs into the string representation of this <see cref="CampaignBudgetName"/> with pattern
    /// <c>customers/{customer_id}/campaignBudgets/{campaign_budget_id}</c>.
    /// </summary>
    /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="campaignBudgetId">The <c>CampaignBudget</c> ID. Must not be <c>null</c> or empty.</param>
    /// <returns>
    /// The string representation of this <see cref="CampaignBudgetName"/> with pattern
    /// <c>customers/{customer_id}/campaignBudgets/{campaign_budget_id}</c>.
    /// </returns>
    public static string FormatCustomerCampaignBudget(string customerId, string campaignBudgetId) =>
        s_customerCampaignBudget.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), gax::GaxPreconditions.CheckNotNullOrEmpty(campaignBudgetId, nameof(campaignBudgetId)));
    /// <summary>
    /// Parses the given resource name string into a new <see cref="CampaignBudgetName"/> instance.
    /// </summary>
    /// <remarks>
    /// To parse successfully, the resource name must be formatted as one of the following:
    /// <list type="bullet">
    /// <item><description><c>customers/{customer_id}/campaignBudgets/{campaign_budget_id}</c></description></item>
    /// </list>
    /// </remarks>
    /// <param name="campaignBudgetName">The resource name in string form. Must not be <c>null</c>.</param>
    /// <returns>The parsed <see cref="CampaignBudgetName"/> if successful.</returns>
    public static CampaignBudgetName Parse(string campaignBudgetName) => Parse(campaignBudgetName, false);
    /// <summary>
    /// Parses the given resource name string into a new <see cref="CampaignBudgetName"/> instance; optionally
    /// allowing an unparseable resource name.
    /// </summary>
    /// <remarks>
    /// To parse successfully, the resource name must be formatted as one of the following:
    /// <list type="bullet">
    /// <item><description><c>customers/{customer_id}/campaignBudgets/{campaign_budget_id}</c></description></item>
    /// </list>
    /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
    /// </remarks>
    /// <param name="campaignBudgetName">The resource name in string form. Must not be <c>null</c>.</param>
    /// <param name="allowUnparsed">
    /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
    /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
    /// specified.
    /// </param>
    /// <returns>The parsed <see cref="CampaignBudgetName"/> if successful.</returns>
    public static CampaignBudgetName Parse(string campaignBudgetName, bool allowUnparsed) =>
        TryParse(campaignBudgetName, allowUnparsed, out CampaignBudgetName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");
    /// <summary>
    /// Tries to parse the given resource name string into a new <see cref="CampaignBudgetName"/> instance.
    /// </summary>
    /// <remarks>
    /// To parse successfully, the resource name must be formatted as one of the following:
    /// <list type="bullet">
    /// <item><description><c>customers/{customer_id}/campaignBudgets/{campaign_budget_id}</c></description></item>
    /// </list>
    /// </remarks>
    /// <param name="campaignBudgetName">The resource name in string form. Must not be <c>null</c>.</param>
    /// <param name="result">
    /// When this method returns, the parsed <see cref="CampaignBudgetName"/>, or <c>null</c> if parsing failed.
    /// </param>
    /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
    public static bool TryParse(string campaignBudgetName, out CampaignBudgetName result) =>
        TryParse(campaignBudgetName, false, out result);
    /// <summary>
    /// Tries to parse the given resource name string into a new <see cref="CampaignBudgetName"/> instance;
    /// optionally allowing an unparseable resource name.
    /// </summary>
    /// <remarks>
    /// To parse successfully, the resource name must be formatted as one of the following:
    /// <list type="bullet">
    /// <item><description><c>customers/{customer_id}/campaignBudgets/{campaign_budget_id}</c></description></item>
    /// </list>
    /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
    /// </remarks>
    /// <param name="campaignBudgetName">The resource name in string form. Must not be <c>null</c>.</param>
    /// <param name="allowUnparsed">
    /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
    /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
    /// specified.
    /// </param>
    /// <param name="result">
    /// When this method returns, the parsed <see cref="CampaignBudgetName"/>, or <c>null</c> if parsing failed.
    /// </param>
    /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
    public static bool TryParse(string campaignBudgetName, bool allowUnparsed, out CampaignBudgetName result)
    {
        gax::GaxPreconditions.CheckNotNull(campaignBudgetName, nameof(campaignBudgetName));
        gax::TemplatedResourceName resourceName;
        // First try the known pattern; resourceName[0] is customer_id, [1] is campaign_budget_id.
        if (s_customerCampaignBudget.TryParseName(campaignBudgetName, out resourceName))
        {
            result = FromCustomerCampaignBudget(resourceName[0], resourceName[1]);
            return true;
        }
        // Fall back to storing the raw name only when the caller opted in.
        if (allowUnparsed)
        {
            if (gax::UnparsedResourceName.TryParse(campaignBudgetName, out gax::UnparsedResourceName unparsedResourceName))
            {
                result = FromUnparsed(unparsedResourceName);
                return true;
            }
        }
        result = null;
        return false;
    }
    // All public factories funnel through here; unused ID components stay null.
    private CampaignBudgetName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string campaignBudgetId = null, string customerId = null)
    {
        Type = type;
        UnparsedResource = unparsedResourceName;
        CampaignBudgetId = campaignBudgetId;
        CustomerId = customerId;
    }
    /// <summary>
    /// Constructs a new instance of a <see cref="CampaignBudgetName"/> class from the component parts of pattern
    /// <c>customers/{customer_id}/campaignBudgets/{campaign_budget_id}</c>
    /// </summary>
    /// <param name="customerId">The <c>Customer</c> ID. Must not be <c>null</c> or empty.</param>
    /// <param name="campaignBudgetId">The <c>CampaignBudget</c> ID. Must not be <c>null</c> or empty.</param>
    public CampaignBudgetName(string customerId, string campaignBudgetId) : this(ResourceNameType.CustomerCampaignBudget, customerId: gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)), campaignBudgetId: gax::GaxPreconditions.CheckNotNullOrEmpty(campaignBudgetId, nameof(campaignBudgetId)))
    {
    }
    /// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
    public ResourceNameType Type { get; }
    /// <summary>
    /// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
    /// unparsed resource name.
    /// </summary>
    public gax::UnparsedResourceName UnparsedResource { get; }
    /// <summary>
    /// The <c>CampaignBudget</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource
    /// name.
    /// </summary>
    public string CampaignBudgetId { get; }
    /// <summary>
    /// The <c>Customer</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
    /// </summary>
    public string CustomerId { get; }
    /// <summary>Whether this instance contains a resource name with a known pattern.</summary>
    public bool IsKnownPattern => Type != ResourceNameType.Unparsed;
    /// <summary>The string representation of the resource name.</summary>
    /// <returns>The string representation of the resource name.</returns>
    public override string ToString()
    {
        switch (Type)
        {
            case ResourceNameType.Unparsed: return UnparsedResource.ToString();
            case ResourceNameType.CustomerCampaignBudget: return s_customerCampaignBudget.Expand(CustomerId, CampaignBudgetId);
            default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
        }
    }
    /// <summary>Returns a hash code for this resource name.</summary>
    // Equality and hashing are both delegated to the string form, so two names
    // are equal exactly when they render to the same resource-name string.
    public override int GetHashCode() => ToString().GetHashCode();
    /// <inheritdoc/>
    public override bool Equals(object obj) => Equals(obj as CampaignBudgetName);
    /// <inheritdoc/>
    public bool Equals(CampaignBudgetName other) => ToString() == other?.ToString();
    /// <inheritdoc/>
    public static bool operator ==(CampaignBudgetName a, CampaignBudgetName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);
    /// <inheritdoc/>
    public static bool operator !=(CampaignBudgetName a, CampaignBudgetName b) => !(a == b);
}
// NOTE: generated code ("DO NOT EDIT") — only comments are added here.
public partial class CampaignBudget
{
    /// <summary>
    /// <see cref="gagvr::CampaignBudgetName"/>-typed view over the <see cref="ResourceName"/> resource name
    /// property.
    /// </summary>
    internal CampaignBudgetName ResourceNameAsCampaignBudgetName
    {
        // allowUnparsed: names that do not match the known pattern are kept as
        // unparsed resource names rather than causing Parse to throw.
        get => string.IsNullOrEmpty(ResourceName) ? null : gagvr::CampaignBudgetName.Parse(ResourceName, allowUnparsed: true);
        set => ResourceName = value?.ToString() ?? "";
    }
    /// <summary>
    /// <see cref="gagvr::CampaignBudgetName"/>-typed view over the <see cref="Name"/> resource name property.
    /// </summary>
    internal CampaignBudgetName CampaignBudgetName
    {
        get => string.IsNullOrEmpty(Name) ? null : gagvr::CampaignBudgetName.Parse(Name, allowUnparsed: true);
        set => Name = value?.ToString() ?? "";
    }
}
}
| |
using Lucene.Net.Codecs.Lucene40;
using Lucene.Net.Diagnostics;
using Lucene.Net.Support;
using System;
using System.Diagnostics;
using System.Reflection;
namespace Lucene.Net.Codecs.Compressing
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using ArrayUtil = Lucene.Net.Util.ArrayUtil;
using BufferedChecksumIndexInput = Lucene.Net.Store.BufferedChecksumIndexInput;
using ByteArrayDataInput = Lucene.Net.Store.ByteArrayDataInput;
using BytesRef = Lucene.Net.Util.BytesRef;
using ChecksumIndexInput = Lucene.Net.Store.ChecksumIndexInput;
using CorruptIndexException = Lucene.Net.Index.CorruptIndexException;
using DataInput = Lucene.Net.Store.DataInput;
using DataOutput = Lucene.Net.Store.DataOutput;
using Directory = Lucene.Net.Store.Directory;
using FieldInfo = Lucene.Net.Index.FieldInfo;
using FieldInfos = Lucene.Net.Index.FieldInfos;
using IndexFileNames = Lucene.Net.Index.IndexFileNames;
using IndexInput = Lucene.Net.Store.IndexInput;
using IOContext = Lucene.Net.Store.IOContext;
using IOUtils = Lucene.Net.Util.IOUtils;
using PackedInt32s = Lucene.Net.Util.Packed.PackedInt32s;
using SegmentInfo = Lucene.Net.Index.SegmentInfo;
using StoredFieldVisitor = Lucene.Net.Index.StoredFieldVisitor;
/// <summary>
/// <see cref="StoredFieldsReader"/> impl for <see cref="CompressingStoredFieldsFormat"/>.
/// <para/>
/// @lucene.experimental
/// </summary>
public sealed class CompressingStoredFieldsReader : StoredFieldsReader
{
// Do not reuse the decompression buffer when there is more than 32kb to decompress
private static readonly int BUFFER_REUSE_THRESHOLD = 1 << 15;
private readonly int version;                                    // format version read from the index-file header
private readonly FieldInfos fieldInfos;                          // per-segment field metadata
private readonly CompressingStoredFieldsIndexReader indexReader; // maps a docID to its chunk's start pointer
private readonly long maxPointer;                                // end offset of the stored-fields data
private readonly IndexInput fieldsStream;                        // the stored-fields data file
private readonly int chunkSize;                                  // chunk size, or -1 for versions before VERSION_BIG_CHUNKS
private readonly int packedIntsVersion;                          // version used to decode packed-int blocks
private readonly CompressionMode compressionMode;
private readonly Decompressor decompressor;
private readonly BytesRef bytes;                                 // scratch buffer for decompressed data
private readonly int numDocs;                                    // document count of the segment
private bool closed;                                             // set once Dispose has released fieldsStream
// Used by Clone(): copies every stateful member so the new reader can be used
// independently of (and concurrently with) the original.
private CompressingStoredFieldsReader(CompressingStoredFieldsReader reader)
{
    this.version = reader.version;
    this.fieldInfos = reader.fieldInfos;
    // Streams, the index reader and the decompressor carry per-reader state
    // (file positions, internal buffers), so they are cloned rather than shared.
    this.fieldsStream = (IndexInput)reader.fieldsStream.Clone();
    this.indexReader = (CompressingStoredFieldsIndexReader)reader.indexReader.Clone();
    this.maxPointer = reader.maxPointer;
    this.chunkSize = reader.chunkSize;
    this.packedIntsVersion = reader.packedIntsVersion;
    this.compressionMode = reader.compressionMode;
    this.decompressor = (Decompressor)reader.decompressor.Clone();
    this.numDocs = reader.numDocs;
    // Fresh scratch buffer with the same capacity; never shared between clones.
    this.bytes = new BytesRef(reader.bytes.Bytes.Length);
    this.closed = false;
}
/// <summary>
/// Sole constructor. Opens the stored-fields index and data files for the segment,
/// validates codec headers and version consistency, and loads the chunk index into
/// memory. On any failure the partially opened resources are disposed.
/// </summary>
public CompressingStoredFieldsReader(Directory d, SegmentInfo si, string segmentSuffix, FieldInfos fn, IOContext context, string formatName, CompressionMode compressionMode)
{
    this.compressionMode = compressionMode;
    string segment = si.Name;
    bool success = false;
    fieldInfos = fn;
    numDocs = si.DocCount;
    ChecksumIndexInput indexStream = null;
    try
    {
        string indexStreamFN = IndexFileNames.SegmentFileName(segment, segmentSuffix, Lucene40StoredFieldsWriter.FIELDS_INDEX_EXTENSION);
        string fieldsStreamFN = IndexFileNames.SegmentFileName(segment, segmentSuffix, Lucene40StoredFieldsWriter.FIELDS_EXTENSION);
        // Load the index into memory
        indexStream = d.OpenChecksumInput(indexStreamFN, context);
        string codecNameIdx = formatName + CompressingStoredFieldsWriter.CODEC_SFX_IDX;
        version = CodecUtil.CheckHeader(indexStream, codecNameIdx, CompressingStoredFieldsWriter.VERSION_START, CompressingStoredFieldsWriter.VERSION_CURRENT);
        if (Debugging.AssertsEnabled) Debugging.Assert(CodecUtil.HeaderLength(codecNameIdx) == indexStream.GetFilePointer());
        indexReader = new CompressingStoredFieldsIndexReader(indexStream, si);
        // Newer versions record the data file's end offset and a checksum footer
        // in the index file; older versions only allow an EOF check.
        long maxPointer = -1;
        if (version >= CompressingStoredFieldsWriter.VERSION_CHECKSUM)
        {
            maxPointer = indexStream.ReadVInt64();
            CodecUtil.CheckFooter(indexStream);
        }
        else
        {
#pragma warning disable 612, 618
            CodecUtil.CheckEOF(indexStream);
#pragma warning restore 612, 618
        }
        indexStream.Dispose();
        // Null out so the finally block does not dispose it a second time on failure.
        indexStream = null;
        // Open the data file and read metadata
        fieldsStream = d.OpenInput(fieldsStreamFN, context);
        if (version >= CompressingStoredFieldsWriter.VERSION_CHECKSUM)
        {
            // The recorded end offset plus the footer must account for the whole file.
            if (maxPointer + CodecUtil.FooterLength() != fieldsStream.Length)
            {
                throw new CorruptIndexException("Invalid fieldsStream maxPointer (file truncated?): maxPointer=" + maxPointer + ", length=" + fieldsStream.Length);
            }
        }
        else
        {
            maxPointer = fieldsStream.Length;
        }
        this.maxPointer = maxPointer;
        string codecNameDat = formatName + CompressingStoredFieldsWriter.CODEC_SFX_DAT;
        int fieldsVersion = CodecUtil.CheckHeader(fieldsStream, codecNameDat, CompressingStoredFieldsWriter.VERSION_START, CompressingStoredFieldsWriter.VERSION_CURRENT);
        // Index and data files must have been written by the same format version.
        if (version != fieldsVersion)
        {
            throw new CorruptIndexException("Version mismatch between stored fields index and data: " + version + " != " + fieldsVersion);
        }
        if (Debugging.AssertsEnabled) Debugging.Assert(CodecUtil.HeaderLength(codecNameDat) == fieldsStream.GetFilePointer());
        // The chunk size is only stored from VERSION_BIG_CHUNKS on; -1 marks "unknown".
        if (version >= CompressingStoredFieldsWriter.VERSION_BIG_CHUNKS)
        {
            chunkSize = fieldsStream.ReadVInt32();
        }
        else
        {
            chunkSize = -1;
        }
        packedIntsVersion = fieldsStream.ReadVInt32();
        decompressor = compressionMode.NewDecompressor();
        this.bytes = new BytesRef();
        success = true;
    }
    finally
    {
        if (!success)
        {
            IOUtils.DisposeWhileHandlingException(this, indexStream);
        }
    }
}
/// <summary>Verifies the reader has not been disposed.</summary>
/// <exception cref="ObjectDisposedException"> If this FieldsReader is disposed. </exception>
private void EnsureOpen()
{
    if (!closed)
    {
        return;
    }
    throw new ObjectDisposedException(this.GetType().FullName, "this FieldsReader is closed");
}
/// <summary>
/// Dispose the underlying <see cref="IndexInput"/>s. Disposing an already-closed
/// reader is a no-op.
/// </summary>
protected override void Dispose(bool disposing)
{
    if (closed)
    {
        return;
    }
    // Mark closed only after the stream has been released successfully,
    // mirroring the original order of operations.
    IOUtils.Dispose(fieldsStream);
    closed = true;
}
/// <summary>
/// Decodes one stored field from <paramref name="in"/> and hands its value to
/// <paramref name="visitor"/>. The low bits of <paramref name="bits"/> select
/// the field's value type.
/// </summary>
private static void ReadField(DataInput @in, StoredFieldVisitor visitor, FieldInfo info, int bits)
{
    int fieldType = bits & CompressingStoredFieldsWriter.TYPE_MASK;
    switch (fieldType)
    {
        case CompressingStoredFieldsWriter.BYTE_ARR:
        {
            // Length-prefixed raw bytes.
            int numBytes = @in.ReadVInt32();
            var payload = new byte[numBytes];
            @in.ReadBytes(payload, 0, numBytes);
            visitor.BinaryField(info, payload);
            break;
        }
        case CompressingStoredFieldsWriter.STRING:
        {
            // Length-prefixed UTF-8 bytes, decoded to a string.
            int numBytes = @in.ReadVInt32();
            var payload = new byte[numBytes];
            @in.ReadBytes(payload, 0, numBytes);
#pragma warning disable 612, 618
            visitor.StringField(info, IOUtils.CHARSET_UTF_8.GetString(payload));
#pragma warning restore 612, 618
            break;
        }
        case CompressingStoredFieldsWriter.NUMERIC_INT32:
            visitor.Int32Field(info, @in.ReadInt32());
            break;
        case CompressingStoredFieldsWriter.NUMERIC_SINGLE:
            visitor.SingleField(info, J2N.BitConversion.Int32BitsToSingle(@in.ReadInt32()));
            break;
        case CompressingStoredFieldsWriter.NUMERIC_INT64:
            visitor.Int64Field(info, @in.ReadInt64());
            break;
        case CompressingStoredFieldsWriter.NUMERIC_DOUBLE:
            visitor.DoubleField(info, J2N.BitConversion.Int64BitsToDouble(@in.ReadInt64()));
            break;
        default:
            throw new InvalidOperationException("Unknown type flag: " + bits.ToString("x"));
    }
}
/// <summary>
/// Advances <paramref name="in"/> past one stored field without materializing
/// its value. Must consume exactly the bytes that <c>ReadField</c> would.
/// </summary>
private static void SkipField(DataInput @in, int bits)
{
    int fieldType = bits & CompressingStoredFieldsWriter.TYPE_MASK;
    switch (fieldType)
    {
        case CompressingStoredFieldsWriter.BYTE_ARR:
        case CompressingStoredFieldsWriter.STRING:
        {
            // Variable-length payloads carry a byte-length prefix.
            int numBytes = @in.ReadVInt32();
            @in.SkipBytes(numBytes);
            break;
        }
        case CompressingStoredFieldsWriter.NUMERIC_INT32:
        case CompressingStoredFieldsWriter.NUMERIC_SINGLE:
            @in.ReadInt32();
            break;
        case CompressingStoredFieldsWriter.NUMERIC_INT64:
        case CompressingStoredFieldsWriter.NUMERIC_DOUBLE:
            @in.ReadInt64();
            break;
        default:
            throw new InvalidOperationException("Unknown type flag: " + bits.ToString("x"));
    }
}
/// <summary>
/// Reads the stored fields of <paramref name="docID"/> and feeds them to
/// <paramref name="visitor"/>. Seeks to the chunk containing the document,
/// decodes the per-document field counts and lengths (either a shared value or
/// a packed-ints block), decompresses the document bytes (streaming for big
/// chunks), then visits each field until done or the visitor says STOP.
/// </summary>
/// <exception cref="CorruptIndexException">if the chunk header is inconsistent.</exception>
public override void VisitDocument(int docID, StoredFieldVisitor visitor)
{
    fieldsStream.Seek(indexReader.GetStartPointer(docID));
    int docBase = fieldsStream.ReadVInt32();
    int chunkDocs = fieldsStream.ReadVInt32();
    if (docID < docBase || docID >= docBase + chunkDocs || docBase + chunkDocs > numDocs)
    {
        throw new CorruptIndexException("Corrupted: docID=" + docID + ", docBase=" + docBase + ", chunkDocs=" + chunkDocs + ", numDocs=" + numDocs + " (resource=" + fieldsStream + ")");
    }
    int numStoredFields, offset, length, totalLength;
    if (chunkDocs == 1)
    {
        // Single-document chunk: counts are stored inline, no packing.
        numStoredFields = fieldsStream.ReadVInt32();
        offset = 0;
        length = fieldsStream.ReadVInt32();
        totalLength = length;
    }
    else
    {
        // Decode the per-document stored-field counts. bitsPerStoredFields == 0
        // means all documents in the chunk share a single value.
        int bitsPerStoredFields = fieldsStream.ReadVInt32();
        if (bitsPerStoredFields == 0)
        {
            numStoredFields = fieldsStream.ReadVInt32();
        }
        else if (bitsPerStoredFields > 31)
        {
            throw new CorruptIndexException("bitsPerStoredFields=" + bitsPerStoredFields + " (resource=" + fieldsStream + ")");
        }
        else
        {
            // Random-access read of just this document's entry, then skip the
            // rest of the packed block.
            long filePointer = fieldsStream.GetFilePointer();
            PackedInt32s.Reader reader = PackedInt32s.GetDirectReaderNoHeader(fieldsStream, PackedInt32s.Format.PACKED, packedIntsVersion, chunkDocs, bitsPerStoredFields);
            numStoredFields = (int)(reader.Get(docID - docBase));
            fieldsStream.Seek(filePointer + PackedInt32s.Format.PACKED.ByteCount(packedIntsVersion, chunkDocs, bitsPerStoredFields));
        }
        // Decode the per-document serialized lengths the same way.
        int bitsPerLength = fieldsStream.ReadVInt32();
        if (bitsPerLength == 0)
        {
            length = fieldsStream.ReadVInt32();
            offset = (docID - docBase) * length;
            totalLength = chunkDocs * length;
        }
        else if (bitsPerLength > 31) // BUG FIX: previously tested bitsPerStoredFields here
        {
            throw new CorruptIndexException("bitsPerLength=" + bitsPerLength + " (resource=" + fieldsStream + ")");
        }
        else
        {
            // Sum lengths before this doc (offset), this doc's length, and the
            // remainder so we know the chunk's total decompressed size.
            PackedInt32s.IReaderIterator it = PackedInt32s.GetReaderIteratorNoHeader(fieldsStream, PackedInt32s.Format.PACKED, packedIntsVersion, chunkDocs, bitsPerLength, 1);
            int off = 0;
            for (int i = 0; i < docID - docBase; ++i)
            {
                off += (int)it.Next();
            }
            offset = off;
            length = (int)it.Next();
            off += length;
            for (int i = docID - docBase + 1; i < chunkDocs; ++i)
            {
                off += (int)it.Next();
            }
            totalLength = off;
        }
    }
    // A zero-length document must also have zero fields, and vice versa.
    if ((length == 0) != (numStoredFields == 0))
    {
        throw new CorruptIndexException("length=" + length + ", numStoredFields=" + numStoredFields + " (resource=" + fieldsStream + ")");
    }
    if (numStoredFields == 0)
    {
        // nothing to do
        return;
    }
    DataInput documentInput;
    if (version >= CompressingStoredFieldsWriter.VERSION_BIG_CHUNKS && totalLength >= 2 * chunkSize)
    {
        // Big chunk: decompress lazily, one chunkSize slab at a time.
        if (Debugging.AssertsEnabled)
        {
            Debugging.Assert(chunkSize > 0);
            Debugging.Assert(offset < chunkSize);
        }
        decompressor.Decompress(fieldsStream, chunkSize, offset, Math.Min(length, chunkSize - offset), bytes);
        documentInput = new DataInputAnonymousInnerClassHelper(this, offset, length);
    }
    else
    {
        // Small chunk: decompress just this document's slice up front. Reuse
        // the shared buffer only for small payloads to avoid pinning memory.
        BytesRef bytes = totalLength <= BUFFER_REUSE_THRESHOLD ? this.bytes : new BytesRef();
        decompressor.Decompress(fieldsStream, totalLength, offset, length, bytes);
        if (Debugging.AssertsEnabled) Debugging.Assert(bytes.Length == length);
        documentInput = new ByteArrayDataInput(bytes.Bytes, bytes.Offset, bytes.Length);
    }
    for (int fieldIDX = 0; fieldIDX < numStoredFields; fieldIDX++)
    {
        // Each field is prefixed with a vlong packing (fieldNumber << TYPE_BITS) | typeFlag.
        long infoAndBits = documentInput.ReadVInt64();
        int fieldNumber = (int)((long)((ulong)infoAndBits >> CompressingStoredFieldsWriter.TYPE_BITS));
        FieldInfo fieldInfo = fieldInfos.FieldInfo(fieldNumber);
        int bits = (int)(infoAndBits & CompressingStoredFieldsWriter.TYPE_MASK);
        if (Debugging.AssertsEnabled) Debugging.Assert(bits <= CompressingStoredFieldsWriter.NUMERIC_DOUBLE, () => "bits=" + bits.ToString("x"));
        switch (visitor.NeedsField(fieldInfo))
        {
            case StoredFieldVisitor.Status.YES:
                ReadField(documentInput, visitor, fieldInfo, bits);
                break;
            case StoredFieldVisitor.Status.NO:
                SkipField(documentInput, bits);
                break;
            case StoredFieldVisitor.Status.STOP:
                return;
        }
    }
}
/// <summary>
/// Streaming <see cref="DataInput"/> over a big-chunk document: instead of
/// decompressing the whole document up front, it serves bytes from the
/// reader's shared <c>bytes</c> buffer and refills it one chunk-sized slab at
/// a time from <c>fieldsStream</c> as the buffer is exhausted.
/// </summary>
private class DataInputAnonymousInnerClassHelper : DataInput
{
    private readonly CompressingStoredFieldsReader outerInstance;
    // NOTE(review): offset is stored but never read after construction in this
    // class — confirm it is intentionally unused.
    private int offset;
    // Total decompressed length of the document being streamed.
    private int length;
    public DataInputAnonymousInnerClassHelper(CompressingStoredFieldsReader outerInstance, int offset, int length)
    {
        this.outerInstance = outerInstance;
        this.offset = offset;
        this.length = length;
        // The caller already decompressed the first slab into outerInstance.bytes.
        decompressed = outerInstance.bytes.Length;
    }
    // Number of bytes of the document decompressed so far (<= length).
    internal int decompressed;
    // Refills outerInstance.bytes with the next slab of decompressed data.
    internal virtual void FillBuffer()
    {
        if (Debugging.AssertsEnabled) Debugging.Assert(decompressed <= length);
        if (decompressed == length)
        {
            // NOTE(review): bare Exception marks read-past-end (port of Java's
            // EOFException); consider a more specific type — confirm callers.
            throw new Exception();
        }
        int toDecompress = Math.Min(length - decompressed, outerInstance.chunkSize);
        outerInstance.decompressor.Decompress(outerInstance.fieldsStream, toDecompress, 0, toDecompress, outerInstance.bytes);
        decompressed += toDecompress;
    }
    // Returns the next byte, refilling the shared buffer when it runs dry.
    public override byte ReadByte()
    {
        if (outerInstance.bytes.Length == 0)
        {
            FillBuffer();
        }
        --outerInstance.bytes.Length;
        return (byte)outerInstance.bytes.Bytes[outerInstance.bytes.Offset++];
    }
    // Copies len bytes into b starting at offset, crossing slab boundaries as needed.
    public override void ReadBytes(byte[] b, int offset, int len)
    {
        while (len > outerInstance.bytes.Length)
        {
            // Drain whatever remains in the buffer, then refill.
            Array.Copy(outerInstance.bytes.Bytes, outerInstance.bytes.Offset, b, offset, outerInstance.bytes.Length);
            len -= outerInstance.bytes.Length;
            offset += outerInstance.bytes.Length;
            FillBuffer();
        }
        Array.Copy(outerInstance.bytes.Bytes, outerInstance.bytes.Offset, b, offset, len);
        outerInstance.bytes.Offset += len;
        outerInstance.bytes.Length -= len;
    }
}
/// <summary>
/// Returns an independent reader over the same index data; the clone shares
/// immutable state but gets its own stream positions (via the copy constructor).
/// </summary>
public override object Clone()
{
    EnsureOpen();
    return new CompressingStoredFieldsReader(this);
}
// Format version of the stored-fields file this reader was opened on.
internal int Version => version;
// Compression mode the chunks were written with.
internal CompressionMode CompressionMode => compressionMode;
// Configured chunk size (decompressed bytes per chunk slab).
internal int ChunkSize => chunkSize;
/// <summary>
/// Returns an iterator over raw chunks starting at the chunk that contains
/// <paramref name="startDocID"/> (used by the merger for bulk copying).
/// </summary>
internal ChunkIterator GetChunkIterator(int startDocID)
{
    EnsureOpen();
    return new ChunkIterator(this, startDocID);
}
/// <summary>
/// Sequential iterator over the compressed chunks of the stored-fields file,
/// exposing each chunk's per-document field counts and lengths, plus helpers
/// to decompress a chunk or bulk-copy its compressed bytes (merge fast path).
/// </summary>
internal sealed class ChunkIterator
{
    private readonly CompressingStoredFieldsReader outerInstance;
    // Checksumming view over a fresh seek of the fields stream.
    internal readonly ChecksumIndexInput fieldsStream;
    // Scratch buffer used while decompressing big chunks slab by slab.
    internal readonly BytesRef spare;
    // Decompressed bytes of the current chunk after Decompress().
    internal readonly BytesRef bytes;
    // First doc id of the current chunk (-1 before the first Next()).
    internal int docBase;
    // Number of documents in the current chunk.
    internal int chunkDocs;
    // Per-document stored-field counts for the current chunk.
    internal int[] numStoredFields;
    // Per-document serialized lengths for the current chunk.
    internal int[] lengths;
    internal ChunkIterator(CompressingStoredFieldsReader outerInstance, int startDocId)
    {
        this.outerInstance = outerInstance;
        this.docBase = -1;
        bytes = new BytesRef();
        spare = new BytesRef();
        numStoredFields = new int[1];
        lengths = new int[1];
        // Wrap the reader's stream in a checksum input positioned at the
        // chunk containing startDocId.
        IndexInput @in = outerInstance.fieldsStream;
        @in.Seek(0);
        fieldsStream = new BufferedChecksumIndexInput(@in);
        fieldsStream.Seek(outerInstance.indexReader.GetStartPointer(startDocId));
    }
    /// <summary>
    /// Return the decompressed size of the chunk
    /// </summary>
    internal int ChunkSize()
    {
        int sum = 0;
        for (int i = 0; i < chunkDocs; ++i)
        {
            sum += lengths[i];
        }
        return sum;
    }
    /// <summary>
    /// Go to the chunk containing the provided <paramref name="doc"/> ID.
    /// </summary>
    internal void Next(int doc)
    {
        if (Debugging.AssertsEnabled) Debugging.Assert(doc >= this.docBase + this.chunkDocs, () => doc + " " + this.docBase + " " + this.chunkDocs);
        fieldsStream.Seek(outerInstance.indexReader.GetStartPointer(doc));
        // Chunk header: first doc id and document count.
        int docBase = fieldsStream.ReadVInt32();
        int chunkDocs = fieldsStream.ReadVInt32();
        // Chunks must advance monotonically and stay within the segment.
        if (docBase < this.docBase + this.chunkDocs || docBase + chunkDocs > outerInstance.numDocs)
        {
            throw new CorruptIndexException("Corrupted: current docBase=" + this.docBase + ", current numDocs=" + this.chunkDocs + ", new docBase=" + docBase + ", new numDocs=" + chunkDocs + " (resource=" + fieldsStream + ")");
        }
        this.docBase = docBase;
        this.chunkDocs = chunkDocs;
        // Grow both parallel arrays together when the chunk is larger than before.
        if (chunkDocs > numStoredFields.Length)
        {
            int newLength = ArrayUtil.Oversize(chunkDocs, 4);
            numStoredFields = new int[newLength];
            lengths = new int[newLength];
        }
        if (chunkDocs == 1)
        {
            // Single-document chunk stores both values inline.
            numStoredFields[0] = fieldsStream.ReadVInt32();
            lengths[0] = fieldsStream.ReadVInt32();
        }
        else
        {
            // bitsPerStoredFields == 0 means one shared value for all docs;
            // otherwise a headerless packed-ints block follows.
            int bitsPerStoredFields = fieldsStream.ReadVInt32();
            if (bitsPerStoredFields == 0)
            {
                Arrays.Fill(numStoredFields, 0, chunkDocs, fieldsStream.ReadVInt32());
            }
            else if (bitsPerStoredFields > 31)
            {
                throw new CorruptIndexException("bitsPerStoredFields=" + bitsPerStoredFields + " (resource=" + fieldsStream + ")");
            }
            else
            {
                PackedInt32s.IReaderIterator it = PackedInt32s.GetReaderIteratorNoHeader(fieldsStream, PackedInt32s.Format.PACKED, outerInstance.packedIntsVersion, chunkDocs, bitsPerStoredFields, 1);
                for (int i = 0; i < chunkDocs; ++i)
                {
                    numStoredFields[i] = (int)it.Next();
                }
            }
            // Same encoding scheme for per-document lengths.
            int bitsPerLength = fieldsStream.ReadVInt32();
            if (bitsPerLength == 0)
            {
                Arrays.Fill(lengths, 0, chunkDocs, fieldsStream.ReadVInt32());
            }
            else if (bitsPerLength > 31)
            {
                throw new CorruptIndexException("bitsPerLength=" + bitsPerLength);
            }
            else
            {
                PackedInt32s.IReaderIterator it = PackedInt32s.GetReaderIteratorNoHeader(fieldsStream, PackedInt32s.Format.PACKED, outerInstance.packedIntsVersion, chunkDocs, bitsPerLength, 1);
                for (int i = 0; i < chunkDocs; ++i)
                {
                    lengths[i] = (int)it.Next();
                }
            }
        }
    }
    /// <summary>
    /// Decompress the chunk.
    /// </summary>
    internal void Decompress()
    {
        // decompress data
        int chunkSize = ChunkSize();
        if (outerInstance.version >= CompressingStoredFieldsWriter.VERSION_BIG_CHUNKS && chunkSize >= 2 * outerInstance.chunkSize)
        {
            // Big chunk: decompress slab by slab into spare, appending to bytes.
            bytes.Offset = bytes.Length = 0;
            for (int decompressed = 0; decompressed < chunkSize; )
            {
                int toDecompress = Math.Min(chunkSize - decompressed, outerInstance.chunkSize);
                outerInstance.decompressor.Decompress(fieldsStream, toDecompress, 0, toDecompress, spare);
                bytes.Bytes = ArrayUtil.Grow(bytes.Bytes, bytes.Length + spare.Length);
                Array.Copy(spare.Bytes, spare.Offset, bytes.Bytes, bytes.Length, spare.Length);
                bytes.Length += spare.Length;
                decompressed += toDecompress;
            }
        }
        else
        {
            // Small chunk: decompress the whole thing in one call.
            outerInstance.decompressor.Decompress(fieldsStream, chunkSize, 0, chunkSize, bytes);
        }
        if (bytes.Length != chunkSize)
        {
            throw new CorruptIndexException("Corrupted: expected chunk size = " + ChunkSize() + ", got " + bytes.Length + " (resource=" + fieldsStream + ")");
        }
    }
    /// <summary>
    /// Copy compressed data.
    /// </summary>
    internal void CopyCompressedData(DataOutput @out)
    {
        // Raw copy is only valid when reader and writer use the same format version.
        if (Debugging.AssertsEnabled) Debugging.Assert(outerInstance.Version == CompressingStoredFieldsWriter.VERSION_CURRENT);
        // The chunk ends at the next chunk's start pointer, or at maxPointer
        // for the last chunk of the segment.
        long chunkEnd = docBase + chunkDocs == outerInstance.numDocs ? outerInstance.maxPointer : outerInstance.indexReader.GetStartPointer(docBase + chunkDocs);
        @out.CopyBytes(fieldsStream, chunkEnd - fieldsStream.GetFilePointer());
    }
    /// <summary>
    /// Check integrity of the data. The iterator is not usable after this method has been called.
    /// </summary>
    internal void CheckIntegrity()
    {
        if (outerInstance.version >= CompressingStoredFieldsWriter.VERSION_CHECKSUM)
        {
            fieldsStream.Seek(fieldsStream.Length - CodecUtil.FooterLength());
            CodecUtil.CheckFooter(fieldsStream);
        }
    }
}
/// <summary>
/// Returns the heap memory held by this reader; dominated by the in-memory
/// chunk index, so we delegate to it.
/// </summary>
public override long RamBytesUsed()
{
    return indexReader.RamBytesUsed();
}
/// <summary>
/// Verifies the stored-fields file against its footer checksum. Older format
/// versions have no checksum footer, so this is a no-op for them.
/// </summary>
public override void CheckIntegrity()
{
    if (version >= CompressingStoredFieldsWriter.VERSION_CHECKSUM)
    {
        CodecUtil.ChecksumEntireFile(fieldsStream);
    }
}
}
}
| |
using System;
using System.IO;
using System.Collections;
using System.Collections.Generic;
namespace scrollslive.Mod
{
/// <summary>
/// Records battle messages for a game so they can be replayed and live-streamed
/// to watchers. Listens on the game's communicator, stages incoming messages
/// until a TurnBegin boundary, then commits them to <see cref="messages"/>
/// (broadcasting each committed message to watchers as a side effect).
/// </summary>
public class Recorder : ICommListener, IOkCancelCallback
{
    // Committed replay log: raw JSON message strings in commit order.
    public List<String> messages = new List<String>();
    // Destination folder for saved games (set by caller; not written here).
    private String saveFolder;
    // Id of the game being recorded, taken from the GameInfoMessage.
    private string gameID;
    // Owning mod/UI object.
    private Mod uiClass;
    //private DateTime timestamp;
    // Communicator we are currently listening on.
    private MiniCommunicator comm;
    // True while actively recording.
    public bool recording = false;
    // Current battle scene / UI, injected via setBm / setBmUI.
    private BattleMode bttlmd = null;
    private BattleModeUI bttlmdUI = null;
    // Builds synthetic game-state snapshots emitted at turn boundaries.
    private GameStateCreator gsc;
    // NOTE(review): never written in this class — confirm it is dead.
    private int lastwasturn = 0;
    // Whether the most recent TurnBegin belonged to the white player.
    private bool lastwasturnwhite=true;
    // Staging buffer: messages received but not yet committed (flushed up to
    // the next TurnBegin by saveTillBeginTurn).
    private List<Message> tempmessages = new List<Message>();
    private Settings settings;
    // Used to broadcast committed messages to watchers and to stop watching.
    private CommunicationManager cm;
    // True when this recorder replays a saved game instead of a live one.
    public bool isReplay = false;
    /// <summary>
    /// Starts recording immediately, registering this instance as a listener
    /// on the app-wide communicator.
    /// </summary>
    public Recorder(String saveFolder, Mod uiClass, Settings stngs, CommunicationManager c)
    {
        this.settings = stngs;
        this.cm = c;
        this.saveFolder = saveFolder;
        //App.Communicator.addListener(this);
        this.comm = App.Communicator;
        this.comm.addListener(this);
        /*IpPort address = App.SceneValues.battleMode.address;
        if (!address.Equals(App.Communicator.getAddress()))
        {
            this.comm = App.SceneValues.battleMode.specCommGameObject.GetComponent<MiniCommunicator>();
            this.comm.addListener(this);
            this.comm.setEnabled(true, true);
        }
        */
        this.uiClass = uiClass;
        this.recording = true;
        //timestamp = DateTime.Now;
        gsc = new GameStateCreator();
    }
    /// <summary>
    /// Commits a raw message string to the replay log and forwards it to
    /// any connected watchers.
    /// </summary>
    public void addMessage(string s)
    {
        messages.Add(s);
        //send message to watchers :D
        cm.broadcastBattleMessageToWatchers(s);
    }
    /// <summary>
    /// Switches recording to spectator mode: re-attaches to the spectator
    /// communicator (when the battle runs on a different address) and seeds
    /// the log with a synthetic ServerInfo plus the current game info.
    /// </summary>
    public void recordSpectator()
    {
        this.comm.removeListener(this);
        // NOTE(review): if the addresses match, this fresh Communicator keeps
        // no listener attached — confirm that is the intended behavior.
        this.comm = new Communicator();
        IpPort address = App.SceneValues.battleMode.address;
        if (!address.Equals(App.Communicator.getAddress()))
        {
            this.comm = App.SceneValues.battleMode.specCommGameObject.GetComponent<MiniCommunicator>();
            this.comm.addListener(this);
            this.comm.setEnabled(true, true);
        }
        addMessage("{\"version\":\"1.1.0\",\"assetURL\":\"http://download.scrolls.com/assets/\",\"roles\":\"GAME,RESOURCE\",\"msg\":\"ServerInfo\"}");
        addMessage(App.SceneValues.battleMode.msg.getRawText());
        gameID = (App.SceneValues.battleMode.msg as GameInfoMessage).gameId.ToString();
        gsc.updateIdols((App.SceneValues.battleMode.msg as GameInfoMessage).whiteIdols);
        gsc.updateIdols((App.SceneValues.battleMode.msg as GameInfoMessage).blackIdols);
        gsc.setTurn(0);
        gsc.whitePlayerName = (App.SceneValues.battleMode.msg as GameInfoMessage).white;
        gsc.blackPlayerName = (App.SceneValues.battleMode.msg as GameInfoMessage).black;
    }
    /// <summary>
    /// ICommListener callback: stages recordable message types, keeps the
    /// game-state tracker in sync, then flushes up to the next turn boundary.
    /// </summary>
    public void handleMessage(Message msg)
    {
        // save the message on help-list
        if (!(msg is GameInfoMessage) && !(msg is GameStateMessage) && !(msg is NewEffectsMessage) && !(msg is ActiveResourcesMessage) && !(msg is ServerInfoMessage)) return;
        if (msg is GameInfoMessage)
        {
            gameID = (msg as GameInfoMessage).gameId.ToString();
        }
        tempmessages.Add(msg);
        //set the idol hp
        if (msg is GameStateMessage)//updating idols, for start of game
        {
            gsc.GameStateupdateIdols((msg as GameStateMessage).whiteGameState.board.idols, true);
            gsc.GameStateupdateIdols((msg as GameStateMessage).blackGameState.board.idols, false);
            gsc.setTurn((msg as GameStateMessage).turn);
            gsc.whitePlayerName = (msg as GameStateMessage).whiteGameState.playerName;
            gsc.blackPlayerName = (msg as GameStateMessage).blackGameState.playerName;
        }
        if (msg is GameInfoMessage)//updating idols, for start of game
        {
            gsc.updateIdols((msg as GameInfoMessage).whiteIdols);
            gsc.updateIdols((msg as GameInfoMessage).blackIdols);
            gsc.setTurn(0);
            gsc.whitePlayerName = (msg as GameInfoMessage).white;
            gsc.blackPlayerName = (msg as GameInfoMessage).black;
        }
        saveTillBeginTurn(); // try to save the tempmessages in messages
    }
    /// <summary>
    /// Commits staged messages in order until the next TurnBegin message (or
    /// the buffer is empty), updating idol state and stopping the recording
    /// when an EndGame effect is seen.
    /// </summary>
    private void saveTillBeginTurn()
    {
        while (tempmessages.Count >= 1 && !tempmessages[0].getRawText().Contains("TurnBegin"))
        {
            Message msg = tempmessages[0];
            tempmessages.RemoveAt(0);
            addMessage(msg.getRawText());
            Console.WriteLine("## add to recorder:" + msg.getRawText() + "\r\n###");
            if (msg is NewEffectsMessage && msg.getRawText().Contains("IdolUpdate"))//updating idols, battlemode is to slow for us
            {
                List<EffectMessage> idolupdates = new List<EffectMessage>();
                foreach (EffectMessage current in NewEffectsMessage.parseEffects(msg.getRawText()))
                {
                    if (current.type == "IdolUpdate") { idolupdates.Add(current); }
                }
                gsc.updateIdols(idolupdates);
            }
            if (msg is NewEffectsMessage && msg.getRawText().Contains("EndGame"))
            {
                this.stoprec();
            }
        }
    }
    /// <summary>
    /// Called when a turn transition completes: commits the pending TurnBegin
    /// message, records whose turn started, emits a synthetic game-state
    /// snapshot (live games only), then flushes further staged messages.
    /// </summary>
    public void turnBeginEnds()
    {
        saveTillBeginTurn();// maybe there are messages in front of turnbegin messages (prob. = 0)
        // save beginTurn effect
        if(tempmessages.Count==0) return;
        Message msg = tempmessages[0];
        addMessage(msg.getRawText());
        Console.WriteLine("### add to recorder (turnbegin):" + msg.getRawText() + "\r\n###");
        if (msg is NewEffectsMessage && msg.getRawText().Contains("TurnBegin"))
        {
            List<EffectMessage> idolupdates = new List<EffectMessage>();
            foreach (EffectMessage current in NewEffectsMessage.parseEffects(msg.getRawText()))
            {
                if (current.type == "TurnBegin") { gsc.setTurn((current as EMTurnBegin).turn); }
            }
            this.lastwasturnwhite = true;
            if (msg.getRawText().Contains("black")) this.lastwasturnwhite = false;
        }
        tempmessages.RemoveAt(0); //remove the turnbegin messages
        // create gamestatus + save it
        string mygsc = gsc.create(this.bttlmd, this.bttlmdUI, this.lastwasturnwhite);
        if (!this.isReplay)
        {
            Console.WriteLine("##created:" + mygsc);
            addMessage(mygsc);
        }
        // add the next saved messages
        saveTillBeginTurn();
    }
    /// <summary>ICommListener callback; connection events are ignored.</summary>
    public void onConnect(OnConnectData ocd)
    {
        return; //I (still) don't care
    }
    /// <summary>
    /// Stops recording after appending a synthetic EndGame effect so the
    /// saved replay always terminates cleanly.
    /// </summary>
    public void stoprecording()
    {
        addMessage("{\"effects\":[{\"EndGame\":{\"winner\":\"black\",\"whiteStats\":{\"profileId\":\"RobotEasy\",\"idolDamage\":0,\"unitDamage\":0,\"unitsPlayed\":0,\"spellsPlayed\":0,\"enchantmentsPlayed\":0,\"scrollsDrawn\":0,\"totalMs\":1,\"mostDamageUnit\":0,\"idolsDestroyed\":0},\"blackStats\":{\"profileId\":\"RobotEasy\",\"idolDamage\":0,\"unitDamage\":0,\"unitsPlayed\":0,\"spellsPlayed\":0,\"enchantmentsPlayed\":0,\"scrollsDrawn\":0,\"totalMs\":1,\"mostDamageUnit\":0,\"idolsDestroyed\":0},\"whiteGoldReward\":{\"matchReward\":0,\"matchCompletionReward\":0,\"idolsDestroyedReward\":0,\"totalReward\":0},\"blackGoldReward\":{\"matchReward\":0,\"matchCompletionReward\":0,\"idolsDestroyedReward\":0,\"totalReward\":0}}}],\"msg\":\"NewEffects\"}");
        stoprec();
    }
    /// <summary>
    /// Tears down recording state: detaches the listener and stops the
    /// watcher broadcast without appending anything to the log.
    /// </summary>
    public void stoprec()
    {
        this.recording = false;
        this.cm.google.isWatching = false;
        this.comm.removeListener(this);
        this.cm.stopwatching();
    }
    /// <summary>Injects the current battle scene used for snapshots.</summary>
    public void setBm(BattleMode b)
    {
        this.bttlmd = b;
    }
    /// <summary>Injects the current battle UI used for snapshots.</summary>
    public void setBmUI(BattleModeUI bui)
    {
        this.bttlmdUI = bui;
    }
    // IOkCancelCallback: popup results are intentionally ignored.
    public void PopupOk(string s)
    {
    }
    public void PopupCancel(string s)
    {
    }
}
}
| |
/* httpcontext-simulator
* a simulator used to simulate http context during integration testing
*
* Copyright (C) Phil Haack
* http://code.google.com/p/httpcontext-simulator/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
* to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
* TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
namespace HttpSimulator
{
#region
using System;
using System.Collections;
using System.Collections.Specialized;
using System.IO;
using System.Security.Permissions;
using System.Text;
using System.Web;
using System.Web.Caching;
#endregion
/// <summary>
/// Abstraction over <see cref="HttpResponse"/> so the simulator can stand in
/// for the real response object during integration tests. Member semantics
/// mirror <see cref="HttpResponse"/>.
/// </summary>
public interface IHttpResponse
{
    #region Public Properties
    /// <summary>Gets or sets a value indicating whether output is buffered.</summary>
    bool Buffer { get; set; }
    /// <summary>Gets or sets a value indicating whether output is buffered and sent when the response completes.</summary>
    bool BufferOutput { get; set; }
    /// <summary>Gets the caching policy of the response.</summary>
    HttpCachePolicy Cache { get; }
    /// <summary>Gets or sets the Cache-Control header value.</summary>
    string CacheControl { get; set; }
    /// <summary>Gets or sets the HTTP character set of the output stream.</summary>
    string Charset { get; set; }
    /// <summary>Gets or sets the encoding of the output stream.</summary>
    Encoding ContentEncoding { get; set; }
    /// <summary>Gets or sets the MIME content type of the output.</summary>
    string ContentType { get; set; }
    /// <summary>Gets the response cookie collection.</summary>
    HttpCookieCollection Cookies { get; }
    /// <summary>Gets or sets the number of minutes before the cached page expires.</summary>
    int Expires { get; set; }
    /// <summary>Gets or sets the absolute expiration time of the cached page.</summary>
    DateTime ExpiresAbsolute { get; set; }
    /// <summary>Gets or sets a filter stream that wraps the outgoing body.</summary>
    Stream Filter { get; set; }
    /// <summary>Gets or sets the encoding used for the response headers.</summary>
    Encoding HeaderEncoding { get; set; }
    /// <summary>Gets the collection of response headers.</summary>
    NameValueCollection Headers { get; }
    /// <summary>Gets a value indicating whether the client is still connected.</summary>
    bool IsClientConnected { get; }
    /// <summary>Gets a value indicating whether the request is being redirected.</summary>
    bool IsRequestBeingRedirected { get; }
    /// <summary>Gets a text writer over the response body.</summary>
    TextWriter Output { get; }
    /// <summary>Gets the raw binary output stream.</summary>
    Stream OutputStream { get; }
    /// <summary>Gets or sets the Location header value used for redirects.</summary>
    string RedirectLocation { get; set; }
    /// <summary>Gets or sets the status line returned to the client (e.g. "200 OK").</summary>
    string Status { get; set; }
    /// <summary>Gets or sets the HTTP status code.</summary>
    int StatusCode { get; set; }
    /// <summary>Gets or sets the HTTP status description text.</summary>
    string StatusDescription { get; set; }
    /// <summary>Gets or sets the IIS sub-status code.</summary>
    int SubStatusCode { get; set; }
    /// <summary>Gets or sets a value indicating whether content is suppressed (headers only).</summary>
    bool SuppressContent { get; set; }
    #endregion
    #region Public Methods and Operators
    /// <summary>Makes the cached response dependent on the given cache dependencies.</summary>
    /// <param name="dependencies">The dependencies to associate.</param>
    void AddCacheDependency(params CacheDependency[] dependencies);
    /// <summary>Makes the cached response dependent on other cache items.</summary>
    /// <param name="cacheKeys">Keys of the cache items.</param>
    void AddCacheItemDependencies(string[] cacheKeys);
    /// <summary>Makes the cached response dependent on other cache items.</summary>
    /// <param name="cacheKeys">Keys of the cache items.</param>
    void AddCacheItemDependencies(ArrayList cacheKeys);
    /// <summary>Makes the cached response dependent on a single cache item.</summary>
    /// <param name="cacheKey">Key of the cache item.</param>
    void AddCacheItemDependency(string cacheKey);
    /// <summary>Makes the cached response dependent on the given files.</summary>
    /// <param name="filenames">The file paths.</param>
    void AddFileDependencies(ArrayList filenames);
    /// <summary>Makes the cached response dependent on the given files.</summary>
    /// <param name="filenames">The file paths.</param>
    void AddFileDependencies(string[] filenames);
    /// <summary>Makes the cached response dependent on a single file.</summary>
    /// <param name="filename">The file path.</param>
    void AddFileDependency(string filename);
    /// <summary>Adds (or replaces) an HTTP header.</summary>
    /// <param name="name">The header name.</param>
    /// <param name="value">The header value.</param>
    void AddHeader(string name, string value);
    /// <summary>Appends a cookie to the response.</summary>
    /// <param name="cookie">The cookie to append.</param>
    void AppendCookie(HttpCookie cookie);
    /// <summary>Appends an HTTP header (allows duplicates).</summary>
    /// <param name="name">The header name.</param>
    /// <param name="value">The header value.</param>
    void AppendHeader(string name, string value);
    /// <summary>Appends custom text to the IIS log entry for this request.</summary>
    /// <param name="param">The text to log.</param>
    [AspNetHostingPermission(SecurityAction.Demand, Level = AspNetHostingPermissionLevel.Medium)]
    void AppendToLog(string param);
    /// <summary>
    /// Applies the app path modifier.
    /// </summary>
    /// <param name="virtualPath">The virtual path.</param>
    /// <returns>The virtual path with the session id applied when cookieless sessions are in use.</returns>
    string ApplyAppPathModifier(string virtualPath);
    /// <summary>Writes raw bytes to the output stream.</summary>
    /// <param name="buffer">The bytes to write.</param>
    void BinaryWrite(byte[] buffer);
    /// <summary>Clears all buffered output (and, like <see cref="ClearContent"/>, leaves headers intact).</summary>
    void Clear();
    /// <summary>Clears the buffered content output.</summary>
    void ClearContent();
    /// <summary>Clears all response headers.</summary>
    void ClearHeaders();
    /// <summary>Closes the client connection.</summary>
    void Close();
    /// <summary>Disables IIS kernel-mode caching for this response.</summary>
    void DisableKernelCache();
    /// <summary>Sends all buffered output and stops execution of the page.</summary>
    void End();
    /// <summary>Sends all currently buffered output to the client.</summary>
    void Flush();
    /// <summary>Appends a PICS-Label header with the given value.</summary>
    /// <param name="value">The PICS label.</param>
    void Pics(string value);
    /// <summary>Redirects the client to the given URL, ending the response.</summary>
    /// <param name="url">The target URL.</param>
    void Redirect(string url);
    /// <summary>Redirects the client to the given URL.</summary>
    /// <param name="url">The target URL.</param>
    /// <param name="endResponse">Whether to end the current response.</param>
    void Redirect(string url, bool endResponse);
    /// <summary>Updates an existing cookie in the response (replacing by name).</summary>
    /// <param name="cookie">The cookie to set.</param>
    void SetCookie(HttpCookie cookie);
    /// <summary>Writes a file directly to the client without buffering it in memory.</summary>
    /// <param name="filename">The file path.</param>
    void TransmitFile(string filename);
    /// <summary>Writes part of a file directly to the client without buffering it in memory.</summary>
    /// <param name="filename">The file path.</param>
    /// <param name="offset">Byte offset to start from.</param>
    /// <param name="length">Number of bytes to send.</param>
    void TransmitFile(string filename, long offset, long length);
    /// <summary>Writes a single character to the output.</summary>
    /// <param name="ch">The character.</param>
    void Write(char ch);
    /// <summary>Writes an object's string representation to the output.</summary>
    /// <param name="obj">The object.</param>
    void Write(object obj);
    /// <summary>Writes a string to the output.</summary>
    /// <param name="s">The string.</param>
    void Write(string s);
    /// <summary>Writes a slice of a character buffer to the output.</summary>
    /// <param name="buffer">The source buffer.</param>
    /// <param name="index">Start index in the buffer.</param>
    /// <param name="count">Number of characters to write.</param>
    void Write(char[] buffer, int index, int count);
    /// <summary>Writes a file's contents to the output stream.</summary>
    /// <param name="filename">The file path.</param>
    void WriteFile(string filename);
    /// <summary>Writes a file's contents to the output stream.</summary>
    /// <param name="filename">The file path.</param>
    /// <param name="readIntoMemory">Whether to read the whole file into memory first.</param>
    void WriteFile(string filename, bool readIntoMemory);
    /// <summary>Writes part of a file, identified by its OS handle, to the output stream.</summary>
    /// <param name="fileHandle">The OS file handle.</param>
    /// <param name="offset">Byte offset to start from.</param>
    /// <param name="size">Number of bytes to write.</param>
    [SecurityPermission(SecurityAction.Demand, UnmanagedCode = true)]
    void WriteFile(IntPtr fileHandle, long offset, long size);
    /// <summary>Writes part of a file to the output stream.</summary>
    /// <param name="filename">The file path.</param>
    /// <param name="offset">Byte offset to start from.</param>
    /// <param name="size">Number of bytes to write.</param>
    void WriteFile(string filename, long offset, long size);
    /// <summary>Inserts a post-cache substitution callback into the output.</summary>
    /// <param name="callback">The substitution callback.</param>
    void WriteSubstitution(HttpResponseSubstitutionCallback callback);
    #endregion
    // Properties
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Xml;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Orleans.Configuration;
using Orleans.MultiCluster;
using Orleans.Runtime.MembershipService;
using Orleans.Runtime.MultiClusterNetwork;
using Orleans.Versions;
using Orleans.Versions.Compatibility;
using Orleans.Versions.Selector;
namespace Orleans.Runtime.Management
{
/// <summary>
/// Implementation class for the Orleans management grain.
/// </summary>
[OneInstancePerCluster]
internal class ManagementGrain : Grain, IManagementGrain
{
private readonly MultiClusterOptions multiClusterOptions;
private readonly IMultiClusterOracle multiClusterOracle;
private readonly IInternalGrainFactory internalGrainFactory;
private readonly ISiloStatusOracle siloStatusOracle;
private readonly GrainTypeManager grainTypeManager;
private readonly IVersionStore versionStore;
private ILogger logger;
private IMembershipTable membershipTable;
/// <summary>
/// Constructs the management grain; all collaborators are supplied by DI and
/// only stored here — no work happens until a method is invoked.
/// </summary>
public ManagementGrain(
    IOptions<MultiClusterOptions> multiClusterOptions,
    IMultiClusterOracle multiClusterOracle,
    IInternalGrainFactory internalGrainFactory,
    ISiloStatusOracle siloStatusOracle,
    IMembershipTable membershipTable,
    GrainTypeManager grainTypeManager,
    IVersionStore versionStore,
    ILogger<ManagementGrain> logger)
{
    this.multiClusterOptions = multiClusterOptions.Value;
    this.multiClusterOracle = multiClusterOracle;
    this.internalGrainFactory = internalGrainFactory;
    this.membershipTable = membershipTable;
    this.siloStatusOracle = siloStatusOracle;
    this.grainTypeManager = grainTypeManager;
    this.versionStore = versionStore;
    this.logger = logger;
}
/// <summary>
/// Returns the known silos and their statuses. When the status oracle is
/// backed by the membership table, the table is read directly; otherwise the
/// oracle's approximate in-memory view is returned.
/// </summary>
/// <param name="onlyActive">When true, restrict the result to active silos.</param>
public async Task<Dictionary<SiloAddress, SiloStatus>> GetHosts(bool onlyActive = false)
{
    // Only MembershipOracle is assumed to use IMembershipTable; any other
    // oracle implementation gets the approximate-status fallback.
    if (this.siloStatusOracle is MembershipOracle)
    {
        var table = await GetMembershipTable();
        var tableData = await table.ReadAll();
        var rows = tableData.Members.AsEnumerable();
        if (onlyActive)
        {
            rows = rows.Where(row => row.Item1.Status == SiloStatus.Active);
        }
        return rows.ToDictionary(row => row.Item1.SiloAddress, row => row.Item1.Status);
    }
    return this.siloStatusOracle.GetApproximateSiloStatuses(onlyActive);
}
/// <summary>
/// Reads the membership table and returns the full membership entries,
/// optionally restricted to silos in the Active state.
/// </summary>
public async Task<MembershipEntry[]> GetDetailedHosts(bool onlyActive = false)
{
    logger.Info("GetDetailedHosts onlyActive={0}", onlyActive);
    var mTable = await GetMembershipTable();
    var tableData = await mTable.ReadAll();
    var rows = tableData.Members.AsEnumerable();
    if (onlyActive)
    {
        rows = rows.Where(row => row.Item1.Status == SiloStatus.Active);
    }
    return rows.Select(row => row.Item1).ToArray();
}
/// <summary>
/// Triggers a GC on each of the given silos (all silos when the resolution
/// helper expands a null/empty argument) and completes when all have finished.
/// </summary>
public Task ForceGarbageCollection(SiloAddress[] siloAddresses)
{
    var silos = GetSiloAddresses(siloAddresses);
    logger.Info("Forcing garbage collection on {0}", Utils.EnumerableToString(silos));
    List<Task> actionPromises = PerformPerSiloAction(silos,
        s => GetSiloControlReference(s).ForceGarbageCollection());
    return Task.WhenAll(actionPromises);
}
/// <summary>
/// Forces activation collection on the given silos for activations idle
/// longer than <paramref name="ageLimit"/>, completing when all silos finish.
/// </summary>
public Task ForceActivationCollection(SiloAddress[] siloAddresses, TimeSpan ageLimit)
{
    // FIX: the addresses were previously resolved twice
    // (GetSiloAddresses(GetSiloAddresses(...))); resolve once, matching the
    // pattern used by every sibling method in this grain.
    var silos = GetSiloAddresses(siloAddresses);
    return Task.WhenAll(silos.Select(s =>
        GetSiloControlReference(s).ForceActivationCollection(ageLimit)));
}
/// <summary>
/// Forces activation collection on every active silo for activations idle
/// longer than <paramref name="ageLimit"/>.
/// </summary>
public async Task ForceActivationCollection(TimeSpan ageLimit)
{
    var activeHosts = await GetHosts(true);
    await ForceActivationCollection(activeHosts.Keys.ToArray(), ageLimit);
}
/// <summary>
/// Triggers runtime-statistics collection on each of the given silos and
/// completes when all have finished.
/// </summary>
public Task ForceRuntimeStatisticsCollection(SiloAddress[] siloAddresses)
{
    var silos = GetSiloAddresses(siloAddresses);
    logger.Info("Forcing runtime statistics collection on {0}", Utils.EnumerableToString(silos));
    List<Task> actionPromises = PerformPerSiloAction(
        silos,
        s => GetSiloControlReference(s).ForceRuntimeStatisticsCollection());
    return Task.WhenAll(actionPromises);
}
/// <summary>
/// Fans out a runtime-statistics query to the given silos and returns the
/// results in silo order.
/// </summary>
public Task<SiloRuntimeStatistics[]> GetRuntimeStatistics(SiloAddress[] siloAddresses)
{
    var silos = GetSiloAddresses(siloAddresses);
    if (logger.IsEnabled(LogLevel.Debug)) logger.Debug("GetRuntimeStatistics on {0}", Utils.EnumerableToString(silos));
    // Materialize eagerly so every call is issued before we await any of them.
    var promises = silos
        .Select(silo => GetSiloControlReference(silo).GetRuntimeStatistics())
        .ToList();
    return Task.WhenAll(promises);
}
/// <summary>
/// Collects simple grain statistics from the given silos and flattens the
/// per-silo results into a single array.
/// </summary>
public async Task<SimpleGrainStatistic[]> GetSimpleGrainStatistics(SiloAddress[] hostsIds)
{
    // Issue every query before awaiting so the silos are polled in parallel.
    var queries = GetSiloAddresses(hostsIds)
        .Select(silo => GetSiloControlReference(silo).GetSimpleGrainStatistics())
        .ToList();
    await Task.WhenAll(queries);
    return queries.SelectMany(query => query.Result).ToArray();
}
/// <summary>
/// Collects simple grain statistics from every active silo.
/// </summary>
public async Task<SimpleGrainStatistic[]> GetSimpleGrainStatistics()
{
    var activeHosts = await GetHosts(true);
    return await GetSimpleGrainStatistics(activeHosts.Keys.ToArray());
}
/// <summary>
/// Collects detailed grain statistics, optionally filtered to the given grain
/// types, from the given silos (defaulting to all active silos).
/// </summary>
public async Task<DetailedGrainStatistic[]> GetDetailedGrainStatistics(string[] types = null, SiloAddress[] hostsIds = null)
{
    if (hostsIds == null)
    {
        // No explicit target list: query every active silo.
        var activeHosts = await GetHosts(true);
        hostsIds = activeHosts.Keys.ToArray();
    }
    var queries = GetSiloAddresses(hostsIds)
        .Select(silo => GetSiloControlReference(silo).GetDetailedGrainStatistics(types))
        .ToList();
    await Task.WhenAll(queries);
    return queries.SelectMany(query => query.Result).ToArray();
}
/// <summary>
/// Counts the activations of a single grain across all silos by summing the
/// local activation counts from each silo's detailed grain report.
/// </summary>
/// <param name="grainReference">The grain whose activations are counted.</param>
public async Task<int> GetGrainActivationCount(GrainReference grainReference)
{
    Dictionary<SiloAddress, SiloStatus> hosts = await GetHosts(true);
    List<SiloAddress> hostsIds = hosts.Keys.ToList();
    var tasks = new List<Task<DetailedGrainReport>>();
    foreach (var silo in hostsIds)
        tasks.Add(GetSiloControlReference(silo).GetDetailedGrainReport(grainReference.GrainId));
    await Task.WhenAll(tasks);
    return tasks.Select(s => s.Result).Select(r => r.LocalActivations.Count).Sum();
}
/// <summary>
/// Returns the distinct grain type names active on the specified silos, or on
/// every silo reported by <c>GetHosts(true)</c> when <paramref name="hostsIds"/> is null.
/// </summary>
/// <param name="hostsIds">Target silos; null means every silo.</param>
public async Task<string[]> GetActiveGrainTypes(SiloAddress[] hostsIds=null)
{
    if (hostsIds == null)
    {
        // Fix: the original assigned the queried silo list to an unused local
        // and left hostsIds null, silently falling back to the oracle's
        // approximate statuses inside GetSiloAddresses. Use the queried hosts.
        Dictionary<SiloAddress, SiloStatus> hosts = await GetHosts(true);
        hostsIds = hosts.Keys.ToArray();
    }
    var all = GetSiloAddresses(hostsIds).Select(s => GetSiloControlReference(s).GetGrainTypeList()).ToArray();
    await Task.WhenAll(all);
    return all.SelectMany(s => s.Result).Distinct().ToArray();
}
/// <summary>
/// Sets the cluster-wide version compatibility strategy: persists it to the
/// version store, then pushes it to every silo (best effort).
/// </summary>
public async Task SetCompatibilityStrategy(CompatibilityStrategy strategy)
{
    await SetStrategy(
        store => store.SetCompatibilityStrategy(strategy),
        siloControl => siloControl.SetCompatibilityStrategy(strategy));
}
/// <summary>
/// Sets the cluster-wide version selector strategy: persists it to the
/// version store, then pushes it to every silo (best effort).
/// </summary>
public async Task SetSelectorStrategy(VersionSelectorStrategy strategy)
{
    await SetStrategy(
        store => store.SetSelectorStrategy(strategy),
        siloControl => siloControl.SetSelectorStrategy(strategy));
}
/// <summary>
/// Sets the compatibility strategy for a single grain interface after
/// validating that the interface id is known to the cluster.
/// </summary>
/// <exception cref="ArgumentException">Thrown when the interface id is unknown.</exception>
public async Task SetCompatibilityStrategy(int interfaceId, CompatibilityStrategy strategy)
{
    CheckIfIsExistingInterface(interfaceId);
    await SetStrategy(
        store => store.SetCompatibilityStrategy(interfaceId, strategy),
        siloControl => siloControl.SetCompatibilityStrategy(interfaceId, strategy));
}
/// <summary>
/// Sets the version selector strategy for a single grain interface after
/// validating that the interface id is known to the cluster.
/// </summary>
/// <exception cref="ArgumentException">Thrown when the interface id is unknown.</exception>
public async Task SetSelectorStrategy(int interfaceId, VersionSelectorStrategy strategy)
{
    CheckIfIsExistingInterface(interfaceId);
    await SetStrategy(
        store => store.SetSelectorStrategy(interfaceId, strategy),
        siloControl => siloControl.SetSelectorStrategy(interfaceId, strategy));
}
/// <summary>
/// Returns the total number of grain activations across every silo reported
/// by <c>GetHosts(true)</c>.
/// </summary>
public async Task<int> GetTotalActivationCount()
{
    Dictionary<SiloAddress, SiloStatus> hosts = await GetHosts(true);
    // One activation-count request per silo, awaited together, then summed.
    var countTasks = hosts.Keys
        .Select(silo => GetSiloControlReference(silo).GetActivationCount())
        .ToList();
    await Task.WhenAll(countTasks);
    return countTasks.Sum(task => task.Result);
}
/// <summary>
/// Sends a control command to the named provider on every silo and returns
/// the per-silo results.
/// </summary>
/// <param name="providerTypeFullName">Full type name of the provider.</param>
/// <param name="providerName">Configured name of the provider instance.</param>
/// <param name="command">Provider-specific command code.</param>
/// <param name="arg">Provider-specific command argument.</param>
public Task<object[]> SendControlCommandToProvider(string providerTypeFullName, string providerName, int command, object arg)
{
    return ExecutePerSiloCall(isc => isc.SendControlCommandToProvider(providerTypeFullName, providerName, command, arg),
        String.Format("SendControlCommandToProvider of type {0} and name {1} command {2}.", providerTypeFullName, providerName, command));
}
/// <summary>
/// Validates that the given interface id is registered in the cluster grain
/// interface map.
/// </summary>
/// <exception cref="ArgumentException">Thrown when the interface id is unknown.</exception>
private void CheckIfIsExistingInterface(int interfaceId)
{
    Type unused;
    var interfaceMap = this.grainTypeManager.ClusterGrainInterfaceMap;
    if (!interfaceMap.TryGetServiceInterface(interfaceId, out unused))
    {
        // Fix: the message was missing the closing quote after the id.
        throw new ArgumentException($"Interface code '{interfaceId}' not found", nameof(interfaceId));
    }
}
/// <summary>
/// Persists a version-strategy change via <paramref name="storeFunc"/>, then
/// applies it to every silo via <paramref name="applyFunc"/> on a best-effort
/// basis: per-silo failures are deliberately swallowed.
/// </summary>
private async Task SetStrategy(Func<IVersionStore, Task> storeFunc, Func<ISiloControl, Task> applyFunc)
{
    await storeFunc(versionStore);
    var silos = GetSiloAddresses(null);
    var actionPromises = PerformPerSiloAction(
        silos,
        s => applyFunc(GetSiloControlReference(s)));
    try
    {
        await Task.WhenAll(actionPromises);
    }
    catch (Exception)
    {
        // ignored: silos that failed to set the new strategy will reload it from the storage
        // in the future.
    }
}
/// <summary>
/// Executes <paramref name="action"/> against the silo control of every host
/// reported by <c>GetHosts(true)</c> and returns the results, one per silo.
/// </summary>
/// <param name="action">Call to perform on each silo's control target.</param>
/// <param name="actionToLog">Human-readable description used for debug logging.</param>
private async Task<object[]> ExecutePerSiloCall(Func<ISiloControl, Task<object>> action, string actionToLog)
{
    var silos = await GetHosts(true);
    if(logger.IsEnabled(LogLevel.Debug))
    {
        logger.Debug("Executing {0} against {1}", actionToLog, Utils.EnumerableToString(silos.Keys));
    }
    var actionPromises = new List<Task<object>>();
    foreach (SiloAddress siloAddress in silos.Keys.ToArray())
        actionPromises.Add(action(GetSiloControlReference(siloAddress)));
    return await Task.WhenAll(actionPromises);
}
/// <summary>
/// Returns the membership table; only supported when the configured status
/// oracle is a <c>MembershipOracle</c>.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown for other oracle types.</exception>
private Task<IMembershipTable> GetMembershipTable()
{
    if (!(this.siloStatusOracle is MembershipOracle)) throw new InvalidOperationException("The current membership oracle does not support detailed silo status reporting.");
    return Task.FromResult(this.membershipTable);
}
/// <summary>
/// Returns the given silo list unchanged when it is non-empty; otherwise
/// falls back to all silos from the oracle's approximate statuses.
/// </summary>
private SiloAddress[] GetSiloAddresses(SiloAddress[] silos)
{
    if (silos != null && silos.Length > 0)
        return silos;
    return this.siloStatusOracle
        .GetApproximateSiloStatuses(true).Select(s => s.Key).ToArray();
}
/// <summary>
/// Perform an action for each silo.
/// </summary>
/// <remarks>
/// Because SiloControl contains a reference to a system target, each method call using that reference
/// will get routed either locally or remotely to the appropriate silo instance auto-magically.
/// </remarks>
/// <param name="siloAddresses">List of silos to perform the action for</param>
/// <param name="perSiloAction">The action function to be performed for each silo</param>
/// <returns>List containing one Task for each silo the action was performed for</returns>
private List<Task> PerformPerSiloAction(SiloAddress[] siloAddresses, Func<SiloAddress, Task> perSiloAction)
{
    // Eagerly start the action on every silo; ToList materializes immediately,
    // matching the original foreach-based behavior.
    return siloAddresses.Select(perSiloAction).ToList();
}
/// <summary>
/// Builds an XmlDocument from XPath-like keys ("A/B/@attr") mapped to values.
/// Paths are rooted at "OrleansConfiguration" (inserted when absent) and must
/// end with an "@attribute" segment.
/// </summary>
/// <exception cref="ArgumentException">Thrown when a key does not end in "@attribute".</exception>
private static XmlDocument XPathValuesToXml(Dictionary<string,string> values)
{
    var doc = new XmlDocument();
    if (values == null) return doc;
    foreach (var p in values)
    {
        var path = p.Key.Split('/').ToList();
        // A leading '/' produces an empty first segment; drop it.
        if (path[0] == "")
            path.RemoveAt(0);
        if (path[0] != "OrleansConfiguration")
            path.Insert(0, "OrleansConfiguration");
        if (!path[path.Count - 1].StartsWith("@"))
            throw new ArgumentException("XPath " + p.Key + " must end with @attribute");
        AddXPathValue(doc, path, p.Value);
    }
    return doc;
}
/// <summary>
/// Recursively walks/creates the element path under <paramref name="xml"/> and
/// sets the final "@attribute" segment as an attribute with the given value.
/// </summary>
/// <exception cref="ArgumentException">
/// Thrown when an attribute segment is not last in the path or is applied to a non-element node.
/// </exception>
private static void AddXPathValue(XmlNode xml, IEnumerable<string> path, string value)
{
    if (path == null) return;
    var first = path.FirstOrDefault();
    if (first == null) return;
    if (first.StartsWith("@"))
    {
        // Attribute segment: must be the terminal path element.
        first = first.Substring(1);
        if (path.Count() != 1)
            throw new ArgumentException("Attribute " + first + " must be last in path");
        var e = xml as XmlElement;
        if (e == null)
            throw new ArgumentException("Attribute " + first + " must be on XML element");
        e.SetAttribute(first, value);
        return;
    }
    // Reuse an existing child element with the same local name, if any.
    foreach (var child in xml.ChildNodes)
    {
        var e = child as XmlElement;
        if (e != null && e.LocalName == first)
        {
            AddXPathValue(e, path.Skip(1), value);
            return;
        }
    }
    // No matching child: create the element and continue down the path.
    var empty = (xml as XmlDocument ?? xml.OwnerDocument).CreateElement(first);
    xml.AppendChild(empty);
    AddXPathValue(empty, path.Skip(1), value);
}
/// <summary>
/// Returns the silo-control system-target reference for the given silo;
/// calls on it are routed to that silo automatically.
/// </summary>
private ISiloControl GetSiloControlReference(SiloAddress silo)
{
    return this.internalGrainFactory.GetSystemTarget<ISiloControl>(Constants.SiloControlId, silo);
}
/// <summary>
/// Returns the multi-cluster oracle, or throws when no multi-cluster network
/// is configured.
/// </summary>
/// <exception cref="OrleansException">Thrown when multi-cluster is not configured.</exception>
private IMultiClusterOracle GetMultiClusterOracle()
{
    if (!this.multiClusterOptions.HasMultiClusterNetwork)
        throw new OrleansException("No multicluster network configured");
    return this.multiClusterOracle;
}
/// <summary>
/// Returns the currently known multi-cluster gateways.
/// </summary>
public Task<List<IMultiClusterGatewayInfo>> GetMultiClusterGateways()
{
    return Task.FromResult(GetMultiClusterOracle().GetGateways().Cast<IMultiClusterGatewayInfo>().ToList());
}
/// <summary>
/// Returns the current multi-cluster configuration as seen by the oracle.
/// </summary>
public Task<MultiClusterConfiguration> GetMultiClusterConfiguration()
{
    return Task.FromResult(GetMultiClusterOracle().GetMultiClusterConfiguration());
}
/// <summary>
/// Injects a new multi-cluster configuration (timestamped with UtcNow) after
/// verifying it is newer than the current one and, optionally, that no silos
/// are lagging behind the current configuration.
/// </summary>
/// <param name="clusters">Cluster ids to include in the new configuration.</param>
/// <param name="comment">Free-form comment stored with the configuration.</param>
/// <param name="checkForLaggingSilosFirst">When true, abort if any silo lags.</param>
/// <exception cref="OrleansException">
/// Thrown when the configuration is not newer, or when the stability check fails.
/// </exception>
public async Task<MultiClusterConfiguration> InjectMultiClusterConfiguration(IEnumerable<string> clusters, string comment = "", bool checkForLaggingSilosFirst = true)
{
    var multiClusterOracle = GetMultiClusterOracle();
    var configuration = new MultiClusterConfiguration(DateTime.UtcNow, clusters.ToList(), comment);
    if (!MultiClusterConfiguration.OlderThan(multiClusterOracle.GetMultiClusterConfiguration(), configuration))
        throw new OrleansException("Could not inject multi-cluster configuration: current configuration is newer than clock");
    if (checkForLaggingSilosFirst)
    {
        try
        {
            var laggingSilos = await multiClusterOracle.FindLaggingSilos(multiClusterOracle.GetMultiClusterConfiguration());
            if (laggingSilos.Count > 0)
            {
                var msg = string.Format("Found unstable silos {0}", string.Join(",", laggingSilos));
                throw new OrleansException(msg);
            }
        }
        catch (Exception e)
        {
            // NOTE: the "unstable silos" exception thrown above is raised inside
            // this try block, so it too is re-wrapped by this catch.
            throw new OrleansException("Could not inject multi-cluster configuration: stability check failed", e);
        }
    }
    await multiClusterOracle.InjectMultiClusterConfiguration(configuration);
    return configuration;
}
/// <summary>
/// Returns the silos that have not yet caught up with the oracle's current
/// multi-cluster configuration.
/// </summary>
public Task<List<SiloAddress>> FindLaggingSilos()
{
    var multiClusterOracle = GetMultiClusterOracle();
    var expected = multiClusterOracle.GetMultiClusterConfiguration();
    return multiClusterOracle.FindLaggingSilos(expected);
}
}
}
| |
//
// Copyright (c)1998-2011 Pearson Education, Inc. or its affiliate(s).
// All rights reserved.
//
using System;
using System.Collections.Generic;
using log4net;
using System.Runtime.CompilerServices;
namespace OpenADK.Library.Impl
{
/// <summary> Default implementation of the Topic interface.
///
/// </summary>
/// <author> Eric Petersen
/// </author>
/// <version> Adk 1.0
/// </version>
internal class TopicImpl : ITopic
{
    /// <summary> logging framework logging category for this topic</summary>
    public ILog log;
    /// <summary> The Subscriber registered with this topic.</summary>
    internal ISubscriber fSub;
    /// <summary> The Publisher registered with this topic.</summary>
    internal IPublisher fPub;
    /// <summary> The QueryResults registered with this topic.</summary>
    internal IQueryResults fQueryResults;
    ///<summary> The SIF data object type associated with this topic
    ///</summary>
    protected IElementDef fObjType;
    /// <summary>
    /// The options for publishing
    /// </summary>
    internal PublishingOptions fPubOpts;
    /// <summary>
    /// The options for subscriptions
    /// </summary>
    internal SubscriptionOptions fSubOpts;
    ///<summary>The SIF Context that this topic is joined to</summary>
    private SifContext fContext;
    /// <summary> The Zones joined with this topic</summary>
    internal List<IZone> fZones = new List<IZone>();
    ///<summary>The options for QueryResults handling</summary>
    public QueryResultsOptions fQueryResultsOptions;
    /// <summary>
    /// Creates a topic for the given object type within the given SIF context
    /// and initializes a per-object-type logger category.
    /// </summary>
    internal TopicImpl(IElementDef objType, SifContext context)
    {
        fObjType = objType;
        fContext = context;
        log = LogManager.GetLogger(Agent.LOG_IDENTIFIER + ".Topic$" + objType.Name);
    }
    /// <summary> Adds a zone to this topic</summary>
    /// <param name="zone">The Zone to join with this topic
    /// </param>
    /// <exception cref="OpenADK.Library.AdkException"> AdkException is thrown if the zone is already joined to a
    /// topic or if there is a SIF error during agent registration.
    /// </exception>
    public void Join(IZone zone)
    {
        lock (this)
        {
            // Check that zone is not already joined with this topic
            if (fZones.Contains(zone))
            {
                AdkUtils._throw
                    (new SystemException
                         ("Zone already joined with topic \"" + fObjType + "\""),
                     ((ZoneImpl) zone).Log);
            }
            // Check that topic has a Provider, Subscriber, or QueryResults object
            if (fSub == null && fPub == null && fQueryResults == null)
            {
                AdkUtils._throw
                    (
                    new SystemException
                        (
                        "Agent has not registered a Subscriber, Publisher, or QueryResults object with this topic"),
                    ((ZoneImpl) zone).Log);
            }
            fZones.Add(zone);
            // Provision immediately only for zones that are already connected.
            if (zone.Connected)
            {
                ((ZoneImpl) zone).Provision();
            }
        }
    }
    /// <summary> Gets the name of the SIF data object type associated with this topic</summary>
    /// <returns> The name of a root level SIF data object such as "StudentPersonal",
    /// "BusInfo", or "LibraryPatronStatus"
    /// </returns>
    public string ObjectType
    {
        get { return fObjType.Name; }
    }
    /// <summary>
    /// see com.OpenADK.Library.Topic#getObjectDef()
    /// </summary>
    public IElementDef ObjectDef
    {
        get { return fObjType; }
    }
    /// <summary>
    /// see com.OpenADK.Library.Topic#getSIFContext()
    /// </summary>
    public SifContext SifContext
    {
        get { return fContext; }
    }
    /// <summary> Gets the zones to which this topic is bound</summary>
    /// <returns> The zone that created this topic instance
    /// </returns>
    [MethodImpl(MethodImplOptions.Synchronized)]
    public IZone[] GetZones()
    {
        return fZones.ToArray();
    }
    /// <summary> Checks that at least one zone is joined with the topic</summary>
    /// <exception cref="AdkException"> is thrown if no zones are joined with this topic
    /// </exception>
    private void _checkZones()
    {
        if (fZones.Count == 0)
        {
            throw new AdkException
                ("No zones are joined with the \"" + fObjType + "\" topic", null);
        }
    }
    /// <summary> Register a publisher of this topic.
    ///
    /// Provisioning messages are sent as follows:
    ///
    /// <ul>
    /// <li>
    /// If the agent is using Adk-managed provisioning, a <c>&lt;
    /// SIF_Provide&gt;</c> message is sent to the ZIS when the
    /// AdkFlags.PROV_PROVIDE flag is specified. When
    /// Adk-managed provisioning is disabled, no messages are sent to
    /// the zone.
    /// </li>
    /// <li>
    /// If Agent-managed provisioning is enabled, the ProvisioningOptions
    /// flags have no affect. The agent must explicitly call the
    /// sifProvide method to manually send those message to the zone.
    /// </li>
    /// <li>
    /// If ZIS-managed provisioning is enabled, no provisioning messages
    /// are sent by the agent regardless of the ProvisioningOptions
    /// used and the methods are called.
    /// </li>
    /// </ul>
    ///
    ///
    /// </summary>
    /// <param name="publisher">An object that implements the <c>Publisher</c>
    /// interface to publish change events and to evaluate SIF queries
    /// received by the agent
    /// </param>
    [MethodImpl(MethodImplOptions.Synchronized)]
    public void SetPublisher(IPublisher publisher)
    {
        SetPublisher( publisher, null );
    }
    /// <summary> Register a publisher of this topic.
    ///
    /// Provisioning messages are sent as follows:
    ///
    /// <ul>
    /// <li>
    /// If the agent is using Adk-managed provisioning, a <c>&lt;
    /// SIF_Provide&gt;</c> message is sent to the ZIS when the
    /// AdkFlags.PROV_PROVIDE flag is specified. When
    /// Adk-managed provisioning is disabled, no messages are sent to
    /// the zone.
    /// </li>
    /// <li>
    /// If Agent-managed provisioning is enabled, the ProvisioningOptions
    /// flags have no affect. The agent must explicitly call the
    /// sifProvide method to manually send those message to the zone.
    /// </li>
    /// <li>
    /// If ZIS-managed provisioning is enabled, no provisioning messages
    /// are sent by the agent regardless of the ProvisioningOptions
    /// used and the methods are called.
    /// </li>
    /// </ul>
    ///
    ///
    /// </summary>
    /// <param name="publisher">An object that implements the <c>Publisher</c>
    /// interface to publish change events and to evaluate SIF queries
    /// received by the agent
    /// </param>
    /// <param name="provisioningOptions">Allows options to be set, such as whether to register
    /// as the default provider of the object type in the zone, and which SIF_Contexts are supported</param>
    [MethodImpl(MethodImplOptions.Synchronized)]
    public void SetPublisher(IPublisher publisher,
                             PublishingOptions provisioningOptions)
    {
        assertProvisioningOptions(provisioningOptions);
        if (publisher == null)
        {
            // Passing null unregisters the publisher and clears its options.
            fPub = null;
            fPubOpts = null;
        }
        else
        {
            fPub = publisher;
            if (provisioningOptions == null)
            {
                provisioningOptions = new PublishingOptions();
            }
            fPubOpts = provisioningOptions;
        }
    }
    /// <summary>Returns the Publisher registered with this topic, or null.</summary>
    [MethodImpl(MethodImplOptions.Synchronized)]
    public IPublisher GetPublisher()
    {
        return fPub;
    }
    /// <summary>Registers a subscriber with default subscription options.</summary>
    [MethodImpl(MethodImplOptions.Synchronized)]
    public void SetSubscriber(ISubscriber subscriber)
    {
        SetSubscriber( subscriber, null );
    }
    /// <summary>
    /// Registers a subscriber with this topic. Passing null unregisters the
    /// current subscriber; null flags default to new SubscriptionOptions().
    /// </summary>
    [MethodImpl(MethodImplOptions.Synchronized)]
    public void SetSubscriber(ISubscriber subscriber,
                              SubscriptionOptions flags)
    {
        assertProvisioningOptions(flags);
        if (subscriber == null)
        {
            fSub = null;
            fSubOpts = null;
        }
        else
        {
            fSub = subscriber;
            if (flags == null)
            {
                flags = new SubscriptionOptions();
            }
            fSubOpts = flags;
        }
    }
    /// <summary>Returns the Subscriber registered with this topic, or null.</summary>
    [MethodImpl(MethodImplOptions.Synchronized)]
    public ISubscriber GetSubscriber()
    {
        return fSub;
    }
    /// <summary>Registers a QueryResults handler with default options.</summary>
    [MethodImpl(MethodImplOptions.Synchronized)]
    public void SetQueryResults(IQueryResults results )
    {
        SetQueryResults( results, null );
    }
    /// <summary>
    /// Registers a QueryResults handler with this topic. Passing null
    /// unregisters it; null flags default to new QueryResultsOptions().
    /// </summary>
    [MethodImpl(MethodImplOptions.Synchronized)]
    public void SetQueryResults(IQueryResults results, QueryResultsOptions flags)
    {
        assertProvisioningOptions(flags);
        if (results == null)
        {
            fQueryResults = null;
            fQueryResultsOptions = null;
        }
        else
        {
            fQueryResults = results;
            if (flags == null){
                flags = new QueryResultsOptions();
            }
            fQueryResultsOptions = flags;
        }
    }
    /// <summary>Returns the QueryResults handler registered with this topic, or null.</summary>
    [MethodImpl(MethodImplOptions.Synchronized)]
    public IQueryResults GetQueryResultsObject()
    {
        return fQueryResults;
    }
    /// <summary>
    /// Rejects provisioning options that declare more than one SIF Context;
    /// a topic instance is bound to exactly one context.
    /// </summary>
    private void assertProvisioningOptions(ProvisioningOptions opts)
    {
        if (opts != null && opts.SupportedContexts.Count > 1)
        {
            throw new ArgumentException("Cannot provision a single topic for more than one SIF Context.\r\n" +
                                        "To use Topics with multiple SIF contexts, call TopicFactory.getInstance( ElementDef, SIFContext ).");
        }
    }
    /// <summary> Publishes a change in topic data by sending a SIF_Event to all zones
    /// joined with this topic
    ///
    /// This method is useful for communicating a single change event. If an
    /// agent changes data that spans several object types, it should consider
    /// using the BatchEvent class to publish changes as a group. BatchEvent
    /// aggregates changes in multiple SIF data objects, then sends a single
    /// SIF_Event message to each zone. This is much more efficient than calling
    /// the publishChange method of each Topic, which results in a single
    /// SIF_Event message being sent for each object type. Another alternative
    /// is to call the publishChange method of each Zone directly. That method
    /// accepts an Event object, which can describe changes in multiple data
    /// objects.
    ///
    /// </summary>
    /// <param name="data">The data that has changed. The objects in this array must all
    /// be of the same SIF object type (e.g. all <c>StudentPersonal</c>
    /// objects if this topic encapsulates the "StudentPersonal" object type),
    /// and must all communicate the same state change (i.e. all added,
    /// all changed, or all deleted).
    ///
    /// </param>
    [MethodImpl(MethodImplOptions.Synchronized)]
    public void PublishEvent(Event data)
    {
        // Errors from individual zones are aggregated into a single
        // AdkMessagingException thrown after all zones were attempted.
        AdkMessagingException err = null;
        _checkZones();
        foreach (ZoneImpl z in fZones)
        {
            try
            {
                z.fPrimitives.SifEvent(z, data, null, null);
            }
            catch (Exception th)
            {
                if (err == null)
                {
                    err =
                        new AdkMessagingException
                            ("Error publishing event to topic \"" + fObjType + "\"", z);
                }
                if (th is AdkException)
                {
                    err.Add(th);
                }
                else
                {
                    err.Add(new AdkMessagingException(th.ToString(), z));
                }
            }
        }
        if (err != null)
        {
            AdkUtils._throw(err, Agent.GetLog());
        }
    }
    /// <summary>Issues a query with no listener, destination, or options.</summary>
    public void Query(Query query)
    {
        Query(query, null, null, 0);
    }
    /// <summary>Issues a query with a messaging listener.</summary>
    public void Query(Query query,
                      IMessagingListener listener)
    {
        Query(query, listener, null, 0);
    }
    /// <summary>Issues a query with the given query options.</summary>
    public void Query(Query query,
                      AdkQueryOptions queryOptions)
    {
        Query(query, null, null, queryOptions);
    }
    /// <summary>Issues a query with a listener and query options.</summary>
    public void Query(Query query,
                      IMessagingListener listener,
                      AdkQueryOptions queryOptions)
    {
        Query(query, listener, null, queryOptions);
    }
    /// <summary>Issues a query directed at a specific destination agent.</summary>
    public void Query(Query query,
                      string destinationId,
                      AdkQueryOptions queryOptions)
    {
        Query(query, null, destinationId, queryOptions);
    }
    /// <summary>
    /// Validates the query's object type and SIF context against this topic,
    /// then sends a SIF_Request to every joined zone, aggregating per-zone
    /// failures into one AdkMessagingException.
    /// </summary>
    [MethodImpl(MethodImplOptions.Synchronized)]
    public void Query(Query query,
                      IMessagingListener listener,
                      string destinationId,
                      AdkQueryOptions queryOptions)
    {
        if (query == null)
        {
            AdkUtils._throw
                (new ArgumentException("Query object cannot be null"), Agent.GetLog());
        }
        // Validate that the query object type and SIF Context are valid for this Topic
        if (query.ObjectType != fObjType)
        {
            AdkUtils._throw(new ArgumentException("Query object type: {" + query.ObjectTag +
                                                  "} does not match Topic object type: " + fObjType + "}"), log);
        }
        if (!query.SifContext.Equals(fContext))
        {
            AdkUtils._throw(new ArgumentException("Query SIF_Context: {" + query.SifContext +
                                                  "} does not match Topic SIF_Context: " + fContext + "}"), log);
        }
        _checkZones();
        AdkMessagingException err = null;
        // Send the SIF_Request to each zone
        foreach (ZoneImpl z in fZones)
        {
            try
            {
                z.Query(query, listener, destinationId, queryOptions);
            }
            catch (Exception th)
            {
                if (err == null)
                {
                    err =
                        new AdkMessagingException
                            ("Error querying topic \"" + fObjType + "\"", z);
                }
                if (th is AdkException)
                {
                    err.Add(th);
                }
                else
                {
                    err.Add(new AdkMessagingException(th.ToString(), z));
                }
            }
        }
        if (err != null)
        {
            AdkUtils._throw(err, Agent.GetLog());
        }
    }
    /// <summary>
    /// Intentionally a no-op in this implementation: topic-level queue purging
    /// is not supported here.
    /// </summary>
    public void PurgeQueue(bool incoming,
                           bool outgoing)
    {
    }
    /// <summary>Returns "Topic:" followed by the object type name.</summary>
    public override string ToString()
    {
        return "Topic:" + fObjType.Name;
    }
}
}
// Synchronized with Branch_Library-ADK-2.1.0.Version_3.TopicImpl.java
| |
// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
namespace TiltBrushToolkit {
/// Exception thrown when FBX parsing fails or an expected node is missing.
public class FbxError : Exception {
  public FbxError(string message) : base(message) {}
}
/// A simple "major.minor" version with value semantics and ordering.
public struct Version {
  public int major;
  public int minor;

  /// Parses the leading "N.N" of <paramref name="value"/>; trailing text is ignored.
  /// Throws ArgumentException when the string does not start with "N.N".
  public static Version Parse(string value) {
    Match match = Regex.Match(value, @"^([0-9]+)\.([0-9]+)");
    if (! match.Success) { throw new ArgumentException("Cannot parse"); }
    return new Version {
      major = int.Parse(match.Groups[1].Value),
      minor = int.Parse(match.Groups[2].Value)
    };
  }

  // The rest of this is boilerplate
  public static bool operator <(Version lhs, Version rhs) {
    if (lhs.major != rhs.major) {
      return lhs.major < rhs.major;
    }
    return lhs.minor < rhs.minor;
  }
  public static bool operator >(Version lhs, Version rhs) {
    if (lhs.major != rhs.major) {
      return lhs.major > rhs.major;
    }
    return lhs.minor > rhs.minor;
  }
  public static bool operator ==(Version lhs, Version rhs) {
    return (lhs.major == rhs.major && lhs.minor == rhs.minor);
  }
  public static bool operator !=(Version lhs, Version rhs) {
    return !(lhs == rhs);
  }
  public override string ToString() {
    return string.Format("{0}.{1}", major, minor);
  }
  public override bool Equals(object rhs) {
    // Fix: the original hard-cast rhs, throwing on null or non-Version
    // arguments; Equals(object) should return false in those cases.
    return rhs is Version other && this == other;
  }
  public override int GetHashCode() {
    // Fix: the original returned a constant 0, degrading any hash container.
    // Combine both fields; equal versions still hash equally.
    return (major * 397) ^ minor;
  }
}
/// Simple and limited fbx reading code - we only need enough to read the 'Tilt Brush' property.
/// FBX format ref found here. Note that it's _not_ correct.
/// https://code.blender.org/2013/08/fbx-binary-file-format-specification/
/// This reference is better:
/// https://github.com/ideasman42/pyfbx_i42/blob/master/pyfbx/parse_bin.py
/// The correct definition is:
///
/// FBX : FILE_HEADER NODE_LIST
/// FILE_HEADER : "Kaydara FBX Binary " 0x00 0x1a 0x00 int32<version>
/// NODE_LIST : NODE+ NULLNODE
/// NODE : NODE_HEADER NODE_PROPERTIES NODE_LIST?
/// presence of NODE_LIST is detected by examining header.end
/// NODE_HEADER :
/// uint32 end absolute offset of node end
/// uint32 props_count number of properties in property list
/// uint32 props_bytes number of bytes in property list
/// uint8-length-prefixed string
/// NODE_PROPERTIES :
/// props_bytes bytes of properties (format not documented here)
/// NULLNODE : 0x00 * 13
public static class FbxUtils {
  // Size in bytes of a NULLNODE record (13 zero bytes) terminating a NODE_LIST.
  const int kNodeHeaderSize = 13;
  // Node-name path leading to the user properties ("P" nodes) in both formats.
  static readonly string[] kFbxUserPropertiesPath =
      new[] {"FBXHeaderExtension", "SceneInfo", "Properties70"};
  // NOTE(review): not referenced anywhere in this class — possibly dead; confirm
  // before removing.
  const string kFbxUserPropertiesPath2 =
      "FBXHeaderExtension/CreationTimeStamp/SceneInfo/MetaData/Properties70";
  public struct FbxInfo {
    public bool isFbx;
    /// null if not created by Tilt Brush, or version not found
    public Version? tiltBrushVersion;
    /// null if "RequiredToolkitVersion" not found
    public Version? requiredToolkitVersion;
  }
  // Looks up key in dict and parses the value as a Version; null when the key
  // is absent or the value does not parse.
  static Version? FromDict(Dictionary<string, string> dict, string key) {
    string value;
    if (! dict.TryGetValue(key, out value)) { return null; }
    try {
      return Version.Parse(value);
    } catch (ArgumentException) {
      return null;
    }
  }
  /// Given a path to an arbitrary file, return some info about that file.
  public static FbxInfo GetTiltBrushFbxInfo(string path, bool force=false) {
    FbxInfo info = new FbxInfo();
    // Only inspect files with a .fbx extension unless forced.
    if (force || path.ToLowerInvariant().EndsWith(".fbx")) {
      IEnumerable<KeyValuePair<string, string>> propsIter = null;
      if (IsBinaryFbx(path)) {
        propsIter = IterUserPropertiesBinary(path);
      } else if (IsAsciiFbx(path)) {
        propsIter = IterUserPropertiesAscii(path);
      }
      if (propsIter != null) {
        info.isFbx = true;
        var props = new Dictionary<string, string>();
        try {
          foreach (var pair in propsIter) {
            props[pair.Key] = pair.Value;
          }
        } catch (FbxError) {
          // Can't find any properties
        }
        string name;
        if (props.TryGetValue("Original|ApplicationName", out name) && name == "Tilt Brush") {
          info.tiltBrushVersion = FromDict(
              props, "Original|ApplicationVersion");
        }
        info.requiredToolkitVersion = FromDict(
            props, "Original|RequiredToolkitVersion");
      }
    }
    return info;
  }
  //
  // Binary FBX support
  //
  /// Returns true if the file might be a binary-format FBX
  static bool IsBinaryFbx(string path) {
    try {
      using (var file = new FileStream(path, FileMode.Open, FileAccess.Read))
      using (var reader = new BinaryReader(file)) {
        return ReadHeader(reader);
      }
    } catch (Exception) {
      // Unreadable / too-short files are simply "not binary fbx".
      return false;
    }
  }
  /// Returns true if the header was read properly and looks like a binary fbx
  static bool ReadHeader(BinaryReader reader) {
    string firstTwenty = System.Text.Encoding.ASCII.GetString(reader.ReadBytes(20));
    if ((firstTwenty != "Kaydara FBX Binary ")
        || (reader.ReadByte() != 0x00)
        || (reader.ReadByte() != 0x1a)
        || (reader.ReadByte() != 0x00)) {
      return false;
    }
    reader.ReadUInt32();  // Version - unneeded
    return true;
  }
  struct NodeHeader {
    public uint endOffset;      // absolute
    public uint propertyCount;  // number of properties
    public uint propertyBytes;  // number of bytes in property list
    public string name;
    // A NULLNODE (all-zero header) terminates a NODE_LIST.
    public bool IsNull { get { return endOffset == 0; } }
  }
  // Reads a uint8-length-prefixed ASCII string at the current position.
  static string ReadUint8String(BinaryReader reader) {
    int len = reader.ReadByte();  // read the name length
    return System.Text.Encoding.ASCII.GetString(reader.ReadBytes(len));
  }
  // Reads a NODE_HEADER record at the current position.
  static NodeHeader ReadNodeHeader(BinaryReader reader) {
    return new NodeHeader {
      endOffset = reader.ReadUInt32(),
      propertyCount = reader.ReadUInt32(),
      propertyBytes = reader.ReadUInt32(),
      name = ReadUint8String(reader)
    };
  }
  // Returns header of child node of the given name and positions reader at the end
  // of the child node's header.
  // On error, throws FbxError and reader is undefined.
  static NodeHeader FindChildNodeBinary(BinaryReader reader, string name) {
    while (true) {
      NodeHeader child = ReadNodeHeader(reader);
      if (child.name == name) {
        return child;
      } else if (child.IsNull) {
        // Hit the NODE_LIST terminator without finding the child.
        throw new FbxError(name);
      }
      // Not a match: skip over this entire node (properties and children).
      reader.BaseStream.Seek(child.endOffset, SeekOrigin.Begin);
    }
  }
  // Returns all properties that can be parsed.
  // Leaves reader in undefined position (because maybe not all could be parsed).
  // Most of the time, it should be positioned at the beginning of the NODE_LIST.
  static List<string> ReadAllProperties(BinaryReader reader, NodeHeader header) {
    var props = new List<string>();
    for (int i = 0; i < header.propertyCount; ++i) {
      char type = reader.ReadChar();
      // Don't understand anything but strings
      if (type != 'S') { break; }
      int length = reader.ReadInt32();
      if (length < 0) { break; }
      props.Add(System.Text.Encoding.ASCII.GetString(reader.ReadBytes(length)));
    }
    return props;
  }
  /// Throws FbxError if user properties can't be found
  static IEnumerable<KeyValuePair<string, string>>
      IterUserPropertiesBinary(string path) {
    using (var file = new FileStream(path, FileMode.Open, FileAccess.Read))
    using (var reader = new BinaryReader(file)) {
      if (! ReadHeader(reader)) {
        yield break;
      }
      NodeHeader header = new NodeHeader();
      foreach (string name in kFbxUserPropertiesPath) {
        header = FindChildNodeBinary(reader, name);
        // Skip over the properties to get to the NODE_LIST
        reader.BaseStream.Seek(header.propertyBytes, SeekOrigin.Current);
      }
      // A user-property is actually a child node with N properties.
      // The 1st property is the user-property name
      // The Nth property is the user-property value
      while (true) {
        NodeHeader propNode = ReadNodeHeader(reader);
        if (propNode.IsNull) {
          // NODE_LIST is null-terminated
          break;
        }
        // This indicates a user-property node
        UnityEngine.Debug.Assert(propNode.name == "P");
        List<string> props = ReadAllProperties(reader, propNode);
        // Skip any child nodes or properties that ReadAllProperties wasn't able to parse
        reader.BaseStream.Seek(propNode.endOffset, SeekOrigin.Begin);
        yield return new KeyValuePair<string, string>(props[0], props[props.Count-1]);
      }
    }
  }
  //
  // Ascii FBX support
  //
  /// Returns true if the file might be an ASCII-format FBX
  static bool IsAsciiFbx(string path) {
    using (var file = new FileStream(path, FileMode.Open, FileAccess.Read))
    using (var reader = new StreamReader(file)) {
      return reader.ReadLine().StartsWith("; FBX");
    }
  }
  // Positions reader after the opening line of the requested child node.
  // On error, throws FbxError and position of reader is undefined.
  static void FindChildNodeAscii(StreamReader reader, string name) {
    // Open-curly with a prefixed name
    Regex startRgx = new Regex(@"^\s*([^:]+):.*{");
    // Close-curly
    Regex endRgx = new Regex(@"^\s*}");
    // Tracks brace nesting so only depth-1 nodes are matched against name.
    int depth = 0;
    while (true) {
      string line = reader.ReadLine();
      if (line == null) {
        throw new FbxError(name);
      }
      if (startRgx.Match(line).Success) {
        depth += 1;
        if (depth == 1) {
          string matched = startRgx.Match(line).Groups[1].Value;
          if (matched == name) {
            return;
          }
        }
      } else if (endRgx.Match(line).Success) {
        depth -= 1;
        if (depth < 0) {
          throw new FbxError(name);
        }
      }
    }
  }
  /// Throws FbxError if user properties can't be found
  static public IEnumerable<KeyValuePair<string, string>>
      IterUserPropertiesAscii(string path) {
    using (var file = new FileStream(path, FileMode.Open, FileAccess.Read))
    using (var reader = new StreamReader(file)) {
      if (!reader.ReadLine().StartsWith(";")) {
        yield break;
      }
      foreach (string name in kFbxUserPropertiesPath) {
        FindChildNodeAscii(reader, name);
      }
      // Now at the P: section.
      // We only care about pure-string properties, so we can assume everything is in ""
      // Fbx uses &quot; to escape double-quotes, eg
      // P: "Original|ApplicationVendor", "KString", "", "", "&quot;Google'"
      // We use this rather complicated regex to parse because the strings may contain ","
      Regex propertyRgx = new Regex(@"^\s*P: ""(?<first>[^""]*)""(, ""(?<rest>[^""]*)"")+");
      Regex endRgx = new Regex(@"^\s*}");
      while (true) {
        string line = reader.ReadLine();
        if (line == null) {
          yield break;
        }
        Match match = endRgx.Match(line);
        if (match.Success) {
          yield break;
        }
        match = propertyRgx.Match(line);
        if (match.Success) {
          string key = match.Groups["first"].Value;
          IEnumerable<Capture> captures = match.Groups["rest"].Captures.Cast<Capture>();
          string value = captures.Select(c => c.Value).Last();
          yield return new KeyValuePair<string, string>(key, value);
        } else {
          // Probably some non-string property -- ignore
        }
      }
    }
  }
}
}
| |
/* ---------------------------------------------------------------------------
*
* Copyright (c) Routrek Networks, Inc. All Rights Reserved..
*
* This file is a part of the Granados SSH Client Library that is subject to
* the license included in the distributed package.
* You may not use this file except in compliance with the license.
*
* ---------------------------------------------------------------------------
*/
using System;
using System.Text;
using System.IO;
using Routrek.PKI;
namespace Routrek.SSHC {
////////////////////////////////////////////////////////////
/// read/write primitive types
///
// Sequential big-endian reader over an SSH packet image. Subclasses supply
// the protocol-version-specific multi-precision-integer format.
internal abstract class SSHDataReader {
    protected byte[] _data;    // backing buffer
    protected int _offset;     // current read position within _data
    public SSHDataReader(byte[] image) {
        _data = image;
        _offset = 0;
    }
    // The underlying buffer (not a copy).
    public byte[] Image {
        get {
            return _data;
        }
    }
    // Current read position.
    public int Offset {
        get {
            return _offset;
        }
    }
    // Reads a big-endian 32-bit integer; throws IOException on短 buffer.
    public int ReadInt32() {
        if(_offset+3>=_data.Length) throw new IOException(Strings.GetString("UnexpectedEOF"));
        int ret = (((int)_data[_offset])<<24) + (((int)_data[_offset+1])<<16) + (((int)_data[_offset+2])<<8) + _data[_offset+3];
        _offset += 4;
        return ret;
    }
    // Reads one byte; throws IOException at end of buffer.
    public byte ReadByte() {
        if(_offset>=_data.Length) throw new IOException(Strings.GetString("UnexpectedEOF"));
        return _data[_offset++];
    }
    // Reads one byte as a boolean: zero is false, anything else is true.
    public bool ReadBool() {
        if(_offset>=_data.Length) throw new IOException(Strings.GetString("UnexpectedEOF"));
        return _data[_offset++]==0? false : true;
    }
    /**
     * multi-precise integer
     */
    public abstract BigInteger ReadMPInt();
    // Reads a uint32-length-prefixed byte string.
    public byte[] ReadString() {
        int length = ReadInt32();
        return Read(length);
    }
    // Reads exactly length bytes; throws IOException if the buffer runs out.
    public byte[] Read(int length) {
        byte[] image = new byte[length];
        for(int i=0; i<image.Length; i++) {
            if(_offset==_data.Length) throw new IOException(Strings.GetString("UnexpectedEOF"));
            image[i] = _data[_offset++];
        }
        return image;
    }
    // Returns a copy of all remaining bytes without advancing the offset.
    public byte[] ReadAll() {
        byte[] t = new byte[_data.Length - _offset];
        Array.Copy(_data, _offset, t, 0, t.Length);
        return t;
    }
    // Number of unread bytes remaining.
    public int Rest {
        get {
            return _data.Length - _offset;
        }
    }
}
// Growable big-endian (network byte order) writer used to build SSH packets.
// The abstract Write(BigInteger) lets SSH1/SSH2 subclasses choose their own
// mpint wire encoding.
internal abstract class SSHDataWriter : IKeyWriter {

    protected MemoryStream _strm;

    public SSHDataWriter() {
        _strm = new MemoryStream(512);
    }

    // Snapshot of everything written so far.
    public byte[] ToByteArray() {
        return _strm.ToArray();
    }

    public long Length {
        get { return _strm.Length; }
    }

    public void Write(byte[] data) {
        _strm.Write(data, 0, data.Length);
    }

    public void Write(byte[] data, int offset, int count) {
        _strm.Write(data, offset, count);
    }

    public void Write(byte data) {
        _strm.WriteByte(data);
    }

    // Booleans are a single byte: 1 for true, 0 for false.
    public void Write(bool data) {
        _strm.WriteByte(data ? (byte)1 : (byte)0);
    }

    // 32-bit integer, big-endian: most significant byte first.
    public void Write(int data) {
        uint v = (uint)data;
        _strm.WriteByte((byte)(v >> 24));
        _strm.WriteByte((byte)(v >> 16));
        _strm.WriteByte((byte)(v >> 8));
        _strm.WriteByte((byte)v);
    }

    // Protocol-specific multiple-precision integer encoding.
    public abstract void Write(BigInteger data);

    // SSH "string": 32-bit length prefix followed by the ASCII bytes.
    public void Write(string data) {
        Write(data.Length);
        if (data.Length > 0) {
            Write(Encoding.ASCII.GetBytes(data));
        }
    }

    // Writes a byte array with a 32-bit length prefix (SSH "string" form).
    public void WriteAsString(byte[] data) {
        Write(data.Length);
        if (data.Length > 0) {
            Write(data);
        }
    }

    // Length-prefixed write of a sub-range of 'data'.
    public void WriteAsString(byte[] data, int offset, int length) {
        Write(length);
        if (length > 0) {
            Write(data, offset, length);
        }
    }
}
}
namespace Routrek.SSHCV1 {
internal class SSH1DataReader : Routrek.SSHC.SSHDataReader {
public SSH1DataReader(byte[] image) : base(image) {}
// SSH1 mpint: a 16-bit big-endian bit count followed by ceil(bits/8)
// magnitude bytes.
public override BigInteger ReadMPInt() {
//first 2 bytes describes the bit count
// NOTE(review): no EOF check before touching _data[_offset+1]; a truncated
// packet surfaces as IndexOutOfRangeException instead of the IOException the
// other read methods throw -- confirm this is acceptable.
int bits = (((int)_data[_offset])<<8) + _data[_offset+1];
_offset += 2;
return new BigInteger(Read((bits+7) / 8));
}
}
internal class SSH1DataWriter : Routrek.SSHC.SSHDataWriter {
// SSH1 mpint: a 16-bit big-endian bit count followed by the magnitude bytes,
// with a single leading zero (sign-padding) byte stripped if present.
public override void Write(BigInteger data) {
byte[] image = data.getBytes();
// drop one leading zero byte so only the magnitude is emitted
// (assumes image is non-empty; getBytes yields at least one byte -- TODO confirm)
int off = (image[0]==0? 1 : 0);
// bit count is reported as 8 * byte-length, not the exact bit length
int len = (image.Length-off) * 8;
int a = len & 0x0000FF00;
a >>= 8;
_strm.WriteByte((byte)a);
a = len & 0x000000FF;
_strm.WriteByte((byte)a);
_strm.Write(image,off,image.Length-off);
}
}
}
namespace Routrek.SSHCV2 {
internal class SSH2DataReader : Routrek.SSHC.SSHDataReader {
public SSH2DataReader(byte[] image) : base(image) {}
//SSH2 Key File Only
// Reads a 32-bit bit count followed by ceil(bits/8) magnitude bytes.
public BigInteger ReadBigIntWithBits() {
int bits = ReadInt32();
int bytes = (bits + 7) / 8;
return new BigInteger(Read(bytes));
}
// SSH2 mpint is a length-prefixed byte string (see ReadString).
public override BigInteger ReadMPInt() {
return new BigInteger(ReadString());
}
// Reads one byte and maps it to the SSH2 packet-type enum.
public PacketType ReadPacketType() {
return (PacketType)ReadByte();
}
}
internal class SSH2DataWriter : Routrek.SSHC.SSHDataWriter {
//writes mpint in SSH2 format
public override void Write(BigInteger data) {
byte[] t = data.getBytes();
int len = t.Length;
// if the high bit of the first byte is set, prepend a zero byte so the
// value is not misread as negative (two's-complement sign rule)
if(t[0] >= 0x80) {
Write(++len);
Write((byte)0);
}
else
Write(len);
Write(t);
}
// Key-file form: 32-bit bit count followed by the magnitude bytes.
public void WriteBigIntWithBits(BigInteger bi) {
Write(bi.bitCount());
Write(bi.getBytes());
}
// A packet type is a single byte on the wire.
public void WritePacketType(PacketType pt) {
Write((byte)pt);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Data.Common;
using System.Diagnostics;
using System.Threading;
using System.Transactions;
namespace System.Data.ProviderBase
{
internal abstract partial class DbConnectionInternal
{
// True while this (closed) connection is parked waiting for its delegated
// transaction to end; see SetInStasis / TerminateStasis.
private bool _isInStasis;
private Transaction _enlistedTransaction; // [usage must be thread-safe] the transaction that we're enlisted in, either manually or automatically
// _enlistedTransaction is a clone, so that transaction information can be queried even if the original transaction object is disposed.
// However, there are times when we need to know if the original transaction object was disposed, so we keep a reference to it here.
// This field should only be assigned a value at the same time _enlistedTransaction is updated.
// Also, this reference should not be disposed, since we aren't taking ownership of it.
private Transaction _enlistedTransactionOriginal;
/// <summary>
/// The transaction this connection is enlisted in (a clone; see field notes).
/// Setting a different transaction swaps the stored clone under lock,
/// disposes any clone no longer referenced, and subscribes to the new
/// transaction's completion event.
/// </summary>
protected internal Transaction EnlistedTransaction
{
get
{
return _enlistedTransaction;
}
set
{
Transaction currentEnlistedTransaction = _enlistedTransaction;
if (((null == currentEnlistedTransaction) && (null != value))
|| ((null != currentEnlistedTransaction) && !currentEnlistedTransaction.Equals(value)))
{ // WebData 20000024
// Pay attention to the order here:
// 1) defect from any notifications
// 2) replace the transaction
// 3) re-enlist in notifications for the new transaction
// SQLBUDT #230558 we need to use a clone of the transaction
// when we store it, or we'll end up keeping it past the
// duration of the using block of the TransactionScope
Transaction valueClone = null;
Transaction previousTransactionClone = null;
try
{
if (null != value)
{
valueClone = value.Clone();
}
// NOTE: rather than take locks around several potential round-
// trips to the server, and/or virtual function calls, we simply
// presume that you aren't doing something illegal from multiple
// threads, and check once we get around to finalizing things
// inside a lock.
lock (this)
{
// NOTE: There is still a race condition here, when we are
// called from EnlistTransaction (which cannot re-enlist)
// instead of EnlistDistributedTransaction (which can),
// however this should have been handled by the outer
// connection which checks to ensure that it's OK. The
// only case where we have the race condition is multiple
// concurrent enlist requests to the same connection, which
// is a bit out of line with something we should have to
// support.
// enlisted transaction can be nullified in Dispose call without lock
previousTransactionClone = Interlocked.Exchange(ref _enlistedTransaction, valueClone);
_enlistedTransactionOriginal = value;
value = valueClone;
valueClone = null; // we've stored it, don't dispose it.
}
}
finally
{
// we really need to dispose our clones; they may have
// native resources and GC may not happen soon enough.
// VSDevDiv 479564: don't dispose if still holding reference in _enlistedTransaction
if (null != previousTransactionClone &&
!object.ReferenceEquals(previousTransactionClone, _enlistedTransaction))
{
previousTransactionClone.Dispose();
}
if (null != valueClone && !object.ReferenceEquals(valueClone, _enlistedTransaction))
{
valueClone.Dispose();
}
}
// I don't believe that we need to lock to protect the actual
// enlistment in the transaction; it would only protect us
// against multiple concurrent calls to enlist, which really
// isn't supported anyway.
if (null != value)
{
TransactionOutcomeEnlist(value);
}
}
}
}
/// <summary>
/// Get boolean value that indicates whether the enlisted transaction has been disposed.
/// </summary>
/// <value>
/// True if there is an enlisted transaction, and it has been disposed.
/// False if there is an enlisted transaction that has not been disposed, or if the transaction reference is null.
/// </value>
/// <remarks>
/// This method must be called while holding a lock on the DbConnectionInternal instance.
/// </remarks>
protected bool EnlistedTransactionDisposed
{
get
{
// Until the Transaction.Disposed property is public it is necessary to access a member
// that throws if the object is disposed to determine if in fact the transaction is disposed.
try
{
bool disposed;
Transaction currentEnlistedTransactionOriginal = _enlistedTransactionOriginal;
if (currentEnlistedTransactionOriginal != null)
{
disposed = currentEnlistedTransactionOriginal.TransactionInformation == null;
}
else
{
// Don't expect to get here in the general case,
// Since this getter is called by CheckEnlistedTransactionBinding
// after checking for a non-null enlisted transaction (and it does so under lock).
disposed = false;
}
return disposed;
}
catch (ObjectDisposedException)
{
return true;
}
}
}
// True while this closed connection is held in stasis waiting for its
// delegated transaction to end (see SetInStasis / TerminateStasis).
internal bool IsTxRootWaitingForTxEnd
{
get
{
return _isInStasis;
}
}
// When true (the default), the connection detaches from a completed
// transaction even if the outer connection is still open; see DetachTransaction.
virtual protected bool UnbindOnTransactionCompletion
{
get
{
return true;
}
}
// Is this a connection that must be put in stasis (or is already in stasis) pending the end of it's transaction?
virtual protected internal bool IsNonPoolableTransactionRoot
{
get
{
return false; // if you want to have delegated transactions that are non-poolable, you better override this...
}
}
virtual internal bool IsTransactionRoot
{
get
{
return false; // if you want to have delegated transactions, you better override this...
}
}
virtual protected bool ReadyToPrepareTransaction
{
get
{
return true;
}
}
// Provider-specific activation work; runs when the connection is handed out.
abstract protected void Activate(Transaction transaction);
internal void ActivateConnection(Transaction transaction)
{
// Internal method called from the connection pooler so we don't expose
// the Activate method publicly.
Activate(transaction);
}
// Transitions this internal connection out of the "open" state when the outer
// DbConnection closes: returns it to its pool if pooled, otherwise deactivates
// and disposes it (or parks it in stasis if it is a transaction root).
internal virtual void CloseConnection(DbConnection owningObject, DbConnectionFactory connectionFactory)
{
// The implementation here is the implementation required for the
// "open" internal connections, since our own private "closed"
// singleton internal connection objects override this method to
// prevent anything funny from happening (like disposing themselves
// or putting them into a connection pool)
//
// Derived class should override DbConnectionInternal.Deactivate and DbConnectionInternal.Dispose
// for cleaning up after DbConnection.Close
// protected override void Deactivate() { // override DbConnectionInternal.Close
// // do derived class connection deactivation for both pooled & non-pooled connections
// }
// public override void Dispose() { // override DbConnectionInternal.Close
// // do derived class cleanup
// base.Dispose();
// }
//
// overriding DbConnection.Close is also possible, but must provider for their own synchronization
// public override void Close() { // override DbConnection.Close
// base.Close();
// // do derived class outer connection for both pooled & non-pooled connections
// // user must do their own synchronization here
// }
//
// if the DbConnectionInternal derived class needs to close the connection it should
// delegate to the DbConnection if one exists or directly call dispose
// DbConnection owningObject = (DbConnection)Owner;
// if (null != owningObject) {
// owningObject.Close(); // force the closed state on the outer object.
// }
// else {
// Dispose();
// }
//
////////////////////////////////////////////////////////////////
// DON'T MESS WITH THIS CODE UNLESS YOU KNOW WHAT YOU'RE DOING!
////////////////////////////////////////////////////////////////
Debug.Assert(null != owningObject, "null owningObject");
Debug.Assert(null != connectionFactory, "null connectionFactory");
// if an exception occurs after the state change but before the try block
// the connection will be stuck in OpenBusy state. The commented out try-catch
// block doesn't really help because a ThreadAbort during the finally block
// would just revert the connection to a bad state.
// Open->Closed: guarantee internal connection is returned to correct pool
if (connectionFactory.SetInnerConnectionFrom(owningObject, DbConnectionOpenBusy.SingletonInstance, this))
{
// Lock to prevent race condition with cancellation
lock (this)
{
object lockToken = ObtainAdditionalLocksForClose();
try
{
PrepareForCloseConnection();
DbConnectionPool connectionPool = Pool;
// Detach from enlisted transactions that are no longer active on close
DetachCurrentTransactionIfEnded();
// The singleton closed classes won't have owners and
// connection pools, and we won't want to put them back
// into the pool.
if (null != connectionPool)
{
connectionPool.PutObject(this, owningObject); // PutObject calls Deactivate for us...
// NOTE: Before we leave the PutObject call, another
// thread may have already popped the connection from
// the pool, so don't expect to be able to verify it.
}
else
{
Deactivate(); // ensure we de-activate non-pooled connections, or the data readers and transactions may not get cleaned up...
// To prevent an endless recursion, we need to clear
// the owning object before we call dispose so that
// we can't get here a second time... Ordinarily, I
// would call setting the owner to null a hack, but
// this is safe since we're about to dispose the
// object and it won't have an owner after that for
// certain.
_owningObject.Target = null;
if (IsTransactionRoot)
{
SetInStasis();
}
else
{
Dispose();
}
}
}
finally
{
ReleaseAdditionalLocksForClose(lockToken);
// if a ThreadAbort puts us here then its possible the outer connection will not reference
// this and this will be orphaned, not reclaimed by object pool until outer connection goes out of scope.
connectionFactory.SetInnerConnectionEvent(owningObject, DbConnectionClosedPreviouslyOpened.SingletonInstance);
}
}
}
}
// Releases a connection that was parked pending the end of its delegated
// transaction: pooled connections go back to the pool, ownerless non-pooled
// connections are disposed. (_pooledCount is defined in the other part of
// this partial class; its values are interpreted in the comments below.)
virtual internal void DelegatedTransactionEnded()
{
// Called by System.Transactions when the delegated transaction has
// completed. We need to make closed connections that are in stasis
// available again, or disposed closed/leaked non-pooled connections.
// IMPORTANT NOTE: You must have taken a lock on the object before
// you call this method to prevent race conditions with Clear and
// ReclaimEmancipatedObjects.
if (1 == _pooledCount)
{
// When _pooledCount is 1, it indicates a closed, pooled,
// connection so it is ready to put back into the pool for
// general use.
TerminateStasis(true);
Deactivate(); // call it one more time just in case
DbConnectionPool pool = Pool;
if (null == pool)
{
throw ADP.InternalError(ADP.InternalErrorCode.PooledObjectWithoutPool); // pooled connection does not have a pool
}
pool.PutObjectFromTransactedPool(this);
}
else if (-1 == _pooledCount && !_owningObject.IsAlive)
{
// When _pooledCount is -1 and the owning object no longer exists,
// it indicates a closed (or leaked), non-pooled connection so
// it is safe to dispose.
TerminateStasis(false);
Deactivate(); // call it one more time just in case
// it's a non-pooled connection, we need to dispose of it
// once and for all, or the server will have fits about us
// leaving connections open until the client-side GC kicks
// in.
Dispose();
}
// When _pooledCount is 0, the connection is a pooled connection
// that is either open (if the owning object is alive) or leaked (if
// the owning object is not alive) In either case, we can't muck
// with the connection here.
}
// Dooms the connection and releases the enlisted-transaction clone.
// Note: _enlistedTransactionOriginal is only cleared, never disposed,
// because this class does not own it (see field comments).
public virtual void Dispose()
{
_connectionPool = null;
_connectionIsDoomed = true;
_enlistedTransactionOriginal = null; // should not be disposed
// Dispose of the _enlistedTransaction since it is a clone
// of the original reference.
// VSDD 780271 - _enlistedTransaction can be changed by another thread (TX end event)
Transaction enlistedTransaction = Interlocked.Exchange(ref _enlistedTransaction, null);
if (enlistedTransaction != null)
{
enlistedTransaction.Dispose();
}
}
// Provider-specific enlistment; implemented by the derived connection class.
abstract public void EnlistTransaction(Transaction transaction);
// Cleanup connection's transaction-specific structures (currently used by Delegated transaction).
// This is a separate method because cleanup can be triggered in multiple ways for a delegated
// transaction.
virtual protected void CleanupTransactionOnCompletion(Transaction transaction)
{
}
// Drops our enlistment if the transaction we are enlisted in is no longer
// active (committed, aborted, or part-way through completion).
internal void DetachCurrentTransactionIfEnded()
{
    Transaction current = EnlistedTransaction;
    if (current == null)
    {
        return;
    }

    bool ended;
    try
    {
        ended = current.TransactionInformation.Status != TransactionStatus.Active;
    }
    catch (TransactionException)
    {
        // TransactionInformation.Status throws while the transaction is being
        // processed (mid rollback/commit/etc.); treat that as "ended".
        ended = true;
    }

    if (ended)
    {
        DetachTransaction(current, true);
    }
}
// Detach transaction from connection: clears EnlistedTransaction if it matches
// the given transaction and, when this connection was parked for a delegated
// transaction, kicks off the stasis-release path.
internal void DetachTransaction(Transaction transaction, bool isExplicitlyReleasing)
{
// potentially a multi-threaded event, so lock the connection to make sure we don't enlist in a new
// transaction between compare and assignment. No need to short circuit outside of lock, since failed comparisons should
// be the exception, not the rule.
lock (this)
{
// Detach if detach-on-end behavior, or if outer connection was closed
DbConnection owner = (DbConnection)Owner;
if (isExplicitlyReleasing || UnbindOnTransactionCompletion || null == owner)
{
Transaction currentEnlistedTransaction = _enlistedTransaction;
if (currentEnlistedTransaction != null && transaction.Equals(currentEnlistedTransaction))
{
// setter disposes the stored clone and unhooks notifications
EnlistedTransaction = null;
if (IsTxRootWaitingForTxEnd)
{
DelegatedTransactionEnded();
}
}
}
}
}
// Handle transaction detach, pool cleanup and other post-transaction cleanup tasks associated with
// the completion of the given transaction.
internal void CleanupConnectionOnTransactionCompletion(Transaction transaction)
{
DetachTransaction(transaction, false);
// Let the pool release any transacted-pool bookkeeping for this connection.
DbConnectionPool pool = Pool;
if (null != pool)
{
pool.TransactionEnded(transaction, this);
}
}
// System.Transactions callback raised when a transaction we enlisted in
// completes; runs provider-specific cleanup followed by connection/pool cleanup.
void TransactionCompletedEvent(object sender, TransactionEventArgs e)
{
    Transaction completed = e.Transaction;

    CleanupTransactionOnCompletion(completed);
    CleanupConnectionOnTransactionCompletion(completed);
}
// Subscribe to the transaction's completion event so cleanup runs when it ends.
private void TransactionOutcomeEnlist(Transaction transaction)
{
    // Method-group conversion creates the same delegate as the explicit
    // "new TransactionCompletedEventHandler(...)" form.
    transaction.TransactionCompleted += TransactionCompletedEvent;
}
// Marks this connection as parked ("in stasis") until its delegated
// transaction ends; see IsTxRootWaitingForTxEnd and DelegatedTransactionEnded.
internal void SetInStasis()
{
_isInStasis = true;
}
// Clears the stasis flag. The returningToPool argument is currently unused
// here; callers pass it to document which release path was taken.
private void TerminateStasis(bool returningToPool)
{
_isInStasis = false;
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
// Don't entity encode high chars (160 to 256)
#define ENTITY_ENCODE_HIGH_ASCII_CHARS
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Net.Configuration;
using System.Runtime.Versioning;
using System.Text;
namespace System.Net
{
public static class WebUtility
{
// some consts copied from Char / CharUnicodeInfo since we don't have friend access to those types
private const char HIGH_SURROGATE_START = '\uD800';
private const char LOW_SURROGATE_START = '\uDC00';
private const char LOW_SURROGATE_END = '\uDFFF';
private const int UNICODE_PLANE00_END = 0x00FFFF;
private const int UNICODE_PLANE01_START = 0x10000;
private const int UNICODE_PLANE16_END = 0x10FFFF;
// U+FFFD, substituted for unpaired surrogates during strict encoding
private const int UnicodeReplacementChar = '\uFFFD';
// Both conformance levels are fixed to Strict in the static constructor;
// HtmlDecode / StringRequiresHtmlDecoding branch on these values.
private static readonly UnicodeDecodingConformance s_htmlDecodeConformance;
private static readonly UnicodeEncodingConformance s_htmlEncodeConformance;
static WebUtility()
{
s_htmlDecodeConformance = UnicodeDecodingConformance.Strict;
s_htmlEncodeConformance = UnicodeEncodingConformance.Strict;
}
#region HtmlEncode / HtmlDecode methods
// An entity candidate ends at ';'; a second '&' before that means the
// first '&' was literal (see HtmlDecode).
private static readonly char[] s_htmlEntityEndingChars = new char[] { ';', '&' };
// HTML-encodes a string, returning the original instance unchanged (and
// unallocated) when it contains nothing that needs escaping.
public static string HtmlEncode(string value)
{
    if (String.IsNullOrEmpty(value))
    {
        return value;
    }

    // Fast path: scan first so we only allocate a writer when required.
    if (IndexOfHtmlEncodingChars(value, 0) == -1)
    {
        return value;
    }

    LowLevelStringWriter encoded = new LowLevelStringWriter();
    HtmlEncode(value, encoded);
    return encoded.ToString();
}
// Core HTML-encoding loop. Emits the prefix that needs no escaping verbatim,
// then replaces markup-sensitive characters with named/numeric entities,
// numeric-escapes high-ASCII characters (160-255), and -- in strict mode --
// numeric-escapes supplementary-plane characters reconstructed from
// surrogate pairs. No-op for a null value; throws for a null output.
private static unsafe void HtmlEncode(string value, LowLevelTextWriter output)
{
    if (value == null)
    {
        return;
    }
    if (output == null)
    {
        throw new ArgumentNullException("output");
    }

    int index = IndexOfHtmlEncodingChars(value, 0);
    if (index == -1)
    {
        output.Write(value);
        return;
    }

    Debug.Assert(0 <= index && index <= value.Length, "0 <= index && index <= value.Length");

    int cch = value.Length - index;
    fixed (char* str = value)
    {
        char* pch = str;

        // Characters before 'index' were verified safe by the scan above.
        while (index-- > 0)
        {
            output.Write(*pch++);
        }

        for (; cch > 0; cch--, pch++)
        {
            char ch = *pch;
            if (ch <= '>')
            {
                // BUG FIX: these cases previously wrote the raw characters
                // ("<", ">", etc.) instead of the entities, so markup
                // characters were not escaped at all.
                switch (ch)
                {
                    case '<':
                        output.Write("&lt;");
                        break;
                    case '>':
                        output.Write("&gt;");
                        break;
                    case '"':
                        output.Write("&quot;");
                        break;
                    case '\'':
                        // &#39; rather than &apos; for HTML 4.01 compatibility
                        output.Write("&#39;");
                        break;
                    case '&':
                        output.Write("&amp;");
                        break;
                    default:
                        output.Write(ch);
                        break;
                }
            }
            else
            {
                int valueToEncode = -1; // set to >= 0 if needs to be encoded

#if ENTITY_ENCODE_HIGH_ASCII_CHARS
                if (ch >= 160 && ch < 256)
                {
                    // The seemingly arbitrary 160 comes from RFC
                    valueToEncode = ch;
                }
                else
#endif // ENTITY_ENCODE_HIGH_ASCII_CHARS
                if (s_htmlEncodeConformance == UnicodeEncodingConformance.Strict && Char.IsSurrogate(ch))
                {
                    int scalarValue = GetNextUnicodeScalarValueFromUtf16Surrogate(ref pch, ref cch);
                    if (scalarValue >= UNICODE_PLANE01_START)
                    {
                        valueToEncode = scalarValue;
                    }
                    else
                    {
                        // Don't encode BMP characters (like U+FFFD) since they wouldn't have
                        // been encoded if explicitly present in the string anyway.
                        ch = (char)scalarValue;
                    }
                }

                if (valueToEncode >= 0)
                {
                    // value needs to be encoded as a numeric character reference
                    output.Write("&#");
                    output.Write(valueToEncode.ToString(CultureInfo.InvariantCulture));
                    output.Write(';');
                }
                else
                {
                    // write out the character directly
                    output.Write(ch);
                }
            }
        }
    }
}
// HTML-decodes a string, returning the original instance unchanged (and
// unallocated) when it contains nothing that could be an entity.
public static string HtmlDecode(string value)
{
    // Short-circuit keeps StringRequiresHtmlDecoding from seeing null/empty input.
    if (String.IsNullOrEmpty(value) || !StringRequiresHtmlDecoding(value))
    {
        return value;
    }

    LowLevelStringWriter decoded = new LowLevelStringWriter();
    HtmlDecode(value, decoded);
    return decoded.ToString();
}
[SuppressMessage("Microsoft.Usage", "CA1806:DoNotIgnoreMethodResults", MessageId = "System.UInt16.TryParse(System.String,System.Globalization.NumberStyles,System.IFormatProvider,System.UInt16@)", Justification = "UInt16.TryParse guarantees that result is zero if the parse fails.")]
// Core HTML-decoding loop: expands numeric character references (decimal and
// hex, including supplementary-plane values as surrogate pairs) and named
// entities; anything that doesn't parse as an entity is emitted verbatim.
private static void HtmlDecode(string value, LowLevelTextWriter output)
{
if (value == null)
{
return;
}
if (output == null)
{
throw new ArgumentNullException("output");
}
if (!StringRequiresHtmlDecoding(value))
{
output.Write(value); // good as is
return;
}
int l = value.Length;
for (int i = 0; i < l; i++)
{
char ch = value[i];
if (ch == '&')
{
// We found a '&'. Now look for the next ';' or '&'. The idea is that
// if we find another '&' before finding a ';', then this is not an entity,
// and the next '&' might start a real entity (VSWhidbey 275184)
int index = value.IndexOfAny(s_htmlEntityEndingChars, i + 1);
if (index > 0 && value[index] == ';')
{
string entity = value.Substring(i + 1, index - i - 1);
if (entity.Length > 1 && entity[0] == '#')
{
// The # syntax can be in decimal or hex, e.g.
// &#229; --> decimal
// &#xE5; --> same char in hex
// See http://www.w3.org/TR/REC-html40/charset.html#entities
bool parsedSuccessfully;
uint parsedValue;
if (entity[1] == 'x' || entity[1] == 'X')
{
parsedSuccessfully = UInt32.TryParse(entity.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out parsedValue);
}
else
{
parsedSuccessfully = UInt32.TryParse(entity.Substring(1), NumberStyles.Integer, CultureInfo.InvariantCulture, out parsedValue);
}
if (parsedSuccessfully)
{
switch (s_htmlDecodeConformance)
{
case UnicodeDecodingConformance.Strict:
// decoded character must be U+0000 .. U+10FFFF, excluding surrogates
parsedSuccessfully = ((parsedValue < HIGH_SURROGATE_START) || (LOW_SURROGATE_END < parsedValue && parsedValue <= UNICODE_PLANE16_END));
break;
case UnicodeDecodingConformance.Compat:
// decoded character must be U+0001 .. U+FFFF
// null chars disallowed for compat with 4.0
parsedSuccessfully = (0 < parsedValue && parsedValue <= UNICODE_PLANE00_END);
break;
case UnicodeDecodingConformance.Loose:
// decoded character must be U+0000 .. U+10FFFF
parsedSuccessfully = (parsedValue <= UNICODE_PLANE16_END);
break;
default:
Debug.Assert(false, "Should never get here!");
parsedSuccessfully = false;
break;
}
}
if (parsedSuccessfully)
{
if (parsedValue <= UNICODE_PLANE00_END)
{
// single character
output.Write((char)parsedValue);
}
else
{
// multi-character: expand to a UTF-16 surrogate pair
char leadingSurrogate, trailingSurrogate;
ConvertSmpToUtf16(parsedValue, out leadingSurrogate, out trailingSurrogate);
output.Write(leadingSurrogate);
output.Write(trailingSurrogate);
}
i = index; // already looked at everything until semicolon
continue;
}
// fall through: malformed numeric entity is emitted verbatim below
}
else
{
i = index; // already looked at everything until semicolon
char entityChar = HtmlEntities.Lookup(entity);
if (entityChar != (char)0)
{
ch = entityChar;
}
else
{
// unknown named entity: write it back out unchanged
output.Write('&');
output.Write(entity);
output.Write(';');
continue;
}
}
}
}
output.Write(ch);
}
}
// Returns the index of the first char at or after startPos that HtmlEncode
// would need to escape ('<', '>', '"', '\'', '&', high-ASCII 160-255, or --
// in strict mode -- any surrogate), or -1 if the string can be emitted as-is.
private static unsafe int IndexOfHtmlEncodingChars(string s, int startPos)
{
Debug.Assert(0 <= startPos && startPos <= s.Length, "0 <= startPos && startPos <= s.Length");
int cch = s.Length - startPos;
fixed (char* str = s)
{
for (char* pch = &str[startPos]; cch > 0; pch++, cch--)
{
char ch = *pch;
if (ch <= '>')
{
switch (ch)
{
case '<':
case '>':
case '"':
case '\'':
case '&':
return s.Length - cch;
}
}
#if ENTITY_ENCODE_HIGH_ASCII_CHARS
else if (ch >= 160 && ch < 256)
{
return s.Length - cch;
}
#endif // ENTITY_ENCODE_HIGH_ASCII_CHARS
else if (s_htmlEncodeConformance == UnicodeEncodingConformance.Strict && Char.IsSurrogate(ch))
{
return s.Length - cch;
}
}
}
return -1;
}
#endregion
#region UrlEncode implementation
// *** Source: alm/tfs_core/Framework/Common/UriUtility/HttpUtility.cs
// This specific code was copied from above ASP.NET codebase.
// Wraps the 3-argument UrlEncode and, when requested, guarantees the caller
// receives a fresh array even in the no-op case where UrlEncode returns the
// input array itself.
private static byte[] UrlEncode(byte[] bytes, int offset, int count, bool alwaysCreateNewReturnValue)
{
    byte[] encoded = UrlEncode(bytes, offset, count);

    if (alwaysCreateNewReturnValue && encoded != null && object.ReferenceEquals(encoded, bytes))
    {
        return (byte[])encoded.Clone();
    }
    return encoded;
}
// Percent-encodes a byte range: spaces become '+', other non-URL-safe bytes
// become %XX. Returns null for null/empty input, or the ORIGINAL array when
// nothing needed escaping (callers needing a copy use the 4-arg overload).
private static byte[] UrlEncode(byte[] bytes, int offset, int count)
{
    if (!ValidateUrlEncodingParameters(bytes, offset, count))
    {
        return null;
    }

    // First pass: count how many characters require expansion.
    int spaceCount = 0;
    int unsafeCount = 0;
    for (int i = 0; i < count; i++)
    {
        char ch = (char)bytes[offset + i];
        if (ch == ' ')
        {
            spaceCount++;
        }
        else if (!IsUrlSafeChar(ch))
        {
            unsafeCount++;
        }
    }

    // Nothing to expand: hand back the input array unchanged.
    if (spaceCount == 0 && unsafeCount == 0)
    {
        return bytes;
    }

    // Second pass: each unsafe byte grows to %XX (two extra bytes);
    // spaces are replaced by '+' in place.
    byte[] expanded = new byte[count + unsafeCount * 2];
    int written = 0;
    for (int i = 0; i < count; i++)
    {
        byte b = bytes[offset + i];
        char ch = (char)b;

        if (IsUrlSafeChar(ch))
        {
            expanded[written++] = b;
        }
        else if (ch == ' ')
        {
            expanded[written++] = (byte)'+';
        }
        else
        {
            expanded[written++] = (byte)'%';
            expanded[written++] = (byte)IntToHex((b >> 4) & 0xf);
            expanded[written++] = (byte)IntToHex(b & 0x0f);
        }
    }
    return expanded;
}
#endregion
#region UrlEncode public methods
[SuppressMessage("Microsoft.Design", "CA1055:UriReturnValuesShouldNotBeStrings", Justification = "Already shipped public API; code moved here as part of API consolidation")]
// URL-encodes a string by percent-encoding its UTF-8 byte representation.
public static string UrlEncode(string value)
{
    if (value == null)
    {
        return null;
    }

    byte[] utf8 = Encoding.UTF8.GetBytes(value);
    byte[] encoded = UrlEncode(utf8, 0, utf8.Length, false /* alwaysCreateNewReturnValue */);
    return Encoding.UTF8.GetString(encoded, 0, encoded.Length);
}
// URL-encodes a byte range; always returns a new array (never the input)
// so callers cannot observe or mutate shared state.
public static byte[] UrlEncodeToBytes(byte[] value, int offset, int count)
{
return UrlEncode(value, offset, count, true /* alwaysCreateNewReturnValue */);
}
#endregion
#region UrlDecode implementation
// *** Source: alm/tfs_core/Framework/Common/UriUtility/HttpUtility.cs
// This specific code was copied from above ASP.NET codebase.
// Changes done - Removed the logic to handle %Uxxxx as it is not standards compliant.
// Decodes '+' (as space) and %XX escapes in a string. Escaped bytes are
// buffered and converted through 'encoding' (so multi-byte UTF-8 sequences
// decode correctly); malformed escapes pass through verbatim.
private static string UrlDecodeInternal(string value, Encoding encoding)
{
if (value == null)
{
return null;
}
int count = value.Length;
UrlDecoder helper = new UrlDecoder(count, encoding);
// go through the string's chars collapsing %XX and
// appending each char as char, with exception of %XX constructs
// that are appended as bytes
for (int pos = 0; pos < count; pos++)
{
char ch = value[pos];
if (ch == '+')
{
ch = ' ';
}
else if (ch == '%' && pos < count - 2)
{
int h1 = HexToInt(value[pos + 1]);
int h2 = HexToInt(value[pos + 2]);
if (h1 >= 0 && h2 >= 0)
{ // valid 2 hex chars
byte b = (byte)((h1 << 4) | h2);
pos += 2;
// don't add as char
helper.AddByte(b);
continue;
}
}
if ((ch & 0xFF80) == 0)
helper.AddByte((byte)ch); // 7 bit have to go as bytes because of Unicode
else
helper.AddChar(ch);
}
return helper.GetString();
}
// Decodes '+' (as space) and %XX escapes in a byte range, returning the
// decoded bytes. Malformed escapes are passed through verbatim. Returns
// null for null/empty input (per ValidateUrlEncodingParameters).
private static byte[] UrlDecodeInternal(byte[] bytes, int offset, int count)
{
    if (!ValidateUrlEncodingParameters(bytes, offset, count))
    {
        return null;
    }

    int decodedBytesCount = 0;
    byte[] decodedBytes = new byte[count];

    for (int i = 0; i < count; i++)
    {
        int pos = offset + i;
        byte b = bytes[pos];

        if (b == '+')
        {
            b = (byte)' ';
        }
        else if (b == '%' && i < count - 2)
        {
            int h1 = HexToInt((char)bytes[pos + 1]);
            int h2 = HexToInt((char)bytes[pos + 2]);
            if (h1 >= 0 && h2 >= 0)
            { // valid 2 hex chars
                b = (byte)((h1 << 4) | h2);
                i += 2;
            }
        }

        decodedBytes[decodedBytesCount++] = b;
    }

    // Trim the buffer when %XX escapes shrank the output.
    // (Replaced the project-local ArrayT<byte>.Resize helper with the
    // equivalent standard-library Array.Resize call.)
    if (decodedBytesCount < decodedBytes.Length)
    {
        Array.Resize(ref decodedBytes, decodedBytesCount);
    }

    return decodedBytes;
}
#endregion
#region UrlDecode public methods
[SuppressMessage("Microsoft.Design", "CA1055:UriReturnValuesShouldNotBeStrings", Justification = "Already shipped public API; code moved here as part of API consolidation")]
// URL-decodes a string, interpreting %XX escape sequences as UTF-8 bytes.
public static string UrlDecode(string encodedValue)
{
if (encodedValue == null)
return null;
return UrlDecodeInternal(encodedValue, Encoding.UTF8);
}
// URL-decodes a byte range; thin public wrapper over UrlDecodeInternal.
public static byte[] UrlDecodeToBytes(byte[] encodedValue, int offset, int count)
{
return UrlDecodeInternal(encodedValue, offset, count);
}
#endregion
#region Helper methods
// Splits a supplementary-plane code point into its UTF-16 surrogate pair.
// Same math as Char.ConvertFromUtf32, minus the argument checks and string
// allocation; callers guarantee smpChar is an SMP character in
// [U+10000, U+10FFFF] (asserted below).
private static void ConvertSmpToUtf16(uint smpChar, out char leadingSurrogate, out char trailingSurrogate)
{
    Debug.Assert(UNICODE_PLANE01_START <= smpChar && smpChar <= UNICODE_PLANE16_END);

    // Shift/mask forms are equivalent to the /0x400 and %0x400 divisions
    // since the offset is non-negative.
    int utf32 = (int)(smpChar - UNICODE_PLANE01_START);
    leadingSurrogate = (char)(HIGH_SURROGATE_START + (utf32 >> 10));
    trailingSurrogate = (char)(LOW_SURROGATE_START + (utf32 & 0x3FF));
}
// Given a pointer positioned at a surrogate char, returns the full scalar
// value when a valid surrogate pair follows (advancing pch/charsRemaining
// past the low surrogate), or U+FFFD for an unpaired surrogate.
private static unsafe int GetNextUnicodeScalarValueFromUtf16Surrogate(ref char* pch, ref int charsRemaining)
{
// invariants
Debug.Assert(charsRemaining >= 1);
Debug.Assert(Char.IsSurrogate(*pch));
if (charsRemaining <= 1)
{
// not enough characters remaining to resurrect the original scalar value
return UnicodeReplacementChar;
}
char leadingSurrogate = pch[0];
char trailingSurrogate = pch[1];
if (Char.IsSurrogatePair(leadingSurrogate, trailingSurrogate))
{
// we're going to consume an extra char
pch++;
charsRemaining--;
// below code is from Char.ConvertToUtf32, but without the checks (since we just performed them)
return (((leadingSurrogate - HIGH_SURROGATE_START) * 0x400) + (trailingSurrogate - LOW_SURROGATE_START) + UNICODE_PLANE01_START);
}
else
{
// unmatched surrogate
return UnicodeReplacementChar;
}
}
// Maps a single hex digit ('0'-'9', 'a'-'f', 'A'-'F') to its value,
// or -1 for any other character.
private static int HexToInt(char h)
{
    if (h >= '0' && h <= '9')
    {
        return h - '0';
    }
    if (h >= 'a' && h <= 'f')
    {
        return h - 'a' + 10;
    }
    if (h >= 'A' && h <= 'F')
    {
        return h - 'A' + 10;
    }
    return -1;
}
// Maps a value in [0, 15] to its uppercase hex digit.
private static char IntToHex(int n)
{
    Debug.Assert(n < 0x10);
    return (char)(n <= 9 ? n + '0' : n - 10 + 'A');
}
// Set of safe chars, from RFC 1738.4 minus '+': ASCII letters, digits,
// and the seven marks "-_.!*()" pass through URL encoding unchanged.
private static bool IsUrlSafeChar(char ch)
{
    bool isAsciiLetterOrDigit =
        (ch >= 'a' && ch <= 'z') ||
        (ch >= 'A' && ch <= 'Z') ||
        (ch >= '0' && ch <= '9');

    return isAsciiLetterOrDigit || "-_.!*()".IndexOf(ch) >= 0;
}
// Validates the (bytes, offset, count) triple shared by the Url(En|De)code overloads.
// Returns false for the allowed no-op case (null buffer with count == 0); returns
// true when the range is valid; otherwise throws.
// Throws: ArgumentNullException (bytes), ArgumentOutOfRangeException (offset/count).
private static bool ValidateUrlEncodingParameters(byte[] bytes, int offset, int count)
{
    if (bytes == null && count == 0)
        return false;
    if (bytes == null)
    {
        throw new ArgumentNullException("bytes");
    }
    if (offset < 0 || offset > bytes.Length)
    {
        throw new ArgumentOutOfRangeException("offset");
    }
    // Compare via subtraction: "offset + count > bytes.Length" can overflow int
    // for large offset/count and incorrectly pass the check. offset is already
    // known to be within [0, bytes.Length], so the subtraction cannot overflow.
    if (count < 0 || count > bytes.Length - offset)
    {
        throw new ArgumentOutOfRangeException("count");
    }
    return true;
}
// Fast pre-check: does s contain anything HtmlDecode would act on?
// In Compat mode only the '&' entity prefix matters; in the stricter modes
// surrogate characters are also significant.
private static bool StringRequiresHtmlDecoding(string s)
{
    if (s_htmlDecodeConformance == UnicodeDecodingConformance.Compat)
    {
        // Compat mode: only '&' can start something decodable.
        return s.IndexOf('&') >= 0;
    }

    // Strict/loose modes: '&' or any surrogate half triggers the slow path.
    foreach (char c in s)
    {
        if (c == '&' || Char.IsSurrogate(c))
            return true;
    }
    return false;
}
#endregion
#region UrlDecoder nested class
// *** Source: alm/tfs_core/Framework/Common/UriUtility/HttpUtility.cs
// This specific code was copied from above ASP.NET codebase.
// Accumulates the output of URL decoding. Chars go straight into a char buffer;
// raw %XX bytes are batched in a byte buffer and decoded with _encoding the
// moment a literal char arrives or the final string is requested, which keeps
// multi-byte sequences intact.
private class UrlDecoder
{
    private int _bufferSize;        // capacity of both buffers

    private int _numChars;          // chars accumulated so far
    private char[] _charBuffer;     // decoded output

    private int _numBytes;          // pending, not-yet-decoded bytes
    private byte[] _byteBuffer;     // allocated lazily on first AddByte

    private Encoding _encoding;     // used to turn pending bytes into chars

    internal UrlDecoder(int bufferSize, Encoding encoding)
    {
        _bufferSize = bufferSize;
        _encoding = encoding;
        _charBuffer = new char[bufferSize];
        // _byteBuffer is created on demand in AddByte.
    }

    // Decode any pending bytes into the char buffer.
    private void FlushBytes()
    {
        if (_numBytes <= 0)
            return;

        _numChars += _encoding.GetChars(_byteBuffer, 0, _numBytes, _charBuffer, _numChars);
        _numBytes = 0;
    }

    internal void AddChar(char ch)
    {
        // Pending bytes must be decoded first so output order is preserved.
        if (_numBytes > 0)
            FlushBytes();

        _charBuffer[_numChars++] = ch;
    }

    internal void AddByte(byte b)
    {
        if (_byteBuffer == null)
            _byteBuffer = new byte[_bufferSize];

        _byteBuffer[_numBytes++] = b;
    }

    internal String GetString()
    {
        if (_numBytes > 0)
            FlushBytes();

        return (_numChars > 0) ? new String(_charBuffer, 0, _numChars) : String.Empty;
    }
}
#endregion
#region HtmlEntities nested class
// helper class for lookup of HTML encoding entities
private static class HtmlEntities
{
// The list is from http://www.w3.org/TR/REC-html40/sgml/entities.html, except for ', which
// is defined in http://www.w3.org/TR/2008/REC-xml-20081126/#sec-predefined-ent.
// Each entry is "<char>-<name>": index 0 is the Unicode character, index 1 is the
// '-' separator, and the rest is the entity name (e.g. "\x0026-amp" maps "amp" to '&').
private static String[] s_entitiesList = new String[] {
"\x0022-quot",
"\x0026-amp",
"\x0027-apos",
"\x003c-lt",
"\x003e-gt",
"\x00a0-nbsp",
"\x00a1-iexcl",
"\x00a2-cent",
"\x00a3-pound",
"\x00a4-curren",
"\x00a5-yen",
"\x00a6-brvbar",
"\x00a7-sect",
"\x00a8-uml",
"\x00a9-copy",
"\x00aa-ordf",
"\x00ab-laquo",
"\x00ac-not",
"\x00ad-shy",
"\x00ae-reg",
"\x00af-macr",
"\x00b0-deg",
"\x00b1-plusmn",
"\x00b2-sup2",
"\x00b3-sup3",
"\x00b4-acute",
"\x00b5-micro",
"\x00b6-para",
"\x00b7-middot",
"\x00b8-cedil",
"\x00b9-sup1",
"\x00ba-ordm",
"\x00bb-raquo",
"\x00bc-frac14",
"\x00bd-frac12",
"\x00be-frac34",
"\x00bf-iquest",
"\x00c0-Agrave",
"\x00c1-Aacute",
"\x00c2-Acirc",
"\x00c3-Atilde",
"\x00c4-Auml",
"\x00c5-Aring",
"\x00c6-AElig",
"\x00c7-Ccedil",
"\x00c8-Egrave",
"\x00c9-Eacute",
"\x00ca-Ecirc",
"\x00cb-Euml",
"\x00cc-Igrave",
"\x00cd-Iacute",
"\x00ce-Icirc",
"\x00cf-Iuml",
"\x00d0-ETH",
"\x00d1-Ntilde",
"\x00d2-Ograve",
"\x00d3-Oacute",
"\x00d4-Ocirc",
"\x00d5-Otilde",
"\x00d6-Ouml",
"\x00d7-times",
"\x00d8-Oslash",
"\x00d9-Ugrave",
"\x00da-Uacute",
"\x00db-Ucirc",
"\x00dc-Uuml",
"\x00dd-Yacute",
"\x00de-THORN",
"\x00df-szlig",
"\x00e0-agrave",
"\x00e1-aacute",
"\x00e2-acirc",
"\x00e3-atilde",
"\x00e4-auml",
"\x00e5-aring",
"\x00e6-aelig",
"\x00e7-ccedil",
"\x00e8-egrave",
"\x00e9-eacute",
"\x00ea-ecirc",
"\x00eb-euml",
"\x00ec-igrave",
"\x00ed-iacute",
"\x00ee-icirc",
"\x00ef-iuml",
"\x00f0-eth",
"\x00f1-ntilde",
"\x00f2-ograve",
"\x00f3-oacute",
"\x00f4-ocirc",
"\x00f5-otilde",
"\x00f6-ouml",
"\x00f7-divide",
"\x00f8-oslash",
"\x00f9-ugrave",
"\x00fa-uacute",
"\x00fb-ucirc",
"\x00fc-uuml",
"\x00fd-yacute",
"\x00fe-thorn",
"\x00ff-yuml",
"\x0152-OElig",
"\x0153-oelig",
"\x0160-Scaron",
"\x0161-scaron",
"\x0178-Yuml",
"\x0192-fnof",
"\x02c6-circ",
"\x02dc-tilde",
"\x0391-Alpha",
"\x0392-Beta",
"\x0393-Gamma",
"\x0394-Delta",
"\x0395-Epsilon",
"\x0396-Zeta",
"\x0397-Eta",
"\x0398-Theta",
"\x0399-Iota",
"\x039a-Kappa",
"\x039b-Lambda",
"\x039c-Mu",
"\x039d-Nu",
"\x039e-Xi",
"\x039f-Omicron",
"\x03a0-Pi",
"\x03a1-Rho",
"\x03a3-Sigma",
"\x03a4-Tau",
"\x03a5-Upsilon",
"\x03a6-Phi",
"\x03a7-Chi",
"\x03a8-Psi",
"\x03a9-Omega",
"\x03b1-alpha",
"\x03b2-beta",
"\x03b3-gamma",
"\x03b4-delta",
"\x03b5-epsilon",
"\x03b6-zeta",
"\x03b7-eta",
"\x03b8-theta",
"\x03b9-iota",
"\x03ba-kappa",
"\x03bb-lambda",
"\x03bc-mu",
"\x03bd-nu",
"\x03be-xi",
"\x03bf-omicron",
"\x03c0-pi",
"\x03c1-rho",
"\x03c2-sigmaf",
"\x03c3-sigma",
"\x03c4-tau",
"\x03c5-upsilon",
"\x03c6-phi",
"\x03c7-chi",
"\x03c8-psi",
"\x03c9-omega",
"\x03d1-thetasym",
"\x03d2-upsih",
"\x03d6-piv",
"\x2002-ensp",
"\x2003-emsp",
"\x2009-thinsp",
"\x200c-zwnj",
"\x200d-zwj",
"\x200e-lrm",
"\x200f-rlm",
"\x2013-ndash",
"\x2014-mdash",
"\x2018-lsquo",
"\x2019-rsquo",
"\x201a-sbquo",
"\x201c-ldquo",
"\x201d-rdquo",
"\x201e-bdquo",
"\x2020-dagger",
"\x2021-Dagger",
"\x2022-bull",
"\x2026-hellip",
"\x2030-permil",
"\x2032-prime",
"\x2033-Prime",
"\x2039-lsaquo",
"\x203a-rsaquo",
"\x203e-oline",
"\x2044-frasl",
"\x20ac-euro",
"\x2111-image",
"\x2118-weierp",
"\x211c-real",
"\x2122-trade",
"\x2135-alefsym",
"\x2190-larr",
"\x2191-uarr",
"\x2192-rarr",
"\x2193-darr",
"\x2194-harr",
"\x21b5-crarr",
"\x21d0-lArr",
"\x21d1-uArr",
"\x21d2-rArr",
"\x21d3-dArr",
"\x21d4-hArr",
"\x2200-forall",
"\x2202-part",
"\x2203-exist",
"\x2205-empty",
"\x2207-nabla",
"\x2208-isin",
"\x2209-notin",
"\x220b-ni",
"\x220f-prod",
"\x2211-sum",
"\x2212-minus",
"\x2217-lowast",
"\x221a-radic",
"\x221d-prop",
"\x221e-infin",
"\x2220-ang",
"\x2227-and",
"\x2228-or",
"\x2229-cap",
"\x222a-cup",
"\x222b-int",
"\x2234-there4",
"\x223c-sim",
"\x2245-cong",
"\x2248-asymp",
"\x2260-ne",
"\x2261-equiv",
"\x2264-le",
"\x2265-ge",
"\x2282-sub",
"\x2283-sup",
"\x2284-nsub",
"\x2286-sube",
"\x2287-supe",
"\x2295-oplus",
"\x2297-otimes",
"\x22a5-perp",
"\x22c5-sdot",
"\x2308-lceil",
"\x2309-rceil",
"\x230a-lfloor",
"\x230b-rfloor",
"\x2329-lang",
"\x232a-rang",
"\x25ca-loz",
"\x2660-spades",
"\x2663-clubs",
"\x2665-hearts",
"\x2666-diams",
};
// Built once at type initialization. Ordinal comparison because entity names
// are case-sensitive (e.g. "Alpha" and "alpha" are different characters).
private static LowLevelDictionary<string, char> s_lookupTable = GenerateLookupTable();
private static LowLevelDictionary<string, char> GenerateLookupTable()
{
// e[0] is unicode char, e[1] is '-', e[2+] is entity string
LowLevelDictionary<string, char> lookupTable = new LowLevelDictionary<string, char>(StringComparer.Ordinal);
foreach (string e in s_entitiesList)
{
lookupTable.Add(e.Substring(2), e[0]);
}
return lookupTable;
}
// Returns the character for a known entity name, or '\0' for an unknown name
// (TryGetValue leaves theChar at default(char) on a miss).
public static char Lookup(string entity)
{
char theChar;
s_lookupTable.TryGetValue(entity, out theChar);
return theChar;
}
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Net;
using System.Net.Sockets;
using System.Runtime;
using System.Threading.Tasks;
#if FEATURE_NETNATIVE
using System.Collections.Generic;
using Windows.Networking;
using Windows.Networking.Connectivity;
using Windows.Networking.Sockets;
using RTSocketError = Windows.Networking.Sockets.SocketError;
#endif
namespace System.ServiceModel.Channels
{
// Small MRU cache in front of DNS resolution. Positive and negative lookup
// results are cached for s_cacheTimeout so repeated resolutions of the same
// host (common during connection churn) don't hammer the resolver.
internal static class DnsCache
{
    private const int mruWatermark = 64;
    private static MruCache<string, DnsCacheEntry> s_resolveCache = new MruCache<string, DnsCacheEntry>(mruWatermark);

    // Entries older than this are considered stale and re-resolved.
    private static readonly TimeSpan s_cacheTimeout = TimeSpan.FromSeconds(2);

    // Double-checked locking pattern requires volatile for read/write synchronization
    private static volatile string s_machineName;

    // The cache instance doubles as the lock protecting it and s_machineName.
    private static object ThisLock
    {
        get
        {
            return s_resolveCache;
        }
    }

    // Lazily resolves and caches the local machine's host name.
    // SocketException from resolution propagates to the caller.
    public static string MachineName
    {
        get
        {
            if (s_machineName == null)
            {
                lock (ThisLock)
                {
                    if (s_machineName == null)
                    {
#if FEATURE_NETNATIVE
                        // Pick the first domain-style name the platform reports.
                        var hostNamesList = NetworkInformation.GetHostNames();
                        foreach (var entry in hostNamesList)
                        {
                            if (entry.Type == HostNameType.DomainName)
                            {
                                s_machineName = entry.CanonicalName;
                                break;
                            }
                        }
#else
                        // An empty host name asks DNS for the local host's own entry.
                        // NOTE: the previous "catch (SocketException) { throw; }" was a
                        // no-op and has been removed; exceptions still propagate.
                        s_machineName = Dns.GetHostEntryAsync(String.Empty).GetAwaiter().GetResult().HostName;
#endif
                    }
                }
            }

            return s_machineName;
        }
    }

    // Resolves uri's host to its addresses, consulting the cache first.
    // Failed lookups are cached too (as a null address list) and surface as
    // EndpointNotFoundException until the entry expires.
    public static async Task<IPAddress[]> ResolveAsync(Uri uri)
    {
        string hostName = uri.DnsSafeHost;
        IPAddress[] hostAddresses = null;
        DateTime now = DateTime.UtcNow;

        lock (ThisLock)
        {
            DnsCacheEntry cacheEntry;
            if (s_resolveCache.TryGetValue(hostName, out cacheEntry))
            {
                if (now.Subtract(cacheEntry.TimeStamp) > s_cacheTimeout)
                {
                    // Stale; drop it and fall through to a fresh lookup.
                    s_resolveCache.Remove(hostName);
                    cacheEntry = null;
                }
                else
                {
                    if (cacheEntry.AddressList == null)
                    {
                        // Cached negative result.
                        throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(
                            new EndpointNotFoundException(SR.Format(SR.DnsResolveFailed, hostName)));
                    }

                    hostAddresses = cacheEntry.AddressList;
                }
            }
        }

        if (hostAddresses == null)
        {
            SocketException dnsException = null;
            try
            {
                hostAddresses = await LookupHostName(hostName);
            }
            catch (SocketException e)
            {
                // Remember the failure; it is rethrown (wrapped) after the cache update.
                dnsException = e;
            }

            lock (ThisLock)
            {
                // MruCache doesn't have a this[] operator, so we first remove (just in case it exists already)
                s_resolveCache.Remove(hostName);
                s_resolveCache.Add(hostName, new DnsCacheEntry(hostAddresses, now));
            }

            if (dnsException != null)
            {
                throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(
                    new EndpointNotFoundException(SR.Format(SR.DnsResolveFailed, hostName), dnsException));
            }
        }

        return hostAddresses;
    }

#if FEATURE_NETNATIVE
    internal static async Task<IPAddress[]> LookupHostName(string hostName)
    {
        try
        {
            IReadOnlyList<EndpointPair> data = await DatagramSocket.GetEndpointPairsAsync(new HostName(hostName), "0").AsTask();

            // BUGFIX: the null check must precede any use of data.Count; the previous
            // code sized the List from data.Count before its own "data != null" test,
            // which threw NullReferenceException when no endpoint list was returned.
            if (data == null || data.Count == 0)
            {
                return new IPAddress[0];
            }

            List<IPAddress> addresses = new List<IPAddress>(data.Count);
            foreach (EndpointPair item in data)
            {
                if (item != null && item.RemoteHostName != null &&
                    (item.RemoteHostName.Type == HostNameType.Ipv4 || item.RemoteHostName.Type == HostNameType.Ipv6))
                {
                    IPAddress address;
                    if (IPAddress.TryParse(item.RemoteHostName.CanonicalName, out address))
                    {
                        addresses.Add(address);
                    }
                }
            }

            return addresses.ToArray();
        }
        catch (Exception exception)
        {
            // Translate recognized WinRT networking failures into SocketException so
            // ResolveAsync can treat them like classic DNS failures.
            if (RTSocketError.GetStatus(exception.HResult) != SocketErrorStatus.Unknown)
            {
                throw new SocketException(exception.HResult & 0x0000FFFF);
            }
            throw;
        }
    }
#else
    // Straight pass-through to the BCL resolver.
    internal static async Task<IPAddress[]> LookupHostName(string hostName)
    {
        return (await Dns.GetHostEntryAsync(hostName)).AddressList;
    }
#endif

    // Immutable cache record: the resolved addresses (null for a failed lookup)
    // plus the UTC time the lookup was performed.
    internal class DnsCacheEntry
    {
        private DateTime _timeStamp;
        private IPAddress[] _addressList;

        public DnsCacheEntry(IPAddress[] addressList, DateTime timeStamp)
        {
            _timeStamp = timeStamp;
            _addressList = addressList;
        }

        public IPAddress[] AddressList
        {
            get
            {
                return _addressList;
            }
        }

        public DateTime TimeStamp
        {
            get
            {
                return _timeStamp;
            }
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
////////////////////////////////////////////////////////////////////////////
//
// Class: CompareInfo
//
//
// Purpose: This class implements a set of methods for comparing
// strings.
//
// Date: August 12, 1998
//
////////////////////////////////////////////////////////////////////////////
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Threading;
using System.Diagnostics.Contracts;
namespace System.Globalization
{
// Options controlling culture-sensitive string comparison. OrdinalIgnoreCase
// and Ordinal are stand-alone values and may not be combined with other flags.
[Flags]
[System.Runtime.InteropServices.ComVisible(true)]
public enum CompareOptions
{
None = 0x00000000,
IgnoreCase = 0x00000001, // ignore case
IgnoreNonSpace = 0x00000002, // ignore nonspacing characters
IgnoreSymbols = 0x00000004, // ignore symbols
IgnoreKanaType = 0x00000008, // ignore kanatype
IgnoreWidth = 0x00000010, // ignore width
OrdinalIgnoreCase = 0x10000000, // This flag can not be used with other flags.
StringSort = 0x20000000, // use string sort method
Ordinal = 0x40000000, // This flag can not be used with other flags.
}
[System.Runtime.InteropServices.ComVisible(true)]
public partial class CompareInfo
{
// Mask used to check if IndexOf()/LastIndexOf()/IsPrefix()/IsPostfix() has the right flags.
// (A nonzero result after AND-ing means a disallowed flag was passed.)
private const CompareOptions ValidIndexMaskOffFlags =
~(CompareOptions.IgnoreCase | CompareOptions.IgnoreSymbols | CompareOptions.IgnoreNonSpace |
CompareOptions.IgnoreWidth | CompareOptions.IgnoreKanaType);
// Mask used to check if Compare() has the right flags.
// (Compare() additionally permits StringSort.)
private const CompareOptions ValidCompareMaskOffFlags =
~(CompareOptions.IgnoreCase | CompareOptions.IgnoreSymbols | CompareOptions.IgnoreNonSpace |
CompareOptions.IgnoreWidth | CompareOptions.IgnoreKanaType | CompareOptions.StringSort);
// Mask used to check if GetHashCodeOfString() has the right flags.
private const CompareOptions ValidHashCodeOfStringMaskOffFlags =
~(CompareOptions.IgnoreCase | CompareOptions.IgnoreSymbols | CompareOptions.IgnoreNonSpace |
CompareOptions.IgnoreWidth | CompareOptions.IgnoreKanaType);
//
// CompareInfos have an interesting identity. They are attached to the locale that created them,
// ie: en-US would have an en-US sort. For haw-US (custom), then we serialize it as haw-US.
// The interesting part is that since haw-US doesn't have its own sort, it has to point at another
// locale, which is what SCOMPAREINFO does.
private readonly String m_name; // The name used to construct this CompareInfo
private readonly String m_sortName; // The name that defines our behavior
/*=================================GetCompareInfo==========================
**Action: Get the CompareInfo for the specified culture.
**Returns: The CompareInfo for the specified culture.
**Arguments:
**      name    the name of the culture.
**Exceptions:
**      ArgumentNullException if name is null; ArgumentException if invalid.
============================================================================*/
public static CompareInfo GetCompareInfo(String name)
{
    if (name == null)
        throw new ArgumentNullException("name");
    Contract.EndContractBlock();

    // Each culture owns a cached CompareInfo; hand that instance out.
    return CultureInfo.GetCultureInfo(name).CompareInfo;
}
///////////////////////////----- Name -----/////////////////////////////////
//
// Returns the name of the sort (not necessarily the culture that created this
// CompareInfo) as a non-LCID identifier. The name is deliberately not
// dereferenced when it points at another locale's sort, matching the behavior
// of earlier versions: ask for a sort whose behavior changed and you get the
// changed behavior, as with a version update.
//
////////////////////////////////////////////////////////////////////////
[System.Runtime.InteropServices.ComVisible(false)]
public virtual String Name
{
    get
    {
        Contract.Assert(m_name != null, "CompareInfo.Name Expected m_name to be set");

        // The legacy zh-CHT / zh-CHS aliases are reported as constructed.
        return (m_name == "zh-CHT" || m_name == "zh-CHS") ? m_name : m_sortName;
    }
}
////////////////////////////////////////////////////////////////////////
//
// Compare
//
// Compares the two strings with default options. Returns 0 when equal,
// a negative number when string1 sorts first, positive when string2 does.
//
////////////////////////////////////////////////////////////////////////
public virtual int Compare(String string1, String string2)
{
    // Delegate to the options overload with CompareOptions.None.
    return Compare(string1, string2, CompareOptions.None);
}
// Culture-sensitive compare of two whole strings with the given options.
// null sorts before any non-null string; two nulls compare equal.
[System.Security.SecuritySafeCritical] // auto-generated
public unsafe virtual int Compare(String string1, String string2, CompareOptions options)
{
    // OrdinalIgnoreCase is a stand-alone mode; route it straight to String.
    if (options == CompareOptions.OrdinalIgnoreCase)
        return String.Compare(string1, string2, StringComparison.OrdinalIgnoreCase);

    // Ordinal may not be combined with any other option.
    if ((options & CompareOptions.Ordinal) != 0)
    {
        if (options != CompareOptions.Ordinal)
            throw new ArgumentException(SR.Argument_CompareOptionOrdinal, "options");

        return String.CompareOrdinal(string1, string2);
    }

    if ((options & ValidCompareMaskOffFlags) != 0)
        throw new ArgumentException(SR.Argument_InvalidFlag, "options");

    // null sorts less than any other string; two nulls sort equal.
    if (string1 == null)
        return (string2 == null) ? 0 : -1;
    if (string2 == null)
        return 1;

    return CompareString(string1, 0, string1.Length, string2, 0, string2.Length, options);
}
////////////////////////////////////////////////////////////////////////
//
// Compare
//
// Compares the specified regions of the two strings with default options;
// see the CompareOptions overload for the full contract and validation.
//
////////////////////////////////////////////////////////////////////////
public unsafe virtual int Compare(String string1, int offset1, int length1, String string2, int offset2, int length2)
{
    // CompareOptions.None == 0, as passed by the original overload.
    return Compare(string1, offset1, length1, string2, offset2, length2, CompareOptions.None);
}
public unsafe virtual int Compare(String string1, int offset1, String string2, int offset2, CompareOptions options)
{
    // Compare from each offset to the end of its string; a null string
    // contributes length 0, matching the full overload's null handling.
    int remaining1 = (string1 == null) ? 0 : string1.Length - offset1;
    int remaining2 = (string2 == null) ? 0 : string2.Length - offset2;
    return Compare(string1, offset1, remaining1, string2, offset2, remaining2, options);
}
public unsafe virtual int Compare(String string1, int offset1, String string2, int offset2)
{
    // Default options (0 == CompareOptions.None).
    return Compare(string1, offset1, string2, offset2, CompareOptions.None);
}
// Region-based compare. The validation order is deliberate and observable:
// the OrdinalIgnoreCase fast path runs before any argument validation, so in
// that mode invalid offsets/lengths surface from String.Compare instead.
[System.Security.SecuritySafeCritical] // auto-generated
public unsafe virtual int Compare(String string1, int offset1, int length1, String string2, int offset2, int length2, CompareOptions options)
{
if (options == CompareOptions.OrdinalIgnoreCase)
{
// Compare the common prefix; equal prefixes are ordered by length.
int result = String.Compare(string1, offset1, string2, offset2, length1 < length2 ? length1 : length2, StringComparison.OrdinalIgnoreCase);
if ((length1 != length2) && result == 0)
return (length1 > length2 ? 1 : -1);
return (result);
}
// Verify inputs
if (length1 < 0 || length2 < 0)
{
throw new ArgumentOutOfRangeException((length1 < 0) ? "length1" : "length2", SR.ArgumentOutOfRange_NeedPosNum);
}
if (offset1 < 0 || offset2 < 0)
{
throw new ArgumentOutOfRangeException((offset1 < 0) ? "offset1" : "offset2", SR.ArgumentOutOfRange_NeedPosNum);
}
// A null string is treated as length 0 for the range checks below.
if (offset1 > (string1 == null ? 0 : string1.Length) - length1)
{
throw new ArgumentOutOfRangeException("string1", SR.ArgumentOutOfRange_OffsetLength);
}
if (offset2 > (string2 == null ? 0 : string2.Length) - length2)
{
throw new ArgumentOutOfRangeException("string2", SR.ArgumentOutOfRange_OffsetLength);
}
// Ordinal may not be combined with any other option.
if ((options & CompareOptions.Ordinal) != 0)
{
if (options != CompareOptions.Ordinal)
{
throw new ArgumentException(SR.Argument_CompareOptionOrdinal,
"options");
}
}
else if ((options & ValidCompareMaskOffFlags) != 0)
{
throw new ArgumentException(SR.Argument_InvalidFlag, "options");
}
//
// Check for the null case: null sorts before any non-null string and
// two nulls compare equal.
//
if (string1 == null)
{
if (string2 == null)
{
return (0);
}
return (-1);
}
if (string2 == null)
{
return (1);
}
if (options == CompareOptions.Ordinal)
{
return CompareOrdinal(string1, offset1, length1,
string2, offset2, length2);
}
return CompareString(string1, offset1, length1,
string2, offset2, length2,
options);
}
// Ordinal compare of two substrings. The overlapping prefix is compared
// ordinally; when the prefixes are equal, the longer substring sorts last.
[System.Security.SecurityCritical]
private static int CompareOrdinal(string string1, int offset1, int length1, string string2, int offset2, int length2)
{
    int prefixLength = (length1 < length2) ? length1 : length2;
    int result = String.CompareOrdinal(string1, offset1, string2, offset2, prefixLength);

    // Tie on the common prefix: break by length.
    if (result == 0 && length1 != length2)
        return (length1 > length2) ? 1 : -1;

    return result;
}
//
// CompareOrdinalIgnoreCase compares two strings ordinally while ignoring case.
// It assumes the strings are ASCII until it hits a non-ASCII character in strA
// or strB, and then continues the comparison by calling the OS.
//
// The fast loop runs while both chars are <= 0x80, uppercasing a-z manually;
// any remaining tails go to CompareStringOrdinalIgnoreCase (defined elsewhere).
//
[System.Security.SecuritySafeCritical]
internal static unsafe int CompareOrdinalIgnoreCase(string strA, int indexA, int lengthA, string strB, int indexB, int lengthB)
{
Contract.Assert(indexA + lengthA <= strA.Length);
Contract.Assert(indexB + lengthB <= strB.Length);
int length = Math.Min(lengthA, lengthB);
int range = length;
fixed (char* ap = strA) fixed (char* bp = strB)
{
char* a = ap + indexA;
char* b = bp + indexB;
while (length != 0 && (*a <= 0x80) && (*b <= 0x80))
{
int charA = *a;
int charB = *b;
if (charA == charB)
{
a++; b++;
length--;
continue;
}
// uppercase both chars - notice that we need just one compare per char
if ((uint)(charA - 'a') <= (uint)('z' - 'a')) charA -= 0x20;
if ((uint)(charB - 'a') <= (uint)('z' - 'a')) charB -= 0x20;
//Return the (case-insensitive) difference between them.
if (charA != charB)
return charA - charB;
// Next char
a++; b++;
length--;
}
// The whole common span matched in the fast loop: order by length.
if (length == 0)
return lengthA - lengthB;
// range becomes the count of chars already consumed; compare the tails via the OS.
range -= length;
return CompareStringOrdinalIgnoreCase(a, lengthA - range, b, lengthB - range);
}
}
////////////////////////////////////////////////////////////////////////
//
// IsPrefix
//
// Determines whether prefix is a prefix of source under the given options.
// The empty string is a prefix of everything.
//
////////////////////////////////////////////////////////////////////////
[System.Security.SecuritySafeCritical] // auto-generated
public unsafe virtual bool IsPrefix(String source, String prefix, CompareOptions options)
{
    if (source == null || prefix == null)
    {
        throw new ArgumentNullException((source == null ? "source" : "prefix"),
            SR.ArgumentNull_String);
    }
    Contract.EndContractBlock();

    // The empty string prefixes everything.
    if (prefix.Length == 0)
        return true;

    // Both ordinal modes map directly onto String.StartsWith.
    if (options == CompareOptions.OrdinalIgnoreCase)
        return source.StartsWith(prefix, StringComparison.OrdinalIgnoreCase);
    if (options == CompareOptions.Ordinal)
        return source.StartsWith(prefix, StringComparison.Ordinal);

    if ((options & ValidIndexMaskOffFlags) != 0)
        throw new ArgumentException(SR.Argument_InvalidFlag, "options");

    // Culture-sensitive prefix check.
    return StartsWith(source, prefix, options);
}
public virtual bool IsPrefix(String source, String prefix)
{
    // Default options (0 == CompareOptions.None).
    return IsPrefix(source, prefix, CompareOptions.None);
}
////////////////////////////////////////////////////////////////////////
//
// IsSuffix
//
// Determines whether suffix is a suffix of source under the given options.
// The empty string is a suffix of everything.
//
////////////////////////////////////////////////////////////////////////
[System.Security.SecuritySafeCritical] // auto-generated
public unsafe virtual bool IsSuffix(String source, String suffix, CompareOptions options)
{
    if (source == null || suffix == null)
    {
        throw new ArgumentNullException((source == null ? "source" : "suffix"),
            SR.ArgumentNull_String);
    }
    Contract.EndContractBlock();

    // The empty string is a suffix of everything...
    if (suffix.Length == 0)
        return true;

    // ...but nothing non-empty is a suffix of the empty string.
    if (source.Length == 0)
        return false;

    // Both ordinal modes map directly onto String.EndsWith.
    if (options == CompareOptions.OrdinalIgnoreCase)
        return source.EndsWith(suffix, StringComparison.OrdinalIgnoreCase);
    if (options == CompareOptions.Ordinal)
        return source.EndsWith(suffix, StringComparison.Ordinal);

    if ((options & ValidIndexMaskOffFlags) != 0)
        throw new ArgumentException(SR.Argument_InvalidFlag, "options");

    // Culture-sensitive suffix check.
    return EndsWith(source, suffix, options);
}
public virtual bool IsSuffix(String source, String suffix)
{
    // Default options (0 == CompareOptions.None).
    return IsSuffix(source, suffix, CompareOptions.None);
}
////////////////////////////////////////////////////////////////////////
//
// IndexOf
//
// Returns the first index where value is found in source, or -1 when it is
// not found. If value is String.Empty, startIndex is returned. Throws for a
// null value and for a startIndex/count outside the string.
//
////////////////////////////////////////////////////////////////////////
public unsafe virtual int IndexOf(String source, char value)
{
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }
    Contract.EndContractBlock();

    // Search the entire string with default options.
    return IndexOf(source, value, 0, source.Length, CompareOptions.None);
}
public unsafe virtual int IndexOf(String source, String value)
{
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }
    Contract.EndContractBlock();

    // Search the entire string with default options.
    return IndexOf(source, value, 0, source.Length, CompareOptions.None);
}
public unsafe virtual int IndexOf(String source, char value, CompareOptions options)
{
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }
    Contract.EndContractBlock();

    // Whole-string search with the caller's options.
    return IndexOf(source, value, 0, source.Length, options);
}
public unsafe virtual int IndexOf(String source, String value, CompareOptions options)
{
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }
    Contract.EndContractBlock();

    // Whole-string search with the caller's options.
    return IndexOf(source, value, 0, source.Length, options);
}
public unsafe virtual int IndexOf(String source, char value, int startIndex, CompareOptions options)
{
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }
    Contract.EndContractBlock();

    // Search from startIndex to the end of the string.
    return IndexOf(source, value, startIndex, source.Length - startIndex, options);
}
public unsafe virtual int IndexOf(String source, String value, int startIndex, CompareOptions options)
{
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }
    Contract.EndContractBlock();

    // Search from startIndex to the end of the string.
    return IndexOf(source, value, startIndex, source.Length - startIndex, options);
}
public unsafe virtual int IndexOf(String source, char value, int startIndex, int count)
{
    // Default options; full validation happens in the target overload.
    return IndexOf(source, value, startIndex, count, CompareOptions.None);
}
public unsafe virtual int IndexOf(String source, String value, int startIndex, int count)
{
    // Default options; full validation happens in the target overload.
    return IndexOf(source, value, startIndex, count, CompareOptions.None);
}
// Full char IndexOf: searches [startIndex, startIndex + count) under options.
[System.Security.SecuritySafeCritical] // auto-generated
public unsafe virtual int IndexOf(String source, char value, int startIndex, int count, CompareOptions options)
{
    // Validate inputs
    if (source == null)
        throw new ArgumentNullException("source");
    if (startIndex < 0 || startIndex > source.Length)
        throw new ArgumentOutOfRangeException("startIndex", SR.ArgumentOutOfRange_Index);
    if (count < 0 || startIndex > source.Length - count)
        throw new ArgumentOutOfRangeException("count", SR.ArgumentOutOfRange_Count);
    Contract.EndContractBlock();

    if (options == CompareOptions.OrdinalIgnoreCase)
    {
        // TODO: NLS Arrowhead: Make this not need a new String()
        return source.IndexOf(value.ToString(), startIndex, count, StringComparison.OrdinalIgnoreCase);
    }

    // Ordinal can't be combined with any other flag.
    if ((options & ValidIndexMaskOffFlags) != 0 && options != CompareOptions.Ordinal)
        throw new ArgumentException(SR.Argument_InvalidFlag, "options");

    // Wrap the char as a one-char string for the core search.
    return IndexOfCore(source, new string(value, 1), startIndex, count, options);
}
// Full string IndexOf over [startIndex, startIndex + count). The validation
// order is deliberate (Everett compatibility): startIndex-too-large first,
// then the empty-source special case, and only then negative startIndex/count.
[System.Security.SecuritySafeCritical] // auto-generated
public unsafe virtual int IndexOf(String source, String value, int startIndex, int count, CompareOptions options)
{
// Validate inputs
if (source == null)
throw new ArgumentNullException("source");
if (value == null)
throw new ArgumentNullException("value");
if (startIndex > source.Length)
{
throw new ArgumentOutOfRangeException("startIndex", SR.ArgumentOutOfRange_Index);
}
Contract.EndContractBlock();
// In Everett we used to return -1 for empty string even if startIndex is negative number so we keeping same behavior here.
// We return 0 if both source and value are empty strings for Everett compatibility too.
if (source.Length == 0)
{
if (value.Length == 0)
{
return 0;
}
return -1;
}
if (startIndex < 0)
{
throw new ArgumentOutOfRangeException("startIndex", SR.ArgumentOutOfRange_Index);
}
if (count < 0 || startIndex > source.Length - count)
throw new ArgumentOutOfRangeException("count", SR.ArgumentOutOfRange_Count);
if (options == CompareOptions.OrdinalIgnoreCase)
{
return IndexOfOrdinal(source, value, startIndex, count, ignoreCase: true);
}
// Validate CompareOptions
// Ordinal can't be selected with other flags
if ((options & ValidIndexMaskOffFlags) != 0 && (options != CompareOptions.Ordinal))
throw new ArgumentException(SR.Argument_InvalidFlag, "options");
return IndexOfCore(source, value, startIndex, count, options);
}
////////////////////////////////////////////////////////////////////////
//
// LastIndexOf
//
// Returns the last index where value is found in source, or -1 when it is
// not found. If value is String.Empty, the end index is returned. Throws for
// a null value and for a startIndex/count outside the string.
//
////////////////////////////////////////////////////////////////////////
public unsafe virtual int LastIndexOf(String source, char value)
{
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }
    Contract.EndContractBlock();

    // Can't start at a negative index, so Length - 1 with count == Length
    // also covers the empty-string case (handled by the target overload).
    return LastIndexOf(source, value, source.Length - 1,
        source.Length, CompareOptions.None);
}
public virtual int LastIndexOf(String source, String value)
{
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }
    Contract.EndContractBlock();

    // Backwards search over the whole string with default options.
    return LastIndexOf(source, value, source.Length - 1,
        source.Length, CompareOptions.None);
}
public virtual int LastIndexOf(String source, char value, CompareOptions options)
{
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }
    Contract.EndContractBlock();

    // Backwards search over the whole string with the caller's options.
    return LastIndexOf(source, value, source.Length - 1,
        source.Length, options);
}
public unsafe virtual int LastIndexOf(String source, String value, CompareOptions options)
{
    if (source == null)
    {
        throw new ArgumentNullException("source");
    }
    Contract.EndContractBlock();

    // Backwards search over the whole string with the caller's options.
    return LastIndexOf(source, value, source.Length - 1,
        source.Length, options);
}
public unsafe virtual int LastIndexOf(String source, char value, int startIndex, CompareOptions options)
{
    // count == startIndex + 1 covers the range [0, startIndex].
    return LastIndexOf(source, value, startIndex, startIndex + 1, options);
}
public unsafe virtual int LastIndexOf(String source, String value, int startIndex, CompareOptions options)
{
    // count == startIndex + 1 covers the range [0, startIndex].
    return LastIndexOf(source, value, startIndex, startIndex + 1, options);
}
public unsafe virtual int LastIndexOf(String source, char value, int startIndex, int count)
{
    // Default options; full validation happens in the target overload.
    return LastIndexOf(source, value, startIndex, count, CompareOptions.None);
}
public unsafe virtual int LastIndexOf(String source, String value, int startIndex, int count)
{
    // Default options; full validation happens in the target overload.
    return LastIndexOf(source, value, startIndex, count, CompareOptions.None);
}
// Full char LastIndexOf: searches backwards over the count chars ending at
// startIndex. startIndex == source.Length is tolerated by pulling the search
// window back one position before validating count.
[System.Security.SecuritySafeCritical] // auto-generated
public unsafe virtual int LastIndexOf(String source, char value, int startIndex, int count, CompareOptions options)
{
// Verify Arguments
if (source == null)
throw new ArgumentNullException("source");
Contract.EndContractBlock();
// Validate CompareOptions
// Ordinal can't be selected with other flags
if ((options & ValidIndexMaskOffFlags) != 0 &&
(options != CompareOptions.Ordinal) &&
(options != CompareOptions.OrdinalIgnoreCase))
throw new ArgumentException(SR.Argument_InvalidFlag, "options");
// Special case for 0 length input strings
if (source.Length == 0 && (startIndex == -1 || startIndex == 0))
return -1;
// Make sure we're not out of range
if (startIndex < 0 || startIndex > source.Length)
throw new ArgumentOutOfRangeException("startIndex", SR.ArgumentOutOfRange_Index);
// Make sure that we allow startIndex == source.Length
if (startIndex == source.Length)
{
startIndex--;
if (count > 0)
count--;
}
// 2nd half of this also catches when startIndex == MAXINT, so MAXINT - 0 + 1 == -1, which is < 0.
if (count < 0 || startIndex - count + 1 < 0)
throw new ArgumentOutOfRangeException("count", SR.ArgumentOutOfRange_Count);
if (options == CompareOptions.OrdinalIgnoreCase)
{
// TODO: NLS Arrowhead - Make this not need a new String()
return source.LastIndexOf(value.ToString(), startIndex, count, StringComparison.OrdinalIgnoreCase);
}
return LastIndexOfCore(source, new string(value, 1), startIndex, count, options);
}
[System.Security.SecuritySafeCritical] // auto-generated
// Core substring overload: searches backwards through the count characters
// ending at startIndex for the last occurrence of value. An empty value
// matches at startIndex. Returns the index of the match, or -1 if not found.
public unsafe virtual int LastIndexOf(String source, String value, int startIndex, int count, CompareOptions options)
{
    // Verify Arguments
    if (source == null)
        throw new ArgumentNullException("source");
    if (value == null)
        throw new ArgumentNullException("value");
    Contract.EndContractBlock();

    // Validate CompareOptions
    // Ordinal can't be selected with other flags
    if ((options & ValidIndexMaskOffFlags) != 0 &&
        (options != CompareOptions.Ordinal) &&
        (options != CompareOptions.OrdinalIgnoreCase))
        throw new ArgumentException(SR.Argument_InvalidFlag, "options");

    // Special case for 0 length input strings
    if (source.Length == 0 && (startIndex == -1 || startIndex == 0))
        return (value.Length == 0) ? 0 : -1;

    // Make sure we're not out of range
    if (startIndex < 0 || startIndex > source.Length)
        throw new ArgumentOutOfRangeException("startIndex", SR.ArgumentOutOfRange_Index);

    // Make sure that we allow startIndex == source.Length
    if (startIndex == source.Length)
    {
        startIndex--;
        if (count > 0)
            count--;

        // If we are looking for nothing, just return 0
        if (value.Length == 0 && count >= 0 && startIndex - count + 1 >= 0)
            return startIndex;
    }

    // 2nd half of this also catches when startIndex == MAXINT, so MAXINT - 0 + 1 == -1, which is < 0.
    if (count < 0 || startIndex - count + 1 < 0)
        throw new ArgumentOutOfRangeException("count", SR.ArgumentOutOfRange_Count);

    if (options == CompareOptions.OrdinalIgnoreCase)
    {
        return LastIndexOfOrdinal(source, value, startIndex, count, ignoreCase: true);
    }

    // Culture-sensitive (or Ordinal) search goes through the native core.
    return LastIndexOfCore(source, value, startIndex, count, options);
}
////////////////////////////////////////////////////////////////////////
//
// Equals
//
// Implements Object.Equals(). Returns a boolean indicating whether
// or not object refers to the same CompareInfo as the current
// instance.
//
////////////////////////////////////////////////////////////////////////
// Two CompareInfo instances are equal when they carry the same Name,
// which identifies the culture/sort they were created for.
public override bool Equals(Object value)
{
    CompareInfo other = value as CompareInfo;
    return other != null && this.Name == other.Name;
}
////////////////////////////////////////////////////////////////////////
//
// GetHashCode
//
// Implements Object.GetHashCode(). Returns the hash code for the
// CompareInfo. The hash code is guaranteed to be the same for
// CompareInfo A and B where A.Equals(B) is true.
//
////////////////////////////////////////////////////////////////////////
// Hash on Name so that Equals-equal instances (same Name) hash identically.
public override int GetHashCode()
{
    return Name.GetHashCode();
}
////////////////////////////////////////////////////////////////////////
//
// GetHashCodeOfString
//
// This internal method provides the equivalent of creating a SortKey for a string from
// CompareInfo and generating a hash code value from it, without the inconvenience of
// using SortKey directly or creating an unnecessary SortKey object that will be GC'ed.
//
// The hash code is guaranteed to be the same for string A and B where A.Equals(B) is true and both
// the CompareInfo and the CompareOptions are the same. If two different CompareInfo objects
// treat the string the same way, this implementation will treat them differently (the same way that
// Sortkey does at the moment).
//
// This method will never be made public itself, but public consumers of it could be created, e.g.:
//
// string.GetHashCode(CultureInfo)
// string.GetHashCode(CompareInfo)
// string.GetHashCode(CultureInfo, CompareOptions)
// string.GetHashCode(CompareInfo, CompareOptions)
// etc.
//
// (the methods above that take a CultureInfo would use CultureInfo.CompareInfo)
//
////////////////////////////////////////////////////////////////////////
// Produces a hash of the string as it would sort under this CompareInfo and
// the given options, without materializing a SortKey. Rejects Ordinal,
// OrdinalIgnoreCase and StringSort (the mask-off flags).
internal int GetHashCodeOfString(string source, CompareOptions options)
{
    // Validate before reaching the native implementation.
    if (source == null)
        throw new ArgumentNullException("source");

    if ((options & ValidHashCodeOfStringMaskOffFlags) != 0)
        throw new ArgumentException(SR.Argument_InvalidFlag, "options");

    Contract.EndContractBlock();

    return GetHashCodeOfStringCore(source, options);
}
// Hashes a string under the given comparison options. The two ordinal modes
// short-circuit to cheap string hashes; everything else goes through the
// sort-key-equivalent path.
public virtual int GetHashCode(string source, CompareOptions options)
{
    if (source == null)
        throw new ArgumentNullException("source");

    switch (options)
    {
        case CompareOptions.Ordinal:
            return source.GetHashCode();

        case CompareOptions.OrdinalIgnoreCase:
            return TextInfo.GetHashCodeOrdinalIgnoreCase(source);

        default:
            // GetHashCodeOfString does the remaining parameter validation:
            // it throws for Ordinal/OrdinalIgnoreCase mixed with other flags
            // and for StringSort.
            return GetHashCodeOfString(source, options);
    }
}
////////////////////////////////////////////////////////////////////////
//
// ToString
//
// Implements Object.ToString(). Returns a string describing the
// CompareInfo.
//
////////////////////////////////////////////////////////////////////////
// Human-readable description, e.g. "CompareInfo - en-US".
public override String ToString()
{
    return String.Concat("CompareInfo - ", this.Name);
}
}
}
| |
// Camera Path 3
// Available on the Unity Asset Store
// Copyright (c) 2013 Jasper Stocker http://support.jasperstocker.com/camera-path/
// For support contact email@jasperstocker.com
//
// THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY
// KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
// PARTICULAR PURPOSE.
using UnityEngine;
using UnityEditor;
// Static scene-view GUI for editing a CameraPath: draws the path, its points
// and handles, and routes interaction according to the current point mode.
public class CameraPathEditorSceneGUI
{
    // Fraction of the path per sampled segment when drawing curves.
    private const float LINE_RESOLUTION = 0.005f;
    // Base scale factor applied to scene-view handle sizes.
    private const float HANDLE_SCALE = 0.1f;
    // The path being edited; must be assigned before OnSceneGUI is called.
    public static CameraPath _cameraPath;
    // Associated animator component (assigned externally; not read in this chunk).
    public static CameraPathAnimator _animator;
    // Index of the currently selected point, persisted on the path itself.
    public static int selectedPointIndex
    {
        get { return _cameraPath.selectedPoint; }
        set { _cameraPath.selectedPoint = value; }
    }
    // Active editing mode (transform, orientations, FOV, ...), persisted on the path.
    public static CameraPath.PointModes _pointMode
    {
        get { return _cameraPath.pointMode; }
        set { _cameraPath.pointMode = value; }
    }
// Scene-view entry point. Draws the free-point indicators and the path
// outline, then dispatches to the handler for the active editing mode.
// Does nothing when gizmos are disabled or the path transform is rotated
// (the original code bails out for any non-identity rotation).
public static void OnSceneGUI()
{
    if (!_cameraPath.showGizmos)
        return;
    if (_cameraPath.transform.rotation != Quaternion.identity)
        return;

    _pointMode = _cameraPath.pointMode;

    if (SceneView.focusedWindow != null)
        SceneView.focusedWindow.wantsMouseMove = false;

    // Draw small grey dots for every freely-positioned sub-point of each list.
    Handles.color = CameraPathColours.GREY;
    DrawFreePointIndicators(_cameraPath.fovList);
    DrawFreePointIndicators(_cameraPath.delayList);
    DrawFreePointIndicators(_cameraPath.orientationList);
    DrawFreePointIndicators(_cameraPath.speedList);
    DrawFreePointIndicators(_cameraPath.tiltList);

    DrawPathOutline();

    // Dispatch to the per-mode GUI. Modes that share a handler are grouped.
    switch (_pointMode)
    {
        case CameraPath.PointModes.Transform:
        case CameraPath.PointModes.ControlPoints:
            SceneGUIPointBased();
            break;
        case CameraPath.PointModes.Orientations:
            SceneGUIOrientationBased();
            break;
        case CameraPath.PointModes.FOV:
            SceneGUIFOVBased();
            break;
        case CameraPath.PointModes.Events:
            SceneGUIEventBased();
            break;
        case CameraPath.PointModes.Speed:
            SceneGUISpeedBased();
            break;
        case CameraPath.PointModes.Tilt:
            SceneGUITiltBased();
            break;
        case CameraPath.PointModes.Delay:
            SceneGUIDelayBased();
            break;
        case CameraPath.PointModes.Ease:
            SceneGUIEaseBased();
            break;
        case CameraPath.PointModes.AddPathPoints:
            AddPathPoints();
            break;
        case CameraPath.PointModes.RemovePathPoints:
            RemovePathPoints();
            break;
        case CameraPath.PointModes.AddOrientations:
        case CameraPath.PointModes.AddFovs:
        case CameraPath.PointModes.AddTilts:
        case CameraPath.PointModes.AddEvents:
        case CameraPath.PointModes.AddSpeeds:
        case CameraPath.PointModes.AddDelays:
            AddCPathPoints();
            break;
        case CameraPath.PointModes.RemoveOrientations:
        case CameraPath.PointModes.RemoveTilts:
        case CameraPath.PointModes.RemoveFovs:
        case CameraPath.PointModes.RemoveEvents:
        case CameraPath.PointModes.RemoveSpeeds:
        case CameraPath.PointModes.RemoveDelays:
            RemoveCPathPoints();
            break;
    }

    // Force a repaint after undo/redo so the scene reflects the restored state.
    if (Event.current.type == EventType.ValidateCommand)
    {
        switch (Event.current.commandName)
        {
            case "UndoRedoPerformed":
                GUI.changed = true;
                break;
        }
    }
}

// Draws a small dot (radius 0.2) for every point in the list whose position
// mode is Free. Factored out of OnSceneGUI, which previously repeated this
// loop verbatim for five different point lists.
private static void DrawFreePointIndicators(CameraPathPointList pointList)
{
    int numberOfCPoints = pointList.realNumberOfPoints;
    for (int i = 0; i < numberOfCPoints; i++)
    {
        CameraPathPoint point = pointList[i];
        if (point.positionModes == CameraPathPoint.PositionModes.Free)
            Handles.DotCap(0, point.worldPosition, Quaternion.identity, 0.2f);
    }
}

// Draws the path curve as short line segments, adaptively subdividing each
// control-point arc by its stored length and distance to the scene camera.
// Arcs whose endpoints are both behind the camera are skipped entirely.
private static void DrawPathOutline()
{
    Camera sceneCamera = Camera.current;
    int numberOfPoints = _cameraPath.numberOfPoints;
    Handles.color = _cameraPath.selectedPathColour;
    float pointPercentage = 1.0f / (numberOfPoints - 1);
    for (int i = 0; i < numberOfPoints - 1; i++)
    {
        CameraPathControlPoint pointA = _cameraPath.GetPoint(i);
        CameraPathControlPoint pointB = _cameraPath.GetPoint(i + 1);
        float dotPA = Vector3.Dot(sceneCamera.transform.forward, pointA.worldPosition - sceneCamera.transform.position);
        float dotPB = Vector3.Dot(sceneCamera.transform.forward, pointB.worldPosition - sceneCamera.transform.position);
        if (dotPA < 0 && dotPB < 0) // points are both behind camera - don't render
            continue;

        float pointAPercentage = pointPercentage * i;
        float pointBPercentage = pointPercentage * (i + 1);
        float arcPercentage = pointBPercentage - pointAPercentage;

        // More segments for long arcs near the camera; never fewer than 10.
        Vector3 arcCentre = (pointA.worldPosition + pointB.worldPosition) * 0.5f;
        float arcLength = _cameraPath.StoredArcLength(_cameraPath.GetCurveIndex(pointA.index));
        float arcDistance = Vector3.Distance(sceneCamera.transform.position, arcCentre);
        int arcPoints = Mathf.Max(Mathf.RoundToInt(arcLength * (40 / Mathf.Max(arcDistance, 20))), 10);

        float arcTime = 1.0f / arcPoints;
        float endLoop = 1.0f - arcTime;
        Vector3 lastPoint = Vector3.zero;
        for (float p = 0; p < endLoop; p += arcTime)
        {
            float p2 = p + arcTime;
            float pathPercentageA = pointAPercentage + arcPercentage * p;
            float pathPercentageB = pointAPercentage + arcPercentage * p2;
            Vector3 lineStart = _cameraPath.GetPathPosition(pathPercentageA, true);
            Vector3 lineEnd = _cameraPath.GetPathPosition(pathPercentageB, true);
            Handles.DrawLine(lineStart, lineEnd);
            lastPoint = lineEnd;
        }
        // Close the remaining gap up to pointB exactly (float stepping can
        // stop short of the arc end).
        Handles.DrawLine(lastPoint, _cameraPath.GetPathPosition(pointBPercentage, true));
    }
}
// Scene handles for the main path control points (Transform / ControlPoints
// modes): selectable dots per point, a position handle for the selected
// point and, when interpolating with beziers, its tangent control points.
private static void SceneGUIPointBased()
{
    Camera sceneCamera = Camera.current;
    int realNumberOfPoints = _cameraPath.realNumberOfPoints;
    for (int i = 0; i < realNumberOfPoints; i++)
    {
        CameraPathControlPoint point = _cameraPath[i];
        // Cull points behind the scene camera.
        if (Vector3.Dot(sceneCamera.transform.forward, point.worldPosition - sceneCamera.transform.position) < 0)
            continue;
        if (_cameraPath.enableUndo) Undo.RecordObject(point, "Modifying Path Point");
        Handles.Label(point.worldPosition, point.displayName+"\n"+(point.percentage*100).ToString("F1")+"%");
        float pointHandleSize = HandleUtility.GetHandleSize(point.worldPosition) * HANDLE_SCALE;
        Handles.color = (i == selectedPointIndex) ? _cameraPath.selectedPointColour : _cameraPath.unselectedPointColour;
        // Clicking a dot selects it; clicking the already-selected dot also
        // drops the mode back to Transform.
        if (Handles.Button(point.worldPosition, Quaternion.identity, pointHandleSize, pointHandleSize, Handles.DotCap))
        {
            if (i == selectedPointIndex)
                _cameraPath.pointMode = CameraPath.PointModes.Transform;
            ChangeSelectedPointIndex(i);
            GUI.changed = true;
        }
        if(i == selectedPointIndex)
        {
            if (_pointMode == CameraPath.PointModes.Transform || _cameraPath.interpolation != CameraPath.Interpolation.Bezier)
            {
                // Move the point itself with a standard position handle.
                Vector3 currentPosition = point.worldPosition;
                currentPosition = Handles.DoPositionHandle(currentPosition, Quaternion.identity);
                point.worldPosition = currentPosition;
                // SerializedObject so = new SerializedObject(point);
                // SerializedProperty pointPosition = so.FindProperty("_position");
                // Vector3 currentWorldPosition = point.worldPosition;
                // currentWorldPosition = Handles.DoPositionHandle(currentWorldPosition, Quaternion.identity);
                // pointPosition.vector3Value = point.WorldToLocalPosition(currentWorldPosition);
                if(_cameraPath.interpolation == CameraPath.Interpolation.Bezier)
                {
                    // Show the bezier tangents as small clickable dots;
                    // clicking one switches to ControlPoints mode.
                    Handles.color = CameraPathColours.DARKGREY;
                    float pointSize = pointHandleSize * 0.5f;
                    Handles.DrawLine(point.worldPosition, point.forwardControlPointWorld);
                    Handles.DrawLine(point.worldPosition, point.backwardControlPointWorld);
                    if (Handles.Button(point.forwardControlPointWorld, Quaternion.identity, pointSize, pointSize, Handles.DotCap))
                        _cameraPath.pointMode = CameraPath.PointModes.ControlPoints;
                    if (Handles.Button(point.backwardControlPointWorld, Quaternion.identity, pointSize, pointSize, Handles.DotCap))
                        _cameraPath.pointMode = CameraPath.PointModes.ControlPoints;
                }
            }
            else
            {
                // ControlPoints mode: edit the bezier tangents directly.
                //Backward ControlPoints point - render first so it's behind the forward
                Handles.DrawLine(point.worldPosition, point.backwardControlPointWorld);
                point.backwardControlPointWorld = Handles.DoPositionHandle(point.backwardControlPointWorld, Quaternion.identity);
                // NOTE(review): these labels are placed at backwardControlPoint /
                // forwardControlPoint while the handles use the *World variants —
                // possibly should be the world-space positions; confirm.
                if (Vector3.Dot(sceneCamera.transform.forward, point.worldPosition - sceneCamera.transform.position) > 0)
                    Handles.Label(point.backwardControlPoint, "point " + i + " reverse ControlPoints point");
                //Forward ControlPoints point
                if (Vector3.Dot(sceneCamera.transform.forward, point.worldPosition - sceneCamera.transform.position) > 0)
                    Handles.Label(point.forwardControlPoint, "point " + i + " ControlPoints point");
                Handles.color = _cameraPath.selectedPointColour;
                Handles.DrawLine(point.worldPosition, point.forwardControlPointWorld);
                point.forwardControlPointWorld = Handles.DoPositionHandle(point.forwardControlPointWorld, Quaternion.identity);
            }
        }
    }
}
// Scene handles for orientation points: an arrow per point (plus up/right
// axes when selected), a rotation handle for the selected point, and the
// optional arrow field showing the interpolated orientation along the path.
private static void SceneGUIOrientationBased()
{
    DisplayAtPoint();
    CameraPathOrientationList orientationList = _cameraPath.orientationList;
    Camera sceneCamera = Camera.current;
    int orientationCount = orientationList.realNumberOfPoints;
    for (int i = 0; i < orientationCount; i++)
    {
        CameraPathOrientation orientation = orientationList[i];
        if (_cameraPath.enableUndo) Undo.RecordObject(orientation, "Modifying Orientation Point");
        // Cull points behind the scene camera.
        if (Vector3.Dot(sceneCamera.transform.forward, orientation.worldPosition - sceneCamera.transform.position) < 0)
            continue;
        string orientationLabel = orientation.displayName;
        orientationLabel += "\nat percentage: " + orientation.percent.ToString("F3");
        switch(orientation.positionModes)
        {
            case CameraPathPoint.PositionModes.FixedToPoint:
                orientationLabel += "\nat point: " + orientation.point.displayName;
                break;
        }
        Handles.Label(orientation.worldPosition, orientationLabel);
        float pointHandleSize = HandleUtility.GetHandleSize(orientation.worldPosition) * HANDLE_SCALE;
        Handles.color = (i == selectedPointIndex) ? Color.blue : _cameraPath.unselectedPointColour;
        // Forward (look) direction of this orientation point.
        Handles.ArrowCap(0, orientation.worldPosition, orientation.rotation, pointHandleSize * 4);
        if(i == selectedPointIndex)
        {
            //up arrow
            Handles.color = Color.green;
            Quaternion arrowUp = orientation.rotation * Quaternion.FromToRotation(Vector3.forward, Vector3.up);
            Handles.ArrowCap(0, orientation.worldPosition, arrowUp, pointHandleSize * 4);
            //right arrow
            Handles.color = Color.red;
            Quaternion arrowRight = orientation.rotation * Quaternion.FromToRotation(Vector3.forward, Vector3.right);
            Handles.ArrowCap(0, orientation.worldPosition, arrowRight, pointHandleSize * 4);
        }
        // Clickable dot to select this orientation point.
        if (Handles.Button(orientation.worldPosition, Quaternion.identity, pointHandleSize, pointHandleSize, Handles.DotCap))
        {
            ChangeSelectedPointIndex(i);
            GUI.changed = true;
        }
        if (i == selectedPointIndex)
        {
            // Rotation handle; only write back when the user actually rotated.
            Quaternion currentRotation = orientation.rotation;
            currentRotation = Handles.DoRotationHandle(currentRotation, orientation.worldPosition);
            if (currentRotation != orientation.rotation)
            {
                orientation.rotation = currentRotation;
            }
            CPPSlider(orientation);
        }
    }
    if(_cameraPath.showOrientationIndicators)//draw orientation indicators
    {
        // Arrows at regular world-space intervals showing the interpolated
        // rotation along the whole path.
        Handles.color = _cameraPath.orientationIndicatorColours;
        float indicatorLength = _cameraPath.orientationIndicatorUnitLength / _cameraPath.pathLength;
        for(float i = 0; i < 1; i += indicatorLength)
        {
            Vector3 indicatorPosition = _cameraPath.GetPathPosition(i);
            Quaternion inicatorRotation = _cameraPath.GetPathRotation(i,false);
            float indicatorHandleSize = HandleUtility.GetHandleSize(indicatorPosition) * HANDLE_SCALE * 4;
            Handles.ArrowCap(0, indicatorPosition, inicatorRotation, indicatorHandleSize);
        }
    }
}
// Scene handles for FOV points: a labelled, selectable dot per point and a
// path-position slider (CPPSlider) for the selected one.
private static void SceneGUIFOVBased()
{
    DisplayAtPoint();
    CameraPathFOVList fovList = _cameraPath.fovList;
    Camera sceneCamera = Camera.current;
    int pointCount = fovList.realNumberOfPoints;
    for (int i = 0; i < pointCount; i++)
    {
        CameraPathFOV fovPoint = fovList[i];
        if (_cameraPath.enableUndo) Undo.RecordObject(fovPoint, "Modifying FOV Point");
        // Cull points behind the scene camera.
        if (Vector3.Dot(sceneCamera.transform.forward, fovPoint.worldPosition - sceneCamera.transform.position) < 0)
            continue;
        string pointLabel = fovPoint.displayName;
        pointLabel += "\nvalue: " + fovPoint.FOV.ToString("F1");
        if (fovPoint.positionModes == CameraPathPoint.PositionModes.FixedToPoint) pointLabel += "\nat point: " + fovPoint.point.displayName;
        else pointLabel += "\nat percentage: " + fovPoint.percent.ToString("F3");
        Handles.Label(fovPoint.worldPosition, pointLabel);
        float pointHandleSize = HandleUtility.GetHandleSize(fovPoint.worldPosition) * HANDLE_SCALE;
        Handles.color = (i == selectedPointIndex) ? _cameraPath.selectedPointColour : _cameraPath.unselectedPointColour;
        if (Handles.Button(fovPoint.worldPosition, Quaternion.identity, pointHandleSize, pointHandleSize, Handles.DotCap))
        {
            ChangeSelectedPointIndex(i);
            GUI.changed = true;
        }
        if (i == selectedPointIndex)
        {
            CPPSlider(fovPoint);
        }
    }
}
// Scene handles for event points: a labelled dot per point (label describes
// the event type, target/method or broadcast name) and a path-position
// slider for the selected one.
private static void SceneGUIEventBased()
{
    DisplayAtPoint();
    CameraPathEventList eventList = _cameraPath.eventList;
    Camera sceneCamera = Camera.current;
    int pointCount = eventList.realNumberOfPoints;
    for (int i = 0; i < pointCount; i++)
    {
        CameraPathEvent eventPoint = eventList[i];
        if (_cameraPath.enableUndo) Undo.RecordObject(eventPoint, "Modifying Event Point");
        // Cull points behind the scene camera.
        if (Vector3.Dot(sceneCamera.transform.forward, eventPoint.worldPosition - sceneCamera.transform.position) < 0)
            continue;
        string pointLabel = eventPoint.displayName;
        pointLabel += "\ntype: " + eventPoint.type;
        if (eventPoint.type == CameraPathEvent.Types.Broadcast) pointLabel += "\nevent name: " + eventPoint.eventName;
        if (eventPoint.type == CameraPathEvent.Types.Call)
        {
            if (eventPoint.target != null)
                pointLabel += "\nevent target: " + eventPoint.target.name + " calling: " + eventPoint.methodName;
            else
                pointLabel += "\nno target assigned";
        }
        if (eventPoint.positionModes == CameraPathPoint.PositionModes.FixedToPoint) pointLabel += "\nat point: " + eventPoint.point.displayName;
        else pointLabel += "\nat percentage: " + eventPoint.percent.ToString("F3");
        Handles.Label(eventPoint.worldPosition, pointLabel);
        float pointHandleSize = HandleUtility.GetHandleSize(eventPoint.worldPosition) * HANDLE_SCALE;
        Handles.color = (i == selectedPointIndex) ? _cameraPath.selectedPointColour : _cameraPath.unselectedPointColour;
        if (Handles.Button(eventPoint.worldPosition, Quaternion.identity, pointHandleSize, pointHandleSize, Handles.DotCap))
        {
            ChangeSelectedPointIndex(i);
            GUI.changed = true;
        }
        if(i == selectedPointIndex)
        {
            CPPSlider(eventPoint);
        }
    }
}
// Scene handles for speed points: a labelled, selectable dot per point and a
// path-position slider for the selected one.
private static void SceneGUISpeedBased()
{
    DisplayAtPoint();
    CameraPathSpeedList pointList = _cameraPath.speedList;
    Camera sceneCamera = Camera.current;
    int pointCount = pointList.realNumberOfPoints;
    for (int i = 0; i < pointCount; i++)
    {
        CameraPathSpeed point = pointList[i];
        if (_cameraPath.enableUndo) Undo.RecordObject(point, "Modifying Speed Point");
        // Cull points behind the scene camera.
        if (Vector3.Dot(sceneCamera.transform.forward, point.worldPosition - sceneCamera.transform.position) < 0)
            continue;
        string pointLabel = point.displayName;
        pointLabel += "\nvalue: " + point.speed + " m/s";
        pointLabel += "\npercent: " + point.percent;
        pointLabel += "\na percent: " + _cameraPath.DeNormalisePercentage(point.percent);
        Handles.Label(point.worldPosition, pointLabel);
        float pointHandleSize = HandleUtility.GetHandleSize(point.worldPosition) * HANDLE_SCALE;
        Handles.color = (i == selectedPointIndex) ? _cameraPath.selectedPointColour : _cameraPath.unselectedPointColour;
        if (Handles.Button(point.worldPosition, Quaternion.identity, pointHandleSize, pointHandleSize, Handles.DotCap))
        {
            ChangeSelectedPointIndex(i);
            GUI.changed = true;
        }
        if (i == selectedPointIndex)
        {
            CPPSlider(point);
        }
    }
}
// Scene handles for tilt points: draws an attitude indicator (circle plus a
// tilted horizon line and a vertical reference line) per point, a selectable
// dot, and a path-position slider for the selected one.
private static void SceneGUITiltBased()
{
    DisplayAtPoint();
    CameraPathTiltList pointList = _cameraPath.tiltList;
    Camera sceneCamera = Camera.current;
    int pointCount = pointList.realNumberOfPoints;
    for (int i = 0; i < pointCount; i++)
    {
        CameraPathTilt point = pointList[i];
        if (_cameraPath.enableUndo) Undo.RecordObject(point, "Modifying Tilt Point");
        // Cull points behind the scene camera.
        if (Vector3.Dot(sceneCamera.transform.forward, point.worldPosition - sceneCamera.transform.position) < 0)
            continue;
        string pointLabel = point.displayName;
        pointLabel += "\nvalue: " + point.tilt.ToString("F1") + "\u00B0";
        Handles.Label(point.worldPosition, pointLabel);
        float pointHandleSize = HandleUtility.GetHandleSize(point.worldPosition) * HANDLE_SCALE;
        bool pointIsSelected = i == selectedPointIndex;
        Handles.color = (pointIsSelected) ? _cameraPath.selectedPointColour : _cameraPath.unselectedPointColour;
        float tiltSize = 2.0f;
        // Circle oriented along the path direction at this point's percentage.
        Vector3 pointForwardDirection = _cameraPath.GetPathDirection(_cameraPath.DeNormalisePercentage(point.percent));
        Quaternion qTilt = Quaternion.AngleAxis(-point.tilt, pointForwardDirection);
        Quaternion pointForward = Quaternion.LookRotation(pointForwardDirection);
        Handles.CircleCap(0, point.worldPosition, pointForward, tiltSize);
        // Horizon line rotated by the tilt angle around the path direction.
        Vector3 horizontalLineDirection = ((qTilt * Quaternion.AngleAxis(-90, Vector3.up)) * pointForwardDirection).normalized * tiltSize;
        Vector3 horizontalLineStart = point.worldPosition + horizontalLineDirection;
        Vector3 horizontalLineEnd = point.worldPosition - horizontalLineDirection;
        Handles.DrawLine(horizontalLineStart, horizontalLineEnd);
        // Perpendicular reference from the point "up" relative to the horizon.
        Vector3 verticalLineDirection = (Quaternion.AngleAxis(-90, pointForwardDirection) * horizontalLineDirection).normalized * tiltSize;
        Vector3 verticalLineStart = point.worldPosition + verticalLineDirection;
        Vector3 verticalLineEnd = point.worldPosition;
        Handles.DrawLine(verticalLineStart, verticalLineEnd);
        if (Handles.Button(point.worldPosition, Quaternion.identity, pointHandleSize, pointHandleSize, Handles.DotCap))
        {
            ChangeSelectedPointIndex(i);
            GUI.changed = true;
        }
        if (i == selectedPointIndex)
        {
            CPPSlider(point);
        }
    }
}
// Scene handles for delay points: a labelled dot per point (the implicit
// start/end points are labelled specially; time <= 0 means an indefinite
// delay except at the start) and a path-position slider for the selected one.
private static void SceneGUIDelayBased()
{
    DisplayAtPoint();
    CameraPathDelayList pointList = _cameraPath.delayList;
    Camera sceneCamera = Camera.current;
    int pointCount = pointList.realNumberOfPoints;
    for (int i = 0; i < pointCount; i++)
    {
        CameraPathDelay point = pointList[i];
        if (_cameraPath.enableUndo) Undo.RecordObject(point, "Modifying Delay Point");
        // Cull points behind the scene camera.
        if (Vector3.Dot(sceneCamera.transform.forward, point.worldPosition - sceneCamera.transform.position) < 0)
            continue;
        string pointLabel = "";
        if(point == pointList.introPoint)
        {
            pointLabel += "start point";
            if (point.time > 0)
                pointLabel += "\ndelay: " + point.time.ToString("F2") + " sec";
            else
                pointLabel += "\nNo delay";
        }
        else if (point == pointList.outroPoint)
            pointLabel += "end point";
        else
        {
            pointLabel += point.displayName;
            if (point.time > 0)
                pointLabel += "\ndelay: " + point.time.ToString("F2") + " sec";
            else
                pointLabel += "\ndelay indefinitely";
        }
        Handles.Label(point.worldPosition, pointLabel);
        float pointHandleSize = HandleUtility.GetHandleSize(point.worldPosition) * HANDLE_SCALE;
        Handles.color = (i == selectedPointIndex) ? _cameraPath.selectedPointColour : _cameraPath.unselectedPointColour;
        if (Handles.Button(point.worldPosition, Quaternion.identity, pointHandleSize, pointHandleSize, Handles.DotCap))
        {
            ChangeSelectedPointIndex(i);
            GUI.changed = true;
        }
        if (i == selectedPointIndex)
        {
            CPPSlider(point);
        }
    }
}
// Scene handles for ease curves at each delay point: for every point, draws
// draggable sliders marking where the ease-out (after the point) and ease-in
// (before the point) regions end/start, and visualizes the ease curve as an
// elevated polyline above the path.
private static void SceneGUIEaseBased()
{
    CameraPathDelayList pointList = _cameraPath.delayList;
    Camera sceneCamera = Camera.current;
    int pointCount = pointList.realNumberOfPoints;
    for (int i = 0; i < pointCount; i++)
    {
        CameraPathDelay point = pointList[i];
        if (_cameraPath.enableUndo) Undo.RecordObject(point, "Modifying Ease Curves");
        // Cull points behind the scene camera.
        if (Vector3.Dot(sceneCamera.transform.forward, point.worldPosition - sceneCamera.transform.position) < 0)
            continue;
        string pointLabel = "";
        if (point == pointList.introPoint)
            pointLabel += "start point";
        else if (point == pointList.outroPoint)
            pointLabel += "end point";
        else
        {
            pointLabel += point.displayName;
            if (point.time > 0)
                pointLabel += "\ndelay: " + point.time.ToString("F2") + " sec";
            else
                pointLabel += "\ndelay indefinitely";
        }
        Handles.Label(point.worldPosition, pointLabel);
        float pointHandleSize = HandleUtility.GetHandleSize(point.worldPosition) * HANDLE_SCALE;
        Handles.color = (i == selectedPointIndex) ? _cameraPath.selectedPointColour : _cameraPath.unselectedPointColour;
        if (Handles.Button(point.worldPosition, Quaternion.identity, pointHandleSize, pointHandleSize, Handles.DotCap))
        {
            ChangeSelectedPointIndex(i);
            GUI.changed = true;
        }
        // Height used to lift the ease-curve visualization above the path.
        // float unitPercent = 0.5f;
        Vector3 easeUp = Vector3.up * _cameraPath.pathLength * 0.1f;
        Handles.color = CameraPathColours.RED;
        if (point != pointList.outroPoint)
        {
            // Ease-out: slider along the path direction; dragging it snaps the
            // ease end to the path percentage nearest the mouse.
            float outroEasePointPercent = _cameraPath.GetOutroEasePercentage(point);
            Vector3 outroEasePoint = _cameraPath.GetPathPosition(outroEasePointPercent, true);
            Vector3 outroeaseDirection = _cameraPath.GetPathDirection(outroEasePointPercent, false);
            Handles.Label(outroEasePoint, "Ease Out\n" + point.displayName);
            Vector3 newPosition = Handles.Slider(outroEasePoint, outroeaseDirection);
            float movement = Vector3.Distance(outroEasePoint, newPosition);
            if (movement > Mathf.Epsilon)
            {
                float newPercent = NearestmMousePercentage();
                float curvePercent = _cameraPath.GetCurvePercentage(_cameraPath.delayList.GetPoint(point.index), _cameraPath.delayList.GetPoint(point.index + 1), newPercent);
                point.outroEndEasePercentage = curvePercent;
            }
            // Draw the outro curve in ten segments: path line plus the curve
            // evaluated and lifted above it.
            float percentWidth = (outroEasePointPercent - point.percent);
            // float easeSpace = _cameraPath.pathLength * percentWidth;
            // float easeLength = unitPercent / percentWidth;
            float percentMovement = percentWidth / 10.0f;
            for (float e = point.percent; e < outroEasePointPercent; e += percentMovement)
            {
                float eB = e + percentMovement;
                Vector3 lineStart = _cameraPath.GetPathPosition(e, true);
                Vector3 lineEnd = _cameraPath.GetPathPosition(eB, true);
                Handles.DrawLine(lineStart,lineEnd);
                float animCurvePercentA = (e - point.percent) / percentWidth;
                float animCurvePercentB = (eB - point.percent) / percentWidth;
                Vector3 lineEaseUpA = easeUp * point.outroCurve.Evaluate(animCurvePercentA);
                Vector3 lineEaseUpB = easeUp * point.outroCurve.Evaluate(animCurvePercentB);
                Handles.DrawLine(lineStart + lineEaseUpA, lineEnd + lineEaseUpB);
            }
        }
        if (point != pointList.introPoint)
        {
            // Ease-in: same as above, but before the point and with the slider
            // pointing backwards along the path.
            float introEasePointPercent = _cameraPath.GetIntroEasePercentage(point);
            Vector3 introEasePoint = _cameraPath.GetPathPosition(introEasePointPercent, true);
            Vector3 introEaseDirection = _cameraPath.GetPathDirection(introEasePointPercent, false);
            Handles.color = CameraPathColours.RED;
            Handles.Label(introEasePoint, "Ease In\n" + point.displayName);
            Vector3 newPosition = Handles.Slider(introEasePoint, -introEaseDirection);
            float movement = Vector3.Distance(introEasePoint, newPosition);
            if (movement > Mathf.Epsilon)
            {
                float newPercent = NearestmMousePercentage();
                float curvePercent = 1-_cameraPath.GetCurvePercentage(_cameraPath.delayList.GetPoint(point.index-1), _cameraPath.delayList.GetPoint(point.index), newPercent);
                point.introStartEasePercentage = curvePercent;
            }
            float percentWidth = (point.percent - introEasePointPercent);
            // float easeSpace = _cameraPath.pathLength * percentWidth;
            // float easeLength = unitPercent / percentWidth;
            float percentMovement = percentWidth / 10.0f;
            for (float e = introEasePointPercent; e < point.percent; e += percentMovement)
            {
                float eB = e + percentMovement;
                Vector3 lineStart = _cameraPath.GetPathPosition(e, true);
                Vector3 lineEnd = _cameraPath.GetPathPosition(eB, true);
                Handles.DrawLine(lineStart, lineEnd);
                float animCurvePercentA = (e - introEasePointPercent) / percentWidth;
                float animCurvePercentB = (eB - introEasePointPercent) / percentWidth;
                Vector3 lineEaseUpA = easeUp * point.introCurve.Evaluate(animCurvePercentA);
                Vector3 lineEaseUpB = easeUp * point.introCurve.Evaluate(animCurvePercentB);
                Handles.DrawLine(lineStart + lineEaseUpA, lineEnd + lineEaseUpB);
            }
        }
    }
}
// Marks the spot where the inspector's "add point at percent" value falls on
// the path: a small black dot with an explanatory label.
private static void DisplayAtPoint()
{
    float percent = _cameraPath.addPointAtPercent;
    Vector3 markerPosition = _cameraPath.GetPathPosition(percent, true);
    float dotSize = HandleUtility.GetHandleSize(markerPosition) * 0.05f;
    Handles.color = Color.black;
    Handles.DotCap(0, markerPosition, Quaternion.identity, dotSize);
    Handles.Label(markerPosition, "Add Point Here\nfrom Inspector");
}
// Add-path-point mode: shows small dots at the existing control points and a
// clickable dot that tracks the path position nearest the mouse; clicking it
// inserts a new control point there and selects it.
private static void AddPathPoints()
{
    // Mouse-move events are needed so the hover dot follows the cursor.
    if (SceneView.focusedWindow != null)
        SceneView.focusedWindow.wantsMouseMove = true;
    Handles.color = _cameraPath.unselectedPointColour;
    int numberOfPoints = _cameraPath.realNumberOfPoints;
    for (int i = 0; i < numberOfPoints; i++)
    {
        CameraPathControlPoint point = _cameraPath[i];
        float pointHandleSize = HandleUtility.GetHandleSize(point.worldPosition) * HANDLE_SCALE * 0.4f;
        Handles.DotCap(0, point.worldPosition, Quaternion.identity, pointHandleSize);
    }
    float mousePercentage = NearestmMousePercentage();// _track.GetNearestPoint(mousePlanePoint);
    Vector3 mouseTrackPoint = _cameraPath.GetPathPosition(mousePercentage, true);
    Handles.Label(mouseTrackPoint, "Add New Path Point");
    float newPointHandleSize = HandleUtility.GetHandleSize(mouseTrackPoint) * HANDLE_SCALE;
    Quaternion lookDirection = Quaternion.LookRotation(Camera.current.transform.forward);
    if (Handles.Button(mouseTrackPoint, lookDirection, newPointHandleSize, newPointHandleSize, Handles.DotCap))
    {
        // Insert at the index following the clicked percentage. The point is
        // created as a component on the path's GameObject.
        int newPointIndex = _cameraPath.GetNextPointIndex(mousePercentage,false);
        CameraPathControlPoint newPoint = _cameraPath.gameObject.AddComponent<CameraPathControlPoint>();//ScriptableObject.CreateInstance<CameraPathControlPoint>();
        newPoint.worldPosition = mouseTrackPoint;
        _cameraPath.InsertPoint(newPoint, newPointIndex);
        ChangeSelectedPointIndex(newPointIndex);
        GUI.changed = true;
    }
}
// Remove-path-point mode: each control point becomes a clickable,
// camera-facing dot; clicking one removes it. Returns immediately after a
// removal, since the point collection was just mutated.
private static void RemovePathPoints()
{
    if (SceneView.focusedWindow != null)
        SceneView.focusedWindow.wantsMouseMove = true;
    int numberOfPoints = _cameraPath.realNumberOfPoints;
    Handles.color = _cameraPath.selectedPointColour;
    // Orient the dots to face the mouse ray so they are easy to click.
    Ray mouseRay = Camera.current.ScreenPointToRay(new Vector3(Event.current.mousePosition.x, Screen.height - Event.current.mousePosition.y - 30, 0));
    Quaternion mouseLookDirection = Quaternion.LookRotation(-mouseRay.direction);
    for (int i = 0; i < numberOfPoints; i++)
    {
        CameraPathControlPoint point = _cameraPath[i];
        float pointHandleSize = HandleUtility.GetHandleSize(point.worldPosition) * HANDLE_SCALE;
        Handles.Label(point.worldPosition, "Remove Point: "+point.displayName);
        if (Handles.Button(point.worldPosition, mouseLookDirection, pointHandleSize, pointHandleSize, Handles.DotCap))
        {
            _cameraPath.RemovePoint(point);
            GUI.changed = true;
            // Stop iterating: the collection was just modified.
            return;
        }
    }
}
/// <summary>
/// Scene-view mode that adds a secondary path point (orientation, FOV, tilt,
/// event, speed or delay, depending on the current point mode) at the curve
/// position nearest the mouse.
/// </summary>
private static void AddCPathPoints()
{
if (SceneView.focusedWindow != null)
SceneView.focusedWindow.wantsMouseMove = true;
Handles.color = _cameraPath.selectedPointColour;
// Pick the point list that matches the active "Add*" mode.
CameraPathPointList pointList = null;
switch(_pointMode)
{
case CameraPath.PointModes.AddOrientations:
pointList = _cameraPath.orientationList;
break;
case CameraPath.PointModes.AddFovs:
pointList = _cameraPath.fovList;
break;
case CameraPath.PointModes.AddTilts:
pointList = _cameraPath.tiltList;
break;
case CameraPath.PointModes.AddEvents:
pointList = _cameraPath.eventList;
break;
case CameraPath.PointModes.AddSpeeds:
pointList = _cameraPath.speedList;
break;
case CameraPath.PointModes.AddDelays:
pointList = _cameraPath.delayList;
break;
}
// NOTE(review): pointList stays null (and the next line throws) if this
// method is called in a non-"Add*" point mode — presumably callers guarantee
// the mode; confirm against the dispatching code.
int numberOfPoints = pointList.realNumberOfPoints;
// Show the existing points of the selected list as small dots.
for (int i = 0; i < numberOfPoints; i++)
{
CameraPathPoint point = pointList[i];
float pointHandleSize = HandleUtility.GetHandleSize(point.worldPosition) * HANDLE_SCALE * 0.4f;
Handles.DotCap(0, point.worldPosition, Quaternion.identity, pointHandleSize);
}
// Offer a camera-facing "add" button at the curve position nearest the mouse.
float mousePercentage = NearestmMousePercentage();// _track.GetNearestPoint(mousePlanePoint);
Vector3 mouseTrackPoint = _cameraPath.GetPathPosition(mousePercentage, true);
Handles.Label(mouseTrackPoint, "Add New Point");
float newPointHandleSize = HandleUtility.GetHandleSize(mouseTrackPoint) * HANDLE_SCALE;
Ray mouseRay = Camera.current.ScreenPointToRay(new Vector3(Event.current.mousePosition.x, Screen.height - Event.current.mousePosition.y - 30, 0));
Quaternion mouseLookDirection = Quaternion.LookRotation(-mouseRay.direction);
if (Handles.Button(mouseTrackPoint, mouseLookDirection, newPointHandleSize, newPointHandleSize, Handles.DotCap))
{
// Anchor the new point between the two control points that bracket the
// clicked percentage, expressed as a percentage along that curve segment.
CameraPathControlPoint curvePointA = _cameraPath[_cameraPath.GetLastPointIndex(mousePercentage,false)];
CameraPathControlPoint curvePointB = _cameraPath[_cameraPath.GetNextPointIndex(mousePercentage,false)];
float curvePercentage = _cameraPath.GetCurvePercentage(curvePointA, curvePointB, mousePercentage);
switch(_pointMode)
{
case CameraPath.PointModes.AddOrientations:
// Seed the new orientation with the path's travel direction at the click.
Quaternion pointRotation = Quaternion.LookRotation(_cameraPath.GetPathDirection(mousePercentage));
CameraPathOrientation newOrientation = ((CameraPathOrientationList)pointList).AddOrientation(curvePointA, curvePointB, curvePercentage, pointRotation);
ChangeSelectedPointIndex(pointList.IndexOf(newOrientation));
break;
case CameraPath.PointModes.AddFovs:
// Seed with both projection values so perspective and orthographic cameras work.
float pointFOV = _cameraPath.fovList.GetValue(mousePercentage, CameraPathFOVList.ProjectionType.FOV);
float pointSize = _cameraPath.fovList.GetValue(mousePercentage, CameraPathFOVList.ProjectionType.Orthographic);
CameraPathFOV newFOVPoint = ((CameraPathFOVList)pointList).AddFOV(curvePointA, curvePointB, curvePercentage, pointFOV, pointSize);
ChangeSelectedPointIndex(pointList.IndexOf(newFOVPoint));
break;
case CameraPath.PointModes.AddTilts:
// Seed with the interpolated tilt already present at this percentage.
float pointTilt = _cameraPath.GetPathTilt(mousePercentage);
CameraPathTilt newTiltPoint = ((CameraPathTiltList)pointList).AddTilt(curvePointA, curvePointB, curvePercentage, pointTilt);
ChangeSelectedPointIndex(pointList.IndexOf(newTiltPoint));
break;
case CameraPath.PointModes.AddEvents:
CameraPathEvent newEventPoint = ((CameraPathEventList)pointList).AddEvent(curvePointA, curvePointB, curvePercentage);
ChangeSelectedPointIndex(pointList.IndexOf(newEventPoint));
break;
case CameraPath.PointModes.AddSpeeds:
_cameraPath.speedList.listEnabled = true;//if we're adding speeds then we probable want to enable it
CameraPathSpeed newSpeedPoint = ((CameraPathSpeedList)pointList).AddSpeedPoint(curvePointA, curvePointB, curvePercentage);
newSpeedPoint.speed = _animator.pathSpeed;
ChangeSelectedPointIndex(pointList.IndexOf(newSpeedPoint));
break;
case CameraPath.PointModes.AddDelays:
CameraPathDelay newDelayPoint = ((CameraPathDelayList)pointList).AddDelayPoint(curvePointA, curvePointB, curvePercentage);
ChangeSelectedPointIndex(pointList.IndexOf(newDelayPoint));
break;
}
GUI.changed = true;
}
}
/// <summary>
/// Scene-view mode that removes secondary path points (orientations, FOVs,
/// tilts, events, speeds or delays, depending on the current point mode)
/// when their button is clicked.
/// </summary>
private static void RemoveCPathPoints()
{
    SceneView focused = SceneView.focusedWindow;
    if (focused != null)
        focused.wantsMouseMove = true;

    // Pick the point list that matches the active "Remove*" mode.
    CameraPathPointList targetList = null;
    switch (_pointMode)
    {
        case CameraPath.PointModes.RemoveOrientations:
            targetList = _cameraPath.orientationList;
            break;
        case CameraPath.PointModes.RemoveFovs:
            targetList = _cameraPath.fovList;
            break;
        case CameraPath.PointModes.RemoveTilts:
            targetList = _cameraPath.tiltList;
            break;
        case CameraPath.PointModes.RemoveEvents:
            targetList = _cameraPath.eventList;
            break;
        case CameraPath.PointModes.RemoveSpeeds:
            targetList = _cameraPath.speedList;
            break;
        case CameraPath.PointModes.RemoveDelays:
            targetList = _cameraPath.delayList;
            break;
    }

    int candidateCount = targetList.realNumberOfPoints;
    Handles.color = _cameraPath.selectedPointColour;
    Quaternion facing = Quaternion.LookRotation(Camera.current.transform.forward);
    for (int p = 0; p < candidateCount; p++)
    {
        CameraPathPoint candidate = targetList[p];
        float buttonSize = HandleUtility.GetHandleSize(candidate.worldPosition) * HANDLE_SCALE;
        Handles.Label(candidate.worldPosition, "Remove Point " + p);
        if (!Handles.Button(candidate.worldPosition, facing, buttonSize, buttonSize, Handles.DotCap))
            continue;

        // Bail out immediately: the collection we are iterating has changed.
        targetList.RemovePoint(candidate);
        GUI.changed = true;
        return;
    }
}
/// <summary>
/// Draws a two-direction slider handle that lets the user drag a secondary
/// path point forwards/backwards along the path. Dragging a point that is
/// fixed to a control point converts it to a free point.
/// </summary>
/// <param name="point">The path point being edited in the scene view.</param>
private static void CPPSlider(CameraPathPoint point)
{
if(point.positionModes == CameraPathPoint.PositionModes.FixedToPercent)
return;//can't move fixed points
Vector3 pointPathDirection = _cameraPath.GetPathDirection(point.percent, false);
Handles.color = CameraPathColours.BLUE;
// Two opposing sliders so the point can be dragged either way along the path.
Vector3 newPosition = Handles.Slider(point.worldPosition, pointPathDirection);
newPosition = Handles.Slider(newPosition, -pointPathDirection);
float movement = Vector3.Distance(point.worldPosition, newPosition);
if (movement > Mathf.Epsilon)
{
//float newPercent = _cameraPath.GetNearestPoint(newPosition, false);
float newPercent = NearestmMousePercentage();
switch(point.positionModes)
{
case CameraPathPoint.PositionModes.Free:
// Re-anchor the point between the control points bracketing the new percentage.
CameraPathControlPoint curvePointA = _cameraPath[_cameraPath.GetLastPointIndex(newPercent, false)];
CameraPathControlPoint curvePointB = _cameraPath[_cameraPath.GetNextPointIndex(newPercent, false)];
point.cpointA = curvePointA;
point.cpointB = curvePointB;
point.curvePercentage = _cameraPath.GetCurvePercentage(curvePointA, curvePointB, newPercent);
break;
case CameraPathPoint.PositionModes.FixedToPoint:
// Dragging a point that was pinned to a control point frees it first.
point.positionModes = CameraPathPoint.PositionModes.Free;
CameraPathControlPoint newCurvePointA = _cameraPath[_cameraPath.GetLastPointIndex(newPercent, false)];
CameraPathControlPoint newCurvePointB = _cameraPath[_cameraPath.GetNextPointIndex(newPercent, false)];
// Guard against a degenerate segment when both lookups return the same point.
if(newCurvePointA == newCurvePointB)
newCurvePointB = _cameraPath[_cameraPath.GetPointIndex(newCurvePointB.index- 1)];
point.cpointA = newCurvePointA;
point.cpointB = newCurvePointB;
point.curvePercentage = _cameraPath.GetCurvePercentage(newCurvePointA, newCurvePointB, newPercent);
break;
}
// Snap the point back onto the curve and refresh cached path data.
point.worldPosition = _cameraPath.GetPathPosition(point.percent, false);
_cameraPath.RecalculateStoredValues();
selectedPointIndex = point.index;
}
}
/// <summary>
/// Get the nearest point on the path curve to the mouse position.
/// The path is sampled in screen space (the editor camera's 2D projection)
/// and the two closest samples are blended by their screen distances.
/// </summary>
/// <returns>A percentage (0..1) along the path nearest the mouse.</returns>
private static float NearestmMousePercentage()
{
Camera cam = Camera.current;
float screenHeight = cam.pixelHeight;
Vector2 mousePos = Event.current.mousePosition;
// GUI mouse coordinates are top-down; flip to match screen-space points.
mousePos.y = screenHeight - mousePos.y;
int numberOfSearchPoints = _cameraPath.storedValueArraySize;
// Track the nearest sample (A) and the second-nearest sample (B),
// both seeded with the t=0 sample.
Vector2 zeropoint = cam.WorldToScreenPoint(_cameraPath.GetPathPosition(0, true));
float nearestPointSqrMag = Vector2.SqrMagnitude(zeropoint - mousePos);
float nearestT = 0;
float nearestPointSqrMagB = Vector2.SqrMagnitude(zeropoint - mousePos);
float nearestTb = 0;
for (int i = 1; i < numberOfSearchPoints; i++)
{
float t = i / (float)numberOfSearchPoints;
Vector2 point = cam.WorldToScreenPoint(_cameraPath.GetPathPosition(t, true));
float thisPointMag = Vector2.SqrMagnitude(point - mousePos);
if (thisPointMag < nearestPointSqrMag)
{
// New best: the old best becomes the runner-up.
nearestPointSqrMagB = nearestPointSqrMag;
nearestTb = nearestT;
nearestT = t;
nearestPointSqrMag = thisPointMag;
}
else
{
if (thisPointMag < nearestPointSqrMagB)
{
nearestTb = t;
nearestPointSqrMagB = thisPointMag;
}
}
}
// Blend between the two nearest samples, weighted by actual (non-squared)
// screen distances, to refine beyond the sampling resolution.
float pointADist = Mathf.Sqrt(nearestPointSqrMag);
float pointBDist = Mathf.Sqrt(nearestPointSqrMagB);
float lerpvalue = pointADist / (pointADist + pointBDist);
return Mathf.Lerp(nearestT, nearestTb, lerpvalue);
}
/// <summary>
/// Updates the inspector's currently selected point index.
/// </summary>
/// <param name="newPointSelected">Index of the point to select.</param>
private static void ChangeSelectedPointIndex(int newPointSelected)
{
selectedPointIndex = newPointSelected;
}
}
| |
using System;
using System.Net;
using System.IO;
using System.Collections;
using System.Threading;
namespace ByteFX.Data.Common
{
// Identifies which phase of stream usage a timeout or error occurred in.
internal enum MultiHostStreamErrorType
{
Connecting,  // while establishing the connection to a host
Reading,     // while waiting for or reading incoming data
Writing      // while writing outgoing data
}
/// <summary>
/// Abstract Stream that connects to one of several candidate hosts
/// ('&amp;'-separated host names, or a Unix socket path starting with '/')
/// and then delegates all stream operations to the connected transport,
/// adding a polling read timeout on top.
/// Subclasses supply the actual transport via the CreateStream overloads.
/// </summary>
internal abstract class MultiHostStream : Stream
{
    // Underlying transport stream; set by the subclass's CreateStream.
    protected Stream stream;
    // Read timeout, in seconds.
    protected int readTimeOut;
    protected Exception baseException;

    /// <summary>
    /// Constructs a new MultiHostStream object with the given parameters
    /// and immediately attempts to connect.
    /// </summary>
    /// <param name="hostList">'&amp;'-separated host names, or a Unix socket path.</param>
    /// <param name="port">TCP port to connect to.</param>
    /// <param name="readTimeOut">Read timeout, in seconds.</param>
    /// <param name="connectTimeOut">Total connection timeout, in seconds.</param>
    public MultiHostStream(string hostList, int port, int readTimeOut, int connectTimeOut)
    {
        this.readTimeOut = readTimeOut;
        ProcessHosts( hostList, port, connectTimeOut );
    }

    // abstract members implemented by the concrete transport
    protected abstract void TimeOut(MultiHostStreamErrorType error);
    protected abstract void Error(string msg);
    protected abstract bool CreateStream( IPAddress ip, string host, int port );
    protected abstract bool CreateStream (string fileName);
    protected abstract bool DataAvailable
    {
        get;
    }

    /// <summary>
    /// Resolves all configured host names and tries each resulting IP
    /// address, starting at a random offset, until a connection succeeds
    /// or the connect timeout elapses.
    /// </summary>
    private void ProcessHosts( string hostList, int port, int connectTimeOut )
    {
        int startTime = Environment.TickCount;
        int toTicks = connectTimeOut * 1000;

        // support Unix sockets
        if (hostList.StartsWith ("/"))
        {
            CreateStream (hostList);
            return;
        }

        //
        // Host name can contain multiple hosts, separated by &.
        string [] dnsHosts = hostList.Split('&');
        Hashtable ips = new Hashtable();

        //
        // Each host name specified may contain multiple IP addresses
        // Lets look at the DNS entries for each host name
        foreach(string h in dnsHosts)
        {
            IPHostEntry hostAddress = Dns.GetHostByName(h);
            foreach (IPAddress addr in hostAddress.AddressList)
                // FIX: use the indexer instead of Add — two host names may
                // resolve to the same IP, and Hashtable.Add would throw.
                ips[addr] = hostAddress.HostName;
        }
        IPAddress[] keys = new IPAddress[ ips.Count ];
        ips.Keys.CopyTo( keys, 0 );

        if ((Environment.TickCount - startTime) > toTicks)
        {
            TimeOut(MultiHostStreamErrorType.Connecting);
            return;
        }

        // make sure they gave us at least one host
        if (ips.Count == 0)
        {
            Error("You must specify at least one host");
            return;
        }

        int index = 0;
        // now choose a random server if there are more than one
        if (ips.Count > 1)
        {
            System.Random random = new Random((int)DateTime.Now.Ticks);
            // FIX: Random.Next's upper bound is exclusive; Next(Count-1)
            // could never select the last host.
            index = random.Next(ips.Count);
        }

        //
        // Lets step through our hosts until we get a connection
        for (int i=0; i < ips.Count; i++)
        {
            if ((Environment.TickCount - startTime) > toTicks)
            {
                TimeOut(MultiHostStreamErrorType.Connecting);
                return;
            }
            // FIX: start at the randomly chosen index and wrap around.
            // Previously 'index' was computed but never used, so the loop
            // always began with the first host and the random selection
            // was dead code.
            IPAddress ip = keys[(index + i) % keys.Length];
            if (CreateStream( ip, (string)ips[ip], port ))
                return;
        }
    }

    /// <summary>
    /// Reads a single byte, polling until data is available or the read
    /// timeout elapses. Returns -1 after a timeout (TimeOut is expected to
    /// throw or record the failure — subclasses decide).
    /// </summary>
    public override int ReadByte()
    {
        int start = Environment.TickCount;
        int ticks = readTimeOut * 1000;
        while ((Environment.TickCount - start) < ticks)
        {
            if (DataAvailable)
            {
                int b = stream.ReadByte();
                return b;
            }
            else
                Thread.Sleep(0);
        }
        TimeOut(MultiHostStreamErrorType.Reading);
        return -1;
    }

    /// <summary>
    /// Reads up to <paramref name="count"/> bytes into
    /// <paramref name="buffer"/>, polling until all requested bytes arrive,
    /// the underlying stream ends, or the read timeout elapses.
    /// </summary>
    public override int Read(byte[] buffer, int offset, int count)
    {
        int numToRead = count;
        int start = Environment.TickCount;
        int ticks = readTimeOut * 1000;
        try
        {
            while (numToRead > 0 && (Environment.TickCount - start) < ticks)
            {
                if (DataAvailable)
                {
                    int bytesRead = stream.Read( buffer, offset, numToRead );
                    // End of stream: report how many bytes were actually read.
                    if (bytesRead == 0)
                        return (count - numToRead);
                    offset += bytesRead;
                    numToRead -= bytesRead;
                }
                else
                    Thread.Sleep(0);
            }
        }
        catch (Exception ex)
        {
            // NOTE(review): best-effort diagnostics only, preserved from the
            // original implementation; the timeout check below still fires
            // for an incomplete read.
            Console.WriteLine(ex.Message);
        }
        if (numToRead > 0)
            TimeOut(MultiHostStreamErrorType.Reading);
        return count;
    }

    // The members below simply delegate to the connected transport stream.

    public override bool CanRead
    {
        get { return stream.CanRead; }
    }
    public override bool CanWrite
    {
        get { return stream.CanWrite; }
    }
    public override bool CanSeek
    {
        get { return stream.CanSeek; }
    }
    public override long Length
    {
        get { return stream.Length; }
    }
    public override long Position
    {
        get { return stream.Position; }
        set { stream.Position = value; }
    }
    public override void Flush()
    {
        stream.Flush();
    }
    public override void SetLength(long length)
    {
        stream.SetLength( length );
    }
    public override void Write(byte[] buffer, int offset, int count)
    {
        stream.Write( buffer, offset, count );
    }
    public override long Seek( long offset, SeekOrigin origin )
    {
        return stream.Seek( offset, origin );
    }
}
}
| |
using System;
using System.Configuration;
using System.IO;
using System.Reflection;
using System.Web.Http;
using Serilog;
namespace Thinktecture.Relay.Server.Config
{
/// <summary>
/// Reads the RelayServer configuration from environment variables and
/// app settings, applying hard-coded defaults and sanity constraints.
/// Every setting is resolved once in the constructor; properties are
/// immutable afterwards (except CustomCodeAssemblyPath, which has a setter).
/// </summary>
internal class Configuration : IConfiguration
{
// RabbitMQ Settings
public string RabbitMqConnectionString { get; }
public string RabbitMqClusterHosts { get; }
public bool RabbitMqAutomaticRecoveryEnabled { get; }
public TimeSpan QueueExpiration { get; }
public TimeSpan RequestExpiration { get; }
// App Settings
public TimeSpan OnPremiseConnectorCallbackTimeout { get; }
public string TraceFileDirectory { get; }
public int LinkPasswordLength { get; }
public int DisconnectTimeout { get; }
public int ConnectionTimeout { get; }
public int KeepAliveInterval { get; }
public bool UseInsecureHttp { get; }
public ModuleBinding EnableManagementWeb { get; }
public ModuleBinding EnableRelaying { get; }
public ModuleBinding EnableOnPremiseConnections { get; }
public string HostName { get; }
public int Port { get; }
public string ManagementWebLocation { get; }
public TimeSpan TemporaryRequestStoragePeriod { get; }
public string TemporaryRequestStoragePath { get; }
public TimeSpan ActiveConnectionTimeout { get; }
public string CustomCodeAssemblyPath { get; set; }
public string SharedSecret { get; }
public string OAuthCertificate { get; }
public TimeSpan HstsHeaderMaxAge { get; }
public bool HstsIncludeSubdomains { get; }
public IncludeErrorDetailPolicy IncludeErrorDetailPolicy { get; }
public int MaxFailedLoginAttempts { get; }
public TimeSpan FailedLoginLockoutPeriod { get; }
public bool SecureClientController { get; }
public TimeSpan AccessTokenLifetime { get; }
public bool LogSensitiveData { get; }
public bool RequireLinkAvailability { get; }
// Default settings for links
public TimeSpan LinkTokenRefreshWindow { get; }
public TimeSpan LinkReconnectMinWaitTime { get; }
public TimeSpan LinkReconnectMaxWaitTime { get; }
public TimeSpan? LinkAbsoluteConnectionLifetime { get; }
public TimeSpan? LinkSlidingConnectionLifetime { get; }
/// <summary>
/// Resolves every setting. The pattern throughout: assign the default,
/// then overwrite it only when the configured value parses successfully
/// (and, where present, satisfies an additional sanity constraint).
/// </summary>
/// <param name="logger">Optional logger; when present, all resolved values are dumped at Verbose level.</param>
public Configuration(ILogger logger)
{
var settings = ConfigurationManager.ConnectionStrings["RabbitMQ"];
if (settings != null)
{
RabbitMqConnectionString = settings.ConnectionString;
}
RabbitMqClusterHosts = GetValue(nameof(RabbitMqClusterHosts));
if (String.IsNullOrWhiteSpace(RabbitMqClusterHosts))
{
// Normalize blank to null so consumers can test for "single host" mode.
RabbitMqClusterHosts = null;
}
RabbitMqAutomaticRecoveryEnabled = true;
if (Boolean.TryParse(GetValue(nameof(RabbitMqAutomaticRecoveryEnabled)), out var tmpBool))
{
RabbitMqAutomaticRecoveryEnabled = tmpBool;
}
QueueExpiration = TimeSpan.FromSeconds(10);
if (TimeSpan.TryParse(GetValue(nameof(QueueExpiration)), out var tmpTimeSpan))
{
QueueExpiration = tmpTimeSpan;
}
RequestExpiration = TimeSpan.FromSeconds(10);
if (TimeSpan.TryParse(GetValue(nameof(RequestExpiration)), out tmpTimeSpan))
{
RequestExpiration = tmpTimeSpan;
}
OnPremiseConnectorCallbackTimeout = TimeSpan.FromSeconds(30);
if (TimeSpan.TryParse(GetValue(nameof(OnPremiseConnectorCallbackTimeout)), out tmpTimeSpan))
{
OnPremiseConnectorCallbackTimeout = tmpTimeSpan;
}
TraceFileDirectory = GetPathValue(nameof(TraceFileDirectory), logger) ?? "tracefiles";
LinkPasswordLength = 100;
if (Int32.TryParse(GetValue(nameof(LinkPasswordLength)), out var tmpInt))
{
LinkPasswordLength = tmpInt;
}
DisconnectTimeout = 6;
if (Int32.TryParse(GetValue(nameof(DisconnectTimeout)), out tmpInt))
{
DisconnectTimeout = tmpInt;
}
ConnectionTimeout = 5;
if (Int32.TryParse(GetValue(nameof(ConnectionTimeout)), out tmpInt))
{
ConnectionTimeout = tmpInt;
}
// Default keep-alive is a third of the (possibly configured) disconnect
// timeout; configured values smaller than that default are ignored.
KeepAliveInterval = DisconnectTimeout / 3;
if (Int32.TryParse(GetValue(nameof(KeepAliveInterval)), out tmpInt) && tmpInt >= KeepAliveInterval)
{
KeepAliveInterval = tmpInt;
}
UseInsecureHttp = false;
if (Boolean.TryParse(GetValue(nameof(UseInsecureHttp)), out tmpBool))
{
UseInsecureHttp = tmpBool;
}
EnableManagementWeb = ModuleBinding.True;
if (Enum.TryParse(GetValue(nameof(EnableManagementWeb)), true, out ModuleBinding tmpModuleBinding))
{
EnableManagementWeb = tmpModuleBinding;
}
EnableRelaying = ModuleBinding.True;
if (Enum.TryParse(GetValue(nameof(EnableRelaying)), true, out tmpModuleBinding))
{
EnableRelaying = tmpModuleBinding;
}
EnableOnPremiseConnections = ModuleBinding.True;
if (Enum.TryParse(GetValue(nameof(EnableOnPremiseConnections)), true, out tmpModuleBinding))
{
EnableOnPremiseConnections = tmpModuleBinding;
}
HostName = GetValue(nameof(HostName)) ?? "+";
// Default port depends on the transport chosen above.
Port = UseInsecureHttp ? 20000 : 443;
if (Int32.TryParse(GetValue(nameof(Port)), out tmpInt))
{
Port = tmpInt;
}
ManagementWebLocation = GetPathValue(nameof(ManagementWebLocation), logger);
if (String.IsNullOrWhiteSpace(ManagementWebLocation))
{
ManagementWebLocation = "ManagementWeb";
}
TemporaryRequestStoragePath = GetPathValue(nameof(TemporaryRequestStoragePath), logger);
if (String.IsNullOrWhiteSpace(TemporaryRequestStoragePath))
{
// Null means "use the in-memory store" (see LogSettings below).
TemporaryRequestStoragePath = null;
}
// Default storage period is twice the callback timeout; configured values
// shorter than that are ignored.
TemporaryRequestStoragePeriod = OnPremiseConnectorCallbackTimeout + OnPremiseConnectorCallbackTimeout;
if (TimeSpan.TryParse(GetValue(nameof(TemporaryRequestStoragePeriod)), out tmpTimeSpan) && tmpTimeSpan >= TemporaryRequestStoragePeriod)
{
TemporaryRequestStoragePeriod = tmpTimeSpan;
}
ActiveConnectionTimeout = TimeSpan.FromMinutes(2);
if (TimeSpan.TryParse(GetValue(nameof(ActiveConnectionTimeout)), out tmpTimeSpan))
{
ActiveConnectionTimeout = tmpTimeSpan;
}
CustomCodeAssemblyPath = GetPathValue(nameof(CustomCodeAssemblyPath), logger);
if (String.IsNullOrWhiteSpace(CustomCodeAssemblyPath))
{
CustomCodeAssemblyPath = null;
}
else if (!File.Exists(CustomCodeAssemblyPath))
{
logger?.Warning("A custom code assembly has been configured, but it is not available at the configured path. assembly-path={CustomCodeAssemblyPath}", CustomCodeAssemblyPath);
CustomCodeAssemblyPath = null;
}
SharedSecret = GetValue(nameof(SharedSecret));
OAuthCertificate = GetValue(nameof(OAuthCertificate));
// Token signing needs at least one of SharedSecret/OAuthCertificate.
// Single-server mode can fall back to a random secret; multi-server mode
// cannot, because every node must share the same signing material.
if (String.IsNullOrEmpty(SharedSecret) && String.IsNullOrEmpty(OAuthCertificate))
{
if (String.IsNullOrEmpty(TemporaryRequestStoragePath)) // assume Multi-Server operation mode when this folder is configured
{
logger?.Warning("No SharedSecret or OAuthCertificate is configured. Please configure one of them. Continuing with a random value which will make all tokens invalid on restart.");
SharedSecret = Convert.ToBase64String(Guid.NewGuid().ToByteArray());
}
else
{
var message = "No SharedSecret or OAuthCertificate is configured, and RelayServer is set up for Multi-Server operation. You need to configure either SharedSecret or OAuthCertificate before starting RelayServer.";
logger?.Error(message);
throw new ConfigurationErrorsException(message);
}
}
HstsHeaderMaxAge = TimeSpan.FromDays(365);
if (TimeSpan.TryParse(GetValue(nameof(HstsHeaderMaxAge)), out tmpTimeSpan))
{
HstsHeaderMaxAge = tmpTimeSpan;
}
HstsIncludeSubdomains = false;
if (Boolean.TryParse(GetValue(nameof(HstsIncludeSubdomains)), out tmpBool))
{
HstsIncludeSubdomains = tmpBool;
}
IncludeErrorDetailPolicy = IncludeErrorDetailPolicy.Default;
if (Enum.TryParse(GetValue(nameof(IncludeErrorDetailPolicy)), true, out IncludeErrorDetailPolicy tmpIncludeErrorDetailPolicy))
{
IncludeErrorDetailPolicy = tmpIncludeErrorDetailPolicy;
}
MaxFailedLoginAttempts = 5;
if (Int32.TryParse(GetValue(nameof(MaxFailedLoginAttempts)), out tmpInt))
{
MaxFailedLoginAttempts = tmpInt;
}
FailedLoginLockoutPeriod = TimeSpan.FromMinutes(15);
if (TimeSpan.TryParse(GetValue(nameof(FailedLoginLockoutPeriod)), out tmpTimeSpan))
{
FailedLoginLockoutPeriod = tmpTimeSpan;
}
SecureClientController = false;
if (Boolean.TryParse(GetValue(nameof(SecureClientController)), out tmpBool))
{
SecureClientController = tmpBool;
}
AccessTokenLifetime = TimeSpan.FromDays(365);
if (TimeSpan.TryParse(GetValue(nameof(AccessTokenLifetime)), out tmpTimeSpan))
{
AccessTokenLifetime = tmpTimeSpan;
}
LogSensitiveData = true;
if (Boolean.TryParse(GetValue(nameof(LogSensitiveData)), out tmpBool))
{
LogSensitiveData = tmpBool;
}
RequireLinkAvailability = false;
if (Boolean.TryParse(GetValue(nameof(RequireLinkAvailability)), out tmpBool))
{
RequireLinkAvailability = tmpBool;
}
// The refresh window must be shorter than the token lifetime to be useful.
LinkTokenRefreshWindow = TimeSpan.FromMinutes(1);
if (TimeSpan.TryParse(GetValue(nameof(LinkTokenRefreshWindow)), out tmpTimeSpan) && tmpTimeSpan < AccessTokenLifetime)
{
LinkTokenRefreshWindow = tmpTimeSpan;
}
LinkReconnectMinWaitTime = TimeSpan.FromSeconds(2);
if (TimeSpan.TryParse(GetValue(nameof(LinkReconnectMinWaitTime)), out tmpTimeSpan))
{
LinkReconnectMinWaitTime = tmpTimeSpan;
}
// Max wait must exceed min wait; a configured value that doesn't is ignored,
// and if the default itself ends up below min, a sane max is derived from min.
LinkReconnectMaxWaitTime = TimeSpan.FromSeconds(30);
if (TimeSpan.TryParse(GetValue(nameof(LinkReconnectMaxWaitTime)), out tmpTimeSpan) && tmpTimeSpan > LinkReconnectMinWaitTime)
{
LinkReconnectMaxWaitTime = tmpTimeSpan;
}
else if (LinkReconnectMaxWaitTime < LinkReconnectMinWaitTime)
{
// something is fishy in the config
LinkReconnectMaxWaitTime = LinkReconnectMinWaitTime + TimeSpan.FromSeconds(30);
}
// Null means "no lifetime limit" for the two optional lifetimes below.
LinkAbsoluteConnectionLifetime = null;
if (TimeSpan.TryParse(GetValue(nameof(LinkAbsoluteConnectionLifetime)), out tmpTimeSpan))
{
LinkAbsoluteConnectionLifetime = tmpTimeSpan;
}
LinkSlidingConnectionLifetime = null;
if (TimeSpan.TryParse(GetValue(nameof(LinkSlidingConnectionLifetime)), out tmpTimeSpan))
{
LinkSlidingConnectionLifetime = tmpTimeSpan;
}
LogSettings(logger);
}
/// <summary>
/// Reads a raw setting. An environment variable named
/// "RelayServer__{settingName}" takes precedence over app settings.
/// </summary>
private string GetValue(string settingName)
{
return Environment.GetEnvironmentVariable($"RelayServer__{settingName}")
?? ConfigurationManager.AppSettings[settingName];
}
/// <summary>
/// Reads a path setting and makes relative paths absolute by resolving
/// them against the entry assembly's directory. Returns null when the
/// setting is absent or empty.
/// </summary>
private string GetPathValue(string settingName, ILogger logger)
{
var value = GetValue(settingName);
if (String.IsNullOrEmpty(value))
{
return null;
}
if (!Path.IsPathRooted(value))
{
var basePath = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
// we have a relative path, so we need to combine it with current execution directory
logger?.Verbose($"Configured path for {{SettingName}} is relative ({{{settingName}}}) to base path {{BasePath}} " , settingName, value, basePath);
value = Path.GetFullPath(Path.Combine(basePath, value));
logger?.Verbose($"Converted path for {{SettingName}} is absolute: {{{settingName}}}" , settingName, value);
}
else
{
logger?.Verbose($"Configured path for {{SettingName}} is absolute: {{{settingName}}}" , settingName, value);
}
return value;
}
/// <summary>
/// Dumps every resolved setting at Verbose level for diagnostics.
/// NOTE(review): this logs SharedSecret and OAuthCertificate regardless of
/// LogSensitiveData — confirm that is intended.
/// </summary>
private void LogSettings(ILogger logger)
{
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(RabbitMqConnectionString), RabbitMqConnectionString);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(RabbitMqClusterHosts), RabbitMqClusterHosts ?? "not defined - using single host");
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(RabbitMqAutomaticRecoveryEnabled), RabbitMqAutomaticRecoveryEnabled);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(QueueExpiration), QueueExpiration);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(RequestExpiration), RequestExpiration);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(OnPremiseConnectorCallbackTimeout), OnPremiseConnectorCallbackTimeout);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(TraceFileDirectory), TraceFileDirectory);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(LinkPasswordLength), LinkPasswordLength);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(DisconnectTimeout), DisconnectTimeout);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(ConnectionTimeout), ConnectionTimeout);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(KeepAliveInterval), KeepAliveInterval);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(UseInsecureHttp), UseInsecureHttp);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(EnableManagementWeb), EnableManagementWeb);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(EnableRelaying), EnableRelaying);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(EnableOnPremiseConnections), EnableOnPremiseConnections);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(HostName), HostName);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(Port), Port);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(ManagementWebLocation), ManagementWebLocation);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(TemporaryRequestStoragePath), TemporaryRequestStoragePath ?? "not defined - using in-memory store");
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(TemporaryRequestStoragePeriod), TemporaryRequestStoragePeriod);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(ActiveConnectionTimeout), ActiveConnectionTimeout);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(CustomCodeAssemblyPath), CustomCodeAssemblyPath);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(SharedSecret), SharedSecret);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(OAuthCertificate), OAuthCertificate);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(HstsHeaderMaxAge), HstsHeaderMaxAge);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(HstsIncludeSubdomains), HstsIncludeSubdomains);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(IncludeErrorDetailPolicy), IncludeErrorDetailPolicy);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(MaxFailedLoginAttempts), MaxFailedLoginAttempts);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(FailedLoginLockoutPeriod), FailedLoginLockoutPeriod);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(SecureClientController), SecureClientController);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(AccessTokenLifetime), AccessTokenLifetime);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(LogSensitiveData), LogSensitiveData);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(LinkTokenRefreshWindow), LinkTokenRefreshWindow);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(LinkReconnectMinWaitTime), LinkReconnectMinWaitTime);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(LinkReconnectMaxWaitTime), LinkReconnectMaxWaitTime);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(LinkAbsoluteConnectionLifetime), LinkAbsoluteConnectionLifetime);
logger?.Verbose("Setting {ConfigurationProperty}: {ConfigurationValue}", nameof(LinkSlidingConnectionLifetime), LinkSlidingConnectionLifetime);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.SignalR;
using Microsoft.AspNetCore.Testing;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Microsoft.JSInterop;
using Moq;
using Xunit;
namespace Microsoft.AspNetCore.Components.Server.Circuits
{
public class RemoteJSDataStreamTest
{
// Shared runtime for tests that don't need per-test isolation; tests that
// inspect runtime state construct their own instance instead.
private static readonly TestRemoteJSRuntime _jsRuntime = new(Options.Create(new CircuitOptions()), Options.Create(new HubOptions()), Mock.Of<ILogger<RemoteJSRuntime>>());
[Fact]
public async Task CreateRemoteJSDataStreamAsync_CreatesStream()
{
    // Arrange
    var streamReference = Mock.Of<IJSStreamReference>();

    // Act
    var stream = await RemoteJSDataStream.CreateRemoteJSDataStreamAsync(
        _jsRuntime,
        streamReference,
        totalLength: 100,
        signalRMaximumIncomingBytes: 10_000,
        jsInteropDefaultCallTimeout: TimeSpan.FromMinutes(1),
        cancellationToken: CancellationToken.None).DefaultTimeout();

    // Assert
    Assert.NotNull(stream);
}
[Fact]
public async Task ReceiveData_DoesNotFindStream()
{
    // Arrange: a stream id that was never registered with the runtime.
    var payload = new byte[] { 3, 5, 6, 7 };
    var unknownStreamId = 10;

    // Act
    var found = await RemoteJSDataStream.ReceiveData(_jsRuntime, streamId: unknownStreamId, chunkId: 0, payload, error: null).DefaultTimeout();

    // Assert
    Assert.False(found);
}
[Fact]
public async Task ReceiveData_SuccessReadsBackStream()
{
    // Arrange: a fresh runtime/stream pair and a random 100-byte payload.
    var runtime = new TestRemoteJSRuntime(Options.Create(new CircuitOptions()), Options.Create(new HubOptions()), Mock.Of<ILogger<RemoteJSRuntime>>());
    var dataStream = await CreateRemoteJSDataStreamAsync(runtime);
    var streamId = GetStreamId(dataStream, runtime);
    var payload = new byte[100];
    new Random().NextBytes(payload);

    // Act 1: feed the chunk to the stream from a background task.
    var producerTask = Task.Run(async () =>
        await RemoteJSDataStream.ReceiveData(runtime, streamId, chunkId: 0, payload, error: null).DefaultTimeout());

    // Act & Assert 2: the consumer reads back exactly what was sent.
    using var destination = new MemoryStream();
    await dataStream.CopyToAsync(destination).DefaultTimeout();
    Assert.Equal(payload, destination.ToArray());

    // Act & Assert 3: the producer reported success.
    var produced = await producerTask.DefaultTimeout();
    Assert.True(produced);
}
[Fact]
public async Task ReceiveData_SuccessReadsBackPipeReader()
{
    // Arrange: a fresh runtime/stream pair and a random 100-byte payload.
    var runtime = new TestRemoteJSRuntime(Options.Create(new CircuitOptions()), Options.Create(new HubOptions()), Mock.Of<ILogger<RemoteJSRuntime>>());
    var dataStream = await CreateRemoteJSDataStreamAsync(runtime);
    var streamId = GetStreamId(dataStream, runtime);
    var payload = new byte[100];
    new Random().NextBytes(payload);

    // Act 1: feed the chunk to the stream from a background task.
    var producerTask = Task.Run(async () =>
        await RemoteJSDataStream.ReceiveData(runtime, streamId, chunkId: 0, payload, error: null).DefaultTimeout());

    // Act & Assert 2: the PipeReader surface yields the same bytes.
    using var destination = new MemoryStream();
    await dataStream.PipeReader.CopyToAsync(destination).DefaultTimeout();
    Assert.Equal(payload, destination.ToArray());

    // Act & Assert 3: the producer reported success.
    var produced = await producerTask.DefaultTimeout();
    Assert.True(produced);
}
[Fact]
public async Task ReceiveData_WithError()
{
    // Arrange
    var runtime = new TestRemoteJSRuntime(Options.Create(new CircuitOptions()), Options.Create(new HubOptions()), Mock.Of<ILogger<RemoteJSRuntime>>());
    var dataStream = await CreateRemoteJSDataStreamAsync(runtime);
    var streamId = GetStreamId(dataStream, runtime);

    // Act & Assert 1: reporting an error rejects the chunk.
    var accepted = await RemoteJSDataStream.ReceiveData(runtime, streamId, chunkId: 0, chunk: null, error: "some error").DefaultTimeout();
    Assert.False(accepted);

    // Act & Assert 2: the reader subsequently surfaces the error.
    using var destination = new MemoryStream();
    var ex = await Assert.ThrowsAsync<InvalidOperationException>(async () => await dataStream.CopyToAsync(destination).DefaultTimeout());
    Assert.Equal("An error occurred while reading the remote stream: some error", ex.Message);
}
[Fact]
public async Task ReceiveData_WithZeroLengthChunk()
{
    // Arrange: a registered stream and an empty payload.
    var runtime = new TestRemoteJSRuntime(Options.Create(new CircuitOptions()), Options.Create(new HubOptions()), Mock.Of<ILogger<RemoteJSRuntime>>());
    var dataStream = await CreateRemoteJSDataStreamAsync(runtime);
    var id = GetStreamId(dataStream, runtime);
    var emptyChunk = Array.Empty<byte>();

    // Act & Assert 1: the empty chunk is rejected at the point of receipt.
    var thrown = await Assert.ThrowsAsync<EndOfStreamException>(async () => await RemoteJSDataStream.ReceiveData(runtime, id, chunkId: 0, emptyChunk, error: null).DefaultTimeout());
    Assert.Equal("The incoming data chunk cannot be empty.", thrown.Message);

    // Act & Assert 2: the same failure is observed when reading the stream back.
    using var destination = new MemoryStream();
    thrown = await Assert.ThrowsAsync<EndOfStreamException>(async () => await dataStream.CopyToAsync(destination).DefaultTimeout());
    Assert.Equal("The incoming data chunk cannot be empty.", thrown.Message);
}
[Fact]
public async Task ReceiveData_WithLargerChunksThanPermitted()
{
    // Arrange: a registered stream and a payload above the 32k chunk ceiling.
    var runtime = new TestRemoteJSRuntime(Options.Create(new CircuitOptions()), Options.Create(new HubOptions()), Mock.Of<ILogger<RemoteJSRuntime>>());
    var dataStream = await CreateRemoteJSDataStreamAsync(runtime);
    var id = GetStreamId(dataStream, runtime);
    var oversizedChunk = new byte[50_000]; // more than the 32k maximum chunk size

    // Act & Assert 1: the oversized chunk is rejected at the point of receipt.
    var thrown = await Assert.ThrowsAsync<EndOfStreamException>(async () => await RemoteJSDataStream.ReceiveData(runtime, id, chunkId: 0, oversizedChunk, error: null).DefaultTimeout());
    Assert.Equal("The incoming data chunk exceeded the permitted length.", thrown.Message);

    // Act & Assert 2: the same failure is observed when reading the stream back.
    using var destination = new MemoryStream();
    thrown = await Assert.ThrowsAsync<EndOfStreamException>(async () => await dataStream.CopyToAsync(destination).DefaultTimeout());
    Assert.Equal("The incoming data chunk exceeded the permitted length.", thrown.Message);
}
[Fact]
public async Task ReceiveData_ProvidedWithMoreBytesThanRemaining()
{
    // Arrange: a stream declared as 100 bytes total, and a 110-byte chunk.
    var runtime = new TestRemoteJSRuntime(Options.Create(new CircuitOptions()), Options.Create(new HubOptions()), Mock.Of<ILogger<RemoteJSRuntime>>());
    var streamReference = Mock.Of<IJSStreamReference>();
    var dataStream = await RemoteJSDataStream.CreateRemoteJSDataStreamAsync(runtime, streamReference, totalLength: 100, signalRMaximumIncomingBytes: 10_000, jsInteropDefaultCallTimeout: TimeSpan.FromMinutes(1), cancellationToken: CancellationToken.None);
    var id = GetStreamId(dataStream, runtime);
    var oversizedPayload = new byte[110]; // 100 byte totalLength for stream

    // Act & Assert 1: sending more bytes than the declared length is rejected.
    var thrown = await Assert.ThrowsAsync<EndOfStreamException>(async () => await RemoteJSDataStream.ReceiveData(runtime, id, chunkId: 0, oversizedPayload, error: null).DefaultTimeout());
    Assert.Equal("The incoming data stream declared a length 100, but 110 bytes were sent.", thrown.Message);

    // Act & Assert 2: the same failure is observed when reading the stream back.
    using var destination = new MemoryStream();
    thrown = await Assert.ThrowsAsync<EndOfStreamException>(async () => await dataStream.CopyToAsync(destination).DefaultTimeout());
    Assert.Equal("The incoming data stream declared a length 100, but 110 bytes were sent.", thrown.Message);
}
[Fact]
public async Task ReceiveData_ProvidedWithOutOfOrderChunk_SimulatesSignalRDisconnect()
{
    // Arrange: a 100-byte stream fed in 5-byte chunks.
    var runtime = new TestRemoteJSRuntime(Options.Create(new CircuitOptions()), Options.Create(new HubOptions()), Mock.Of<ILogger<RemoteJSRuntime>>());
    var streamReference = Mock.Of<IJSStreamReference>();
    var dataStream = await RemoteJSDataStream.CreateRemoteJSDataStreamAsync(runtime, streamReference, totalLength: 100, signalRMaximumIncomingBytes: 10_000, jsInteropDefaultCallTimeout: TimeSpan.FromMinutes(1), cancellationToken: CancellationToken.None);
    var id = GetStreamId(dataStream, runtime);
    var payload = new byte[5];

    // Act & Assert 1: deliver chunks 0..4 in order, then skip ahead to chunk 7.
    foreach (var sequenceNumber in Enumerable.Range(0, 5))
    {
        await RemoteJSDataStream.ReceiveData(runtime, id, chunkId: sequenceNumber, payload, error: null);
    }

    var thrown = await Assert.ThrowsAsync<EndOfStreamException>(async () => await RemoteJSDataStream.ReceiveData(runtime, id, chunkId: 7, payload, error: null).DefaultTimeout());
    Assert.Equal("Out of sequence chunk received, expected 5, but received 7.", thrown.Message);

    // Act & Assert 2: the reader observes the same out-of-sequence failure.
    using var destination = new MemoryStream();
    thrown = await Assert.ThrowsAsync<EndOfStreamException>(async () => await dataStream.CopyToAsync(destination).DefaultTimeout());
    Assert.Equal("Out of sequence chunk received, expected 5, but received 7.", thrown.Message);
}
[Fact]
public async Task ReceiveData_NoDataProvidedBeforeTimeout_StreamDisposed()
{
    // Arrange: hook UnhandledException so the test can observe the
    // TimeoutException the stream raises when no data arrives in time.
    var unhandledExceptionRaisedTask = new TaskCompletionSource<bool>();
    var jsRuntime = new TestRemoteJSRuntime(Options.Create(new CircuitOptions()), Options.Create(new HubOptions()), Mock.Of<ILogger<RemoteJSRuntime>>());
    jsRuntime.UnhandledException += (_, ex) =>
    {
        Assert.Equal("Did not receive any data in the allotted time.", ex.Message);
        unhandledExceptionRaisedTask.SetResult(ex is TimeoutException);
    };

    var jsStreamReference = Mock.Of<IJSStreamReference>();
    var remoteJSDataStream = await RemoteJSDataStream.CreateRemoteJSDataStreamAsync(
        jsRuntime,
        jsStreamReference,
        totalLength: 15,
        signalRMaximumIncomingBytes: 10_000,
        jsInteropDefaultCallTimeout: TimeSpan.FromSeconds(2),
        cancellationToken: CancellationToken.None);
    var streamId = GetStreamId(remoteJSDataStream, jsRuntime);
    var chunk = new byte[] { 3, 5, 7 };

    // Act & Assert 1
    // Trigger the timeout and ensure the unhandled exception is raised to crash the circuit.
    remoteJSDataStream.InvalidateLastDataReceivedTimeForTimeout();
    var unhandledExceptionResult = await unhandledExceptionRaisedTask.Task.DefaultTimeout();
    Assert.True(unhandledExceptionResult);

    // Act & Assert 2
    // Confirm the same timeout exception is also raised on the pipe reader side.
    using var mem = new MemoryStream();
    var ex = await Assert.ThrowsAsync<TimeoutException>(async () => await remoteJSDataStream.CopyToAsync(mem).DefaultTimeout());
    Assert.Equal("Did not receive any data in the allotted time.", ex.Message);

    // Act & Assert 3
    // Ensures the stream is disposed after the timeout and any additional chunks aren't accepted.
    var success = await RemoteJSDataStream.ReceiveData(jsRuntime, streamId, chunkId: 0, chunk, error: null).DefaultTimeout();
    Assert.False(success);
}
[Fact]
public async Task ReceiveData_ReceivesDataThenTimesout_StreamDisposed()
{
    // Arrange: same shape as the no-data test above, but the timeout fires only
    // after two chunks have already been accepted.
    var unhandledExceptionRaisedTask = new TaskCompletionSource<bool>();
    var jsRuntime = new TestRemoteJSRuntime(Options.Create(new CircuitOptions()), Options.Create(new HubOptions()), Mock.Of<ILogger<RemoteJSRuntime>>());
    jsRuntime.UnhandledException += (_, ex) =>
    {
        Assert.Equal("Did not receive any data in the allotted time.", ex.Message);
        unhandledExceptionRaisedTask.SetResult(ex is TimeoutException);
    };

    var jsStreamReference = Mock.Of<IJSStreamReference>();
    var remoteJSDataStream = await RemoteJSDataStream.CreateRemoteJSDataStreamAsync(
        jsRuntime,
        jsStreamReference,
        totalLength: 15,
        signalRMaximumIncomingBytes: 10_000,
        jsInteropDefaultCallTimeout: TimeSpan.FromSeconds(3),
        cancellationToken: CancellationToken.None);
    var streamId = GetStreamId(remoteJSDataStream, jsRuntime);
    var chunk = new byte[] { 3, 5, 7 };

    // Act & Assert 1
    var success = await RemoteJSDataStream.ReceiveData(jsRuntime, streamId, chunkId: 0, chunk, error: null).DefaultTimeout();
    Assert.True(success);

    // Act & Assert 2
    success = await RemoteJSDataStream.ReceiveData(jsRuntime, streamId, chunkId: 1, chunk, error: null).DefaultTimeout();
    Assert.True(success);

    // Act & Assert 3
    // Trigger the timeout and ensure the unhandled exception is raised to crash the circuit.
    remoteJSDataStream.InvalidateLastDataReceivedTimeForTimeout();
    var unhandledExceptionResult = await unhandledExceptionRaisedTask.Task.DefaultTimeout();
    Assert.True(unhandledExceptionResult);

    // Act & Assert 4
    // Confirm the same timeout exception is also raised on the pipe reader side.
    using var mem = new MemoryStream();
    var ex = await Assert.ThrowsAsync<TimeoutException>(async () => await remoteJSDataStream.CopyToAsync(mem).DefaultTimeout());
    Assert.Equal("Did not receive any data in the allotted time.", ex.Message);

    // Act & Assert 5
    // Ensures the stream is disposed after the timeout and any additional chunks aren't accepted.
    success = await RemoteJSDataStream.ReceiveData(jsRuntime, streamId, chunkId: 2, chunk, error: null).DefaultTimeout();
    Assert.False(success);
}
/// <summary>
/// Builds a <see cref="RemoteJSDataStream"/> over a mocked <see cref="IJSStreamReference"/>
/// with the defaults most tests want: totalLength 100, 10k max incoming bytes,
/// one-minute JS interop timeout, no cancellation.
/// </summary>
private static async Task<RemoteJSDataStream> CreateRemoteJSDataStreamAsync(TestRemoteJSRuntime jsRuntime = null)
{
    var runtime = jsRuntime ?? _jsRuntime;
    return await RemoteJSDataStream.CreateRemoteJSDataStreamAsync(
        runtime,
        Mock.Of<IJSStreamReference>(),
        totalLength: 100,
        signalRMaximumIncomingBytes: 10_000,
        jsInteropDefaultCallTimeout: TimeSpan.FromMinutes(1),
        cancellationToken: CancellationToken.None);
}
// Reverse lookup: find the id under which the runtime tracks the given stream
// instance; returns default(long) when the stream is not registered.
private static long GetStreamId(RemoteJSDataStream stream, RemoteJSRuntime runtime)
{
    foreach (var entry in runtime.RemoteJSDataStreamInstances)
    {
        if (entry.Value == stream)
        {
            return entry.Key;
        }
    }

    return default;
}
// Test double for the JS runtime: intercepts the interop call RemoteJSDataStream
// issues to start pulling data, so no real circuit or JS side is required.
class TestRemoteJSRuntime : RemoteJSRuntime, IJSRuntime
{
    public TestRemoteJSRuntime(IOptions<CircuitOptions> circuitOptions, IOptions<HubOptions> hubOptions, ILogger<RemoteJSRuntime> logger) : base(circuitOptions, hubOptions, logger)
    {
    }

    // Hides (new, not override) the base implementation: asserts the expected
    // internal identifier is used and returns a completed default result
    // instead of performing a real JS call.
    public new ValueTask<TValue> InvokeAsync<TValue>(string identifier, object[] args)
    {
        Assert.Equal("Blazor._internal.sendJSDataStream", identifier);
        return ValueTask.FromResult<TValue>(default);
    }
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Runtime.InteropServices;
using size_t = System.UInt64;
using curl_socket_t = System.Int32;
using curl_off_t = System.Int64;
// Interop shim for libcurl: constant values, delegate shapes, and struct layouts
// mirrored from the native curl.h / multi.h headers. Every numeric value must match
// the native definition exactly — do not renumber or reorder semantically.
internal static partial class Interop
{
    internal static partial class libcurl
    {
        // Class for constants defined for the enum CURLoption in curl.h
        internal static partial class CURLoption
        {
            // Curl options are of the format <type base> + <n>; the base encodes
            // the native option's argument kind (long, object pointer, function pointer).
            private const int CurlOptionLongBase = 0;
            private const int CurlOptionObjectPointBase = 10000;
            private const int CurlOptionFunctionPointBase = 20000;

            // long-typed options
            internal const int CURLOPT_INFILESIZE = CurlOptionLongBase + 14;
            internal const int CURLOPT_VERBOSE = CurlOptionLongBase + 41;
            internal const int CURLOPT_NOBODY = CurlOptionLongBase + 44;
            internal const int CURLOPT_UPLOAD = CurlOptionLongBase + 46;
            internal const int CURLOPT_POST = CurlOptionLongBase + 47;
            internal const int CURLOPT_FOLLOWLOCATION = CurlOptionLongBase + 52;
            internal const int CURLOPT_PROXYPORT = CurlOptionLongBase + 59;
            internal const int CURLOPT_POSTFIELDSIZE = CurlOptionLongBase + 60;
            internal const int CURLOPT_MAXREDIRS = CurlOptionLongBase + 68;
            internal const int CURLOPT_NOSIGNAL = CurlOptionLongBase + 99;
            internal const int CURLOPT_PROXYTYPE = CurlOptionLongBase + 101;
            internal const int CURLOPT_HTTPAUTH = CurlOptionLongBase + 107;
            internal const int CURLOPT_PROTOCOLS = CurlOptionLongBase + 181;
            internal const int CURLOPT_REDIR_PROTOCOLS = CurlOptionLongBase + 182;

            // object-pointer options
            internal const int CURLOPT_WRITEDATA = CurlOptionObjectPointBase + 1;
            internal const int CURLOPT_URL = CurlOptionObjectPointBase + 2;
            internal const int CURLOPT_PROXY = CurlOptionObjectPointBase + 4;
            internal const int CURLOPT_PROXYUSERPWD = CurlOptionObjectPointBase + 6;
            internal const int CURLOPT_READDATA = CurlOptionObjectPointBase + 9;
            internal const int CURLOPT_COOKIE = CurlOptionObjectPointBase + 22;
            internal const int CURLOPT_HTTPHEADER = CurlOptionObjectPointBase + 23;
            internal const int CURLOPT_HEADERDATA = CurlOptionObjectPointBase + 29;
            internal const int CURLOPT_ACCEPTENCODING = CurlOptionObjectPointBase + 102;
            internal const int CURLOPT_PRIVATE = CurlOptionObjectPointBase + 103;
            internal const int CURLOPT_COPYPOSTFIELDS = CurlOptionObjectPointBase + 165;
            internal const int CURLOPT_SEEKDATA = CurlOptionObjectPointBase + 168;
            internal const int CURLOPT_USERNAME = CurlOptionObjectPointBase + 173;
            internal const int CURLOPT_PASSWORD = CurlOptionObjectPointBase + 174;

            // function-pointer options
            internal const int CURLOPT_WRITEFUNCTION = CurlOptionFunctionPointBase + 11;
            internal const int CURLOPT_READFUNCTION = CurlOptionFunctionPointBase + 12;
            internal const int CURLOPT_HEADERFUNCTION = CurlOptionFunctionPointBase + 79;
            internal const int CURLOPT_SEEKFUNCTION = CurlOptionFunctionPointBase + 167;
        }

        // Class for constants defined for the enum CURLINFO in curl.h
        internal static partial class CURLINFO
        {
            // Curl info are of the format <type base> + <n>
            private const int CurlInfoStringBase = 0x100000;
            private const int CurlInfoLongBase = 0x200000;

            internal const int CURLINFO_PRIVATE = CurlInfoStringBase + 21;
            internal const int CURLINFO_HTTPAUTH_AVAIL = CurlInfoLongBase + 23;
        }

        // Class for constants defined for the enum curl_proxytype in curl.h
        internal static partial class curl_proxytype
        {
            internal const int CURLPROXY_HTTP = 0;
        }

        // Class for constants defined for the enum CURLMcode in multi.h
        internal static partial class CURLMcode
        {
            internal const int CURLM_OK = 0;
            internal const int CURLM_BAD_HANDLE = 1;
            internal const int CURLM_BAD_EASY_HANDLE = 2;
            internal const int CURLM_OUT_OF_MEMORY = 3;
            internal const int CURLM_INTERNAL_ERROR = 4;
            internal const int CURLM_BAD_SOCKET = 5;
            internal const int CURLM_UNKNOWN_OPTION = 6;
            internal const int CURLM_ADDED_ALREADY = 7;
        }

        // Class for constants defined for the results of CURL_SEEKFUNCTION
        internal static partial class CURL_SEEKFUNC
        {
            internal const int CURL_SEEKFUNC_OK = 0;
            internal const int CURL_SEEKFUNC_FAIL = 1;
            internal const int CURL_SEEKFUNC_CANTSEEK = 2;
        }

        // Class for constants defined for the enum CURLMSG in multi.h
        internal static partial class CURLMSG
        {
            internal const int CURLMSG_DONE = 1;
        }

        // AUTH related constants (bit flags matching CURLAUTH_* in curl.h)
        internal static partial class CURLAUTH
        {
            internal const ulong None = 0;
            internal const ulong Basic = 1 << 0;
            internal const ulong Digest = 1 << 1;
            internal const ulong Negotiate = 1 << 2;
            internal const ulong DigestIE = 1 << 4;
            // All auth schemes except the IE-compatible digest variant.
            internal const ulong AuthAny = ~DigestIE;
        }

        // Feature bits reported in curl_version_info_data.features.
        internal static partial class CURL_VERSION_Features
        {
            internal const int CURL_VERSION_IPV6 = (1<<0);
            internal const int CURL_VERSION_KERBEROS4 = (1<<1);
            internal const int CURL_VERSION_SSL = (1<<2);
            internal const int CURL_VERSION_LIBZ = (1<<3);
            internal const int CURL_VERSION_NTLM = (1<<4);
            internal const int CURL_VERSION_GSSNEGOTIATE = (1<<5);
            internal const int CURL_VERSION_DEBUG = (1<<6);
            internal const int CURL_VERSION_ASYNCHDNS = (1<<7);
            internal const int CURL_VERSION_SPNEGO = (1<<8);
            internal const int CURL_VERSION_LARGEFILE = (1<<9);
            internal const int CURL_VERSION_IDN = (1<<10);
            internal const int CURL_VERSION_SSPI = (1<<11);
            internal const int CURL_VERSION_CONV = (1<<12);
            internal const int CURL_VERSION_CURLDEBUG = (1<<13);
            internal const int CURL_VERSION_TLSAUTH_SRP = (1<<14);
            internal const int CURL_VERSION_NTLM_WB = (1<<15);
            internal const int CURL_VERSION_HTTP2 = (1<<16);
            internal const int CURL_VERSION_GSSAPI = (1<<17);
            internal const int CURL_VERSION_KERBEROS5 = (1<<18);
            internal const int CURL_VERSION_UNIX_SOCKETS = (1<<19);
        }

        // Protocol bits used with CURLOPT_PROTOCOLS / CURLOPT_REDIR_PROTOCOLS.
        internal static partial class CURLPROTO_Definitions
        {
            internal const int CURLPROTO_HTTP = (1<<0);
            internal const int CURLPROTO_HTTPS = (1<<1);
        }

        // Type definition of CURLMsg from multi.h.
        // data and result deliberately share offset 16 to model the native union.
        // NOTE(review): easy_handle at offset 8 assumes 8-byte pointer alignment
        // (64-bit) — confirm 32-bit platforms are not a target for this layout.
        [StructLayout(LayoutKind.Explicit)]
        internal struct CURLMsg
        {
            [FieldOffset(0)]
            internal int msg;
            [FieldOffset(8)]
            internal IntPtr easy_handle;
            [FieldOffset(16)]
            internal IntPtr data;
            [FieldOffset(16)]
            internal int result;
        }

        // NOTE: The definition of this structure in Curl/curl.h is larger than
        // than what is defined below. This definition is only valid for use with
        // Marshal.PtrToStructure and not for general use in P/Invoke signatures.
        [StructLayout(LayoutKind.Sequential)]
        internal struct curl_version_info_data
        {
            internal int age;
            private unsafe char *version;
            private int versionNum;
            private unsafe char *host;
            internal int features;
        }

        // Poll values used with curl_multi_wait and curl_waitfd.events/revents
        internal const int CURL_WAIT_POLLIN = 0x0001;
        internal const int CURL_WAIT_POLLPRI = 0x0002;
        internal const int CURL_WAIT_POLLOUT = 0x0004;

#pragma warning disable 0649 // until this file is split up, this produces a warning in the X509 project due to being unused
        internal struct curl_waitfd
        {
            internal int fd;
            internal short events;
            internal short revents;
        };
#pragma warning restore 0649

        // Shared delegate shape for the callbacks registered via
        // CURLOPT_READFUNCTION / CURLOPT_WRITEFUNCTION / CURLOPT_HEADERFUNCTION.
        public delegate size_t curl_readwrite_callback(
            IntPtr buffer,
            size_t size,
            size_t nitems,
            IntPtr context);

        // Delegate shape for the callback registered via CURLOPT_SEEKFUNCTION.
        public delegate int seek_callback(
            IntPtr userp,
            curl_off_t offset,
            int origin);

        // Special return values for read/write callbacks.
        public const int CURL_READFUNC_ABORT = 0x10000000;
        public const int CURL_READFUNC_PAUSE = 0x10000001;
        public const int CURL_WRITEFUNC_PAUSE = 0x10000001;

        public const int CURLPAUSE_CONT = 0;
    }
}
| |
using System;
using System.Data;
using Csla;
using Csla.Data;
using ParentLoadSoftDelete.DataAccess;
using ParentLoadSoftDelete.DataAccess.ERCLevel;
namespace ParentLoadSoftDelete.Business.ERCLevel
{
/// <summary>
/// F03_Continent_ReChild (editable child object).<br/>
/// This is a generated base class of <see cref="F03_Continent_ReChild"/> business object.
/// </summary>
/// <remarks>
/// This class is an item of <see cref="F02_Continent"/> collection.
/// </remarks>
[Serializable]
public partial class F03_Continent_ReChild : BusinessBase<F03_Continent_ReChild>
{

    #region State Fields

    // Parent key captured during Fetch. [NotUndoable]/[NonSerialized] mean it is
    // neither snapshotted for n-level undo nor serialized across the data portal;
    // it is repopulated from the data reader on each fetch.
    [NotUndoable]
    [NonSerialized]
    internal int continent_ID2 = 0;

    #endregion

    #region Business Properties

    /// <summary>
    /// Maintains metadata about <see cref="Continent_Child_Name"/> property.
    /// </summary>
    public static readonly PropertyInfo<string> Continent_Child_NameProperty = RegisterProperty<string>(p => p.Continent_Child_Name, "SubContinents Child Name");
    /// <summary>
    /// Gets or sets the SubContinents Child Name.
    /// </summary>
    /// <value>The SubContinents Child Name.</value>
    public string Continent_Child_Name
    {
        get { return GetProperty(Continent_Child_NameProperty); }
        set { SetProperty(Continent_Child_NameProperty, value); }
    }

    #endregion

    #region Factory Methods

    /// <summary>
    /// Factory method. Creates a new <see cref="F03_Continent_ReChild"/> object.
    /// </summary>
    /// <returns>A reference to the created <see cref="F03_Continent_ReChild"/> object.</returns>
    internal static F03_Continent_ReChild NewF03_Continent_ReChild()
    {
        return DataPortal.CreateChild<F03_Continent_ReChild>();
    }

    /// <summary>
    /// Factory method. Loads a <see cref="F03_Continent_ReChild"/> object from the given SafeDataReader.
    /// </summary>
    /// <param name="dr">The SafeDataReader to use.</param>
    /// <returns>A reference to the fetched <see cref="F03_Continent_ReChild"/> object.</returns>
    internal static F03_Continent_ReChild GetF03_Continent_ReChild(SafeDataReader dr)
    {
        F03_Continent_ReChild obj = new F03_Continent_ReChild();
        // show the framework that this is a child object
        obj.MarkAsChild();
        obj.Fetch(dr);
        // mark as unchanged so the object is not re-inserted on save
        obj.MarkOld();
        // check all object rules and property rules
        obj.BusinessRules.CheckRules();
        return obj;
    }

    #endregion

    #region Constructor

    /// <summary>
    /// Initializes a new instance of the <see cref="F03_Continent_ReChild"/> class.
    /// </summary>
    /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
    [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
    public F03_Continent_ReChild()
    {
        // Use factory methods and do not use direct creation.
        // show the framework that this is a child object
        MarkAsChild();
    }

    #endregion

    #region Data Access

    // The Child_* methods below are located and invoked by the CSLA child data
    // portal by naming convention; do not rename them.

    /// <summary>
    /// Loads default values for the <see cref="F03_Continent_ReChild"/> object properties.
    /// </summary>
    [Csla.RunLocal]
    protected override void Child_Create()
    {
        var args = new DataPortalHookArgs();
        OnCreate(args);
        base.Child_Create();
    }

    /// <summary>
    /// Loads a <see cref="F03_Continent_ReChild"/> object from the given SafeDataReader.
    /// </summary>
    /// <param name="dr">The SafeDataReader to use.</param>
    private void Fetch(SafeDataReader dr)
    {
        // Value properties
        LoadProperty(Continent_Child_NameProperty, dr.GetString("Continent_Child_Name"));
        // parent properties
        continent_ID2 = dr.GetInt32("Continent_ID2");
        var args = new DataPortalHookArgs(dr);
        OnFetchRead(args);
    }

    /// <summary>
    /// Inserts a new <see cref="F03_Continent_ReChild"/> object in the database.
    /// </summary>
    /// <param name="parent">The parent object.</param>
    [Transactional(TransactionalTypes.TransactionScope)]
    private void Child_Insert(F02_Continent parent)
    {
        using (var dalManager = DalFactoryParentLoadSoftDelete.GetManager())
        {
            var args = new DataPortalHookArgs();
            OnInsertPre(args);
            var dal = dalManager.GetProvider<IF03_Continent_ReChildDal>();
            using (BypassPropertyChecks)
            {
                dal.Insert(
                    parent.Continent_ID,
                    Continent_Child_Name
                    );
            }
            OnInsertPost(args);
        }
    }

    /// <summary>
    /// Updates in the database all changes made to the <see cref="F03_Continent_ReChild"/> object.
    /// </summary>
    /// <param name="parent">The parent object.</param>
    [Transactional(TransactionalTypes.TransactionScope)]
    private void Child_Update(F02_Continent parent)
    {
        // skip the round trip when nothing changed
        if (!IsDirty)
            return;

        using (var dalManager = DalFactoryParentLoadSoftDelete.GetManager())
        {
            var args = new DataPortalHookArgs();
            OnUpdatePre(args);
            var dal = dalManager.GetProvider<IF03_Continent_ReChildDal>();
            using (BypassPropertyChecks)
            {
                dal.Update(
                    parent.Continent_ID,
                    Continent_Child_Name
                    );
            }
            OnUpdatePost(args);
        }
    }

    /// <summary>
    /// Self deletes the <see cref="F03_Continent_ReChild"/> object from database.
    /// </summary>
    /// <param name="parent">The parent object.</param>
    [Transactional(TransactionalTypes.TransactionScope)]
    private void Child_DeleteSelf(F02_Continent parent)
    {
        using (var dalManager = DalFactoryParentLoadSoftDelete.GetManager())
        {
            var args = new DataPortalHookArgs();
            OnDeletePre(args);
            var dal = dalManager.GetProvider<IF03_Continent_ReChildDal>();
            using (BypassPropertyChecks)
            {
                // keyed by the parent id only; this child has no key of its own
                dal.Delete(parent.Continent_ID);
            }
            OnDeletePost(args);
        }
    }

    #endregion

    #region DataPortal Hooks

    /// <summary>
    /// Occurs after setting all defaults for object creation.
    /// </summary>
    partial void OnCreate(DataPortalHookArgs args);

    /// <summary>
    /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
    /// </summary>
    partial void OnDeletePre(DataPortalHookArgs args);

    /// <summary>
    /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
    /// </summary>
    partial void OnDeletePost(DataPortalHookArgs args);

    /// <summary>
    /// Occurs after setting query parameters and before the fetch operation.
    /// </summary>
    partial void OnFetchPre(DataPortalHookArgs args);

    /// <summary>
    /// Occurs after the fetch operation (object or collection is fully loaded and set up).
    /// </summary>
    partial void OnFetchPost(DataPortalHookArgs args);

    /// <summary>
    /// Occurs after the low level fetch operation, before the data reader is destroyed.
    /// </summary>
    partial void OnFetchRead(DataPortalHookArgs args);

    /// <summary>
    /// Occurs after setting query parameters and before the update operation.
    /// </summary>
    partial void OnUpdatePre(DataPortalHookArgs args);

    /// <summary>
    /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
    /// </summary>
    partial void OnUpdatePost(DataPortalHookArgs args);

    /// <summary>
    /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
    /// </summary>
    partial void OnInsertPre(DataPortalHookArgs args);

    /// <summary>
    /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
    /// </summary>
    partial void OnInsertPost(DataPortalHookArgs args);

    #endregion

}
}
| |
using System;
using System.IO;
using System.Runtime.InteropServices;
namespace Python.Test
{
/// <summary>
/// Supports units tests for method access.
/// </summary>
public class MethodTest
{
public MethodTest()
{
}

// Each accessibility-probe method simply returns a marker string naming its
// own access level, so Python-side tests can check which members are visible.
public string PublicMethod() => "public";

public static string PublicStaticMethod() => "public static";

protected string ProtectedMethod() => "protected";

protected static string ProtectedStaticMethod() => "protected static";

internal string InternalMethod() => "internal";

internal static string InternalStaticMethod() => "internal static";

private string PrivateMethod() => "private";

private static string PrivateStaticMethod() => "private static";
/// <summary>
/// Identity methods used to exercise argument conversion for enums, flags,
/// structs, subclasses, and null arrays: each returns its input unchanged.
/// </summary>
public TypeCode TestEnumConversion(TypeCode v) => v;

public FileAccess TestFlagsConversion(FileAccess v) => v;

public Guid TestStructConversion(Guid v) => v;

public Exception TestSubclassConversion(Exception v) => v;

public Type[] TestNullArrayConversion(Type[] v) => v;
// Methods exercising params-array binding; each echoes the received args array
// so callers can inspect how arguments were packed.
public static string[] TestStringParamsArg(params string[] args) => args;

public static object[] TestObjectParamsArg(params object[] args) => args;

public static int[] TestValueParamsArg(params int[] args) => args;

public static int[] TestOneArgWithParams(string s, params int[] args) => args;

public static int[] TestTwoArgWithParams(string s, string x, params int[] args) => args;

public static int[] TestOverloadedParams(string v, params int[] args) => args;

// Deliberately NOT params: overload-resolution counterpart of the above.
public static int[] TestOverloadedParams(int v, int[] args) => args;
// Overload-resolution probes: each overload returns a marker string naming the
// parameter types that were selected, so tests can verify which overload won.
public static string TestOverloadedNoObject(int i) => "Got int";

public static string TestOverloadedObject(int i) => "Got int";

public static string TestOverloadedObject(object o) => "Got object";

public static string TestOverloadedObjectTwo(int a, int b) => "Got int-int";

public static string TestOverloadedObjectTwo(string a, string b) => "Got string-string";

public static string TestOverloadedObjectTwo(string a, int b) => "Got string-int";

public static string TestOverloadedObjectTwo(string a, object b) => "Got string-object";

public static string TestOverloadedObjectTwo(int a, object b) => "Got int-object";

public static string TestOverloadedObjectTwo(object a, int b) => "Got object-int";

public static string TestOverloadedObjectTwo(object a, object b) => "Got object-object";

public static string TestOverloadedObjectTwo(int a, string b) => "Got int-string";

public static string TestOverloadedObjectThree(object a, int b) => "Got object-int";

public static string TestOverloadedObjectThree(int a, object b) => "Got int-object";
// out/ref parameter probes: each writes a fixed value through the out/ref
// parameter so callers can verify marshalling of by-reference arguments.
public static bool TestStringOutParams(string s, out string s1) { s1 = "output string"; return true; }

public static bool TestStringRefParams(string s, ref string s1) { s1 = "output string"; return true; }

// Overload pair distinguishing a trailing array from extra scalar arguments.
public static bool TestNonParamsArrayInLastPlace(int i1, int[] i2) => false;

public static bool TestNonParamsArrayInLastPlace(int i1, int i2, int i3) => true;

public static bool TestValueOutParams(string s, out int i1) { i1 = 42; return true; }

public static bool TestValueRefParams(string s, ref int i1) { i1 = 42; return true; }

public static bool TestObjectOutParams(object o, out object o1) { o1 = new Exception("test"); return true; }

public static bool TestObjectRefParams(object o, ref object o1) { o1 = new Exception("test"); return true; }

public static bool TestStructOutParams(object o, out Guid o1) { o1 = Guid.NewGuid(); return true; }

public static bool TestStructRefParams(object o, ref Guid o1) { o1 = Guid.NewGuid(); return true; }

public static void TestVoidSingleOutParam(out int i) { i = 42; }

public static void TestVoidSingleRefParam(ref int i) { i = 42; }
// Default-parameter probes: return a value derived from the (possibly
// defaulted) arguments so tests can confirm which defaults were applied.
public static int TestSingleDefaultParam(int i = 5) => i;

public static int TestTwoDefaultParam(int i = 5, int j = 6) => i + j;

public static int TestOneArgAndTwoDefaultParam(int z, int i = 5, int j = 6) => i + j + z;
// overload selection test support: identity overloads covering every primitive,
// string, enum, object, and interface type the Python binding must distinguish.
public static bool Overloaded(bool v) => v;

public static byte Overloaded(byte v) => v;

public static sbyte Overloaded(sbyte v) => v;

public static char Overloaded(char v) => v;

public static short Overloaded(short v) => v;

public static int Overloaded(int v) => v;

public static long Overloaded(long v) => v;

public static ushort Overloaded(ushort v) => v;

public static uint Overloaded(uint v) => v;

public static ulong Overloaded(ulong v) => v;

public static float Overloaded(float v) => v;

public static double Overloaded(double v) => v;

public static decimal Overloaded(decimal v) => v;

public static string Overloaded(string v) => v;

public static ShortEnum Overloaded(ShortEnum v) => v;

public static object Overloaded(object v) => v;

public static InterfaceTest Overloaded(InterfaceTest v) => v;

public static ISayHello1 Overloaded(ISayHello1 v) => v;
// Array counterparts of the scalar identity overloads above.
public static bool[] Overloaded(bool[] v) => v;

public static byte[] Overloaded(byte[] v) => v;

public static sbyte[] Overloaded(sbyte[] v) => v;

public static char[] Overloaded(char[] v) => v;

public static short[] Overloaded(short[] v) => v;

public static int[] Overloaded(int[] v) => v;

public static long[] Overloaded(long[] v) => v;

public static ushort[] Overloaded(ushort[] v) => v;

public static uint[] Overloaded(uint[] v) => v;

public static ulong[] Overloaded(ulong[] v) => v;

public static float[] Overloaded(float[] v) => v;

public static double[] Overloaded(double[] v) => v;

public static decimal[] Overloaded(decimal[] v) => v;

public static string[] Overloaded(string[] v) => v;

public static ShortEnum[] Overloaded(ShortEnum[] v) => v;

public static object[] Overloaded(object[] v) => v;

public static InterfaceTest[] Overloaded(InterfaceTest[] v) => v;

public static ISayHello1[] Overloaded(ISayHello1[] v) => v;
// GenericWrapper<T> counterparts of the scalar identity overloads.
public static GenericWrapper<bool> Overloaded(GenericWrapper<bool> v) => v;

public static GenericWrapper<byte> Overloaded(GenericWrapper<byte> v) => v;

public static GenericWrapper<sbyte> Overloaded(GenericWrapper<sbyte> v) => v;

public static GenericWrapper<char> Overloaded(GenericWrapper<char> v) => v;

public static GenericWrapper<short> Overloaded(GenericWrapper<short> v) => v;

public static GenericWrapper<int> Overloaded(GenericWrapper<int> v) => v;

public static GenericWrapper<long> Overloaded(GenericWrapper<long> v) => v;

public static GenericWrapper<ushort> Overloaded(GenericWrapper<ushort> v) => v;

public static GenericWrapper<uint> Overloaded(GenericWrapper<uint> v) => v;

public static GenericWrapper<ulong> Overloaded(GenericWrapper<ulong> v) => v;

public static GenericWrapper<float> Overloaded(GenericWrapper<float> v) => v;

public static GenericWrapper<double> Overloaded(GenericWrapper<double> v) => v;

public static GenericWrapper<decimal> Overloaded(GenericWrapper<decimal> v) => v;

public static GenericWrapper<string> Overloaded(GenericWrapper<string> v) => v;

public static GenericWrapper<ShortEnum> Overloaded(GenericWrapper<ShortEnum> v) => v;

public static GenericWrapper<object> Overloaded(GenericWrapper<object> v) => v;

public static GenericWrapper<InterfaceTest> Overloaded(GenericWrapper<InterfaceTest> v) => v;

public static GenericWrapper<ISayHello1> Overloaded(GenericWrapper<ISayHello1> v) => v;
// Identity overloads taking arrays of GenericWrapper<T> — the same
// overload-resolution fixtures as above, one level of array nesting deeper.
public static GenericWrapper<bool>[] Overloaded(GenericWrapper<bool>[] v)
{
return v;
}
public static GenericWrapper<byte>[] Overloaded(GenericWrapper<byte>[] v)
{
return v;
}
public static GenericWrapper<sbyte>[] Overloaded(GenericWrapper<sbyte>[] v)
{
return v;
}
public static GenericWrapper<char>[] Overloaded(GenericWrapper<char>[] v)
{
return v;
}
public static GenericWrapper<short>[] Overloaded(GenericWrapper<short>[] v)
{
return v;
}
public static GenericWrapper<int>[] Overloaded(GenericWrapper<int>[] v)
{
return v;
}
public static GenericWrapper<long>[] Overloaded(GenericWrapper<long>[] v)
{
return v;
}
public static GenericWrapper<ushort>[] Overloaded(GenericWrapper<ushort>[] v)
{
return v;
}
public static GenericWrapper<uint>[] Overloaded(GenericWrapper<uint>[] v)
{
return v;
}
public static GenericWrapper<ulong>[] Overloaded(GenericWrapper<ulong>[] v)
{
return v;
}
public static GenericWrapper<float>[] Overloaded(GenericWrapper<float>[] v)
{
return v;
}
public static GenericWrapper<double>[] Overloaded(GenericWrapper<double>[] v)
{
return v;
}
public static GenericWrapper<decimal>[] Overloaded(GenericWrapper<decimal>[] v)
{
return v;
}
public static GenericWrapper<string>[] Overloaded(GenericWrapper<string>[] v)
{
return v;
}
public static GenericWrapper<ShortEnum>[] Overloaded(GenericWrapper<ShortEnum>[] v)
{
return v;
}
public static GenericWrapper<object>[] Overloaded(GenericWrapper<object>[] v)
{
return v;
}
public static GenericWrapper<InterfaceTest>[] Overloaded(GenericWrapper<InterfaceTest>[] v)
{
return v;
}
public static GenericWrapper<ISayHello1>[] Overloaded(GenericWrapper<ISayHello1>[] v)
{
return v;
}
// Multi-parameter overloads: distinguishable by arity and by parameter
// order (string,int vs int,string), so tests can assert exact binding.
public static int Overloaded(string s, int i, object[] o)
{
return o.Length;
}
public static int Overloaded(string s, int i)
{
return i;
}
public static int Overloaded(int i, string s)
{
return i;
}
// Two methods whose names differ only in case; each returns its own name so
// a test can tell which one a (case-sensitive or -insensitive) lookup hit.
public static string CaseSensitive()
{
return "CaseSensitive";
}
public static string Casesensitive()
{
return "Casesensitive";
}
// Default-parameter fixture: returns the four effective argument values
// concatenated, so a caller can see exactly which defaults were applied.
public static string DefaultParams(int a=0, int b=0, int c=0, int d=0)
{
return string.Format("{0}{1}{2}{3}", a, b, c, d);
}
// Same idea with [Optional] attributes instead of C# default values.
public static string OptionalParams([Optional]int a, [Optional]int b, [Optional]int c, [Optional] int d)
{
return string.Format("{0}{1}{2}{3}", a, b, c, d);
}
// True when the invoker supplied Type.Missing for an omitted [Optional]
// reference-typed argument.
public static bool OptionalParams_TestMissing([Optional]object a)
{
return a == Type.Missing;
}
// True when an omitted [Optional] reference-type argument arrives as null.
public static bool OptionalParams_TestReferenceType([Optional]string a)
{
return a == null;
}
// Mixed fixtures: [Optional]-attributed parameters followed by parameters
// with C# default values (and, below, an explicit DefaultParameterValue).
public static string OptionalAndDefaultParams([Optional]int a, [Optional]int b, int c=0, int d=0)
{
return string.Format("{0}{1}{2}{3}", a, b, c, d);
}
public static string OptionalAndDefaultParams2([Optional]int a, [Optional]int b, [Optional, DefaultParameterValue(1)]int c, int d = 2)
{
return string.Format("{0}{1}{2}{3}", a, b, c, d);
}
}
// Derived test fixture: lets tests verify that members inherited from
// MethodTest remain reachable/invokable through a subclass.
public class MethodTestSub : MethodTest
{
    // Explicit parameterless constructor; the base-class constructor call is
    // implicit (the compiler emits it either way).
    public MethodTestSub()
    {
    }

    // Echoes the supplied string back to the caller unchanged.
    public string PublicMethod(string echo)
    {
        string result = echo;
        return result;
    }
}
}
namespace PlainOldNamespace
{
    /// <summary>
    /// Minimal "plain old" class used as a reflection/instantiation target:
    /// two constructors, generic and non-generic methods, and an overload pair.
    /// All methods are intentionally empty.
    /// </summary>
    public class PlainOldClass
    {
        public PlainOldClass() { }

        public PlainOldClass(int param) { }

        // 1 MB payload so each instance has a measurable memory footprint
        // (presumably to make leaked instances visible in tests — confirm
        // against callers). 1 << 20 == 2^20 == 1048576; the shift is an exact
        // integer constant and avoids the original float round-trip through
        // Math.Pow.
        private readonly byte[] payload = new byte[1 << 20];

        public void NonGenericMethod() { }

        public void GenericMethod<T>() { }

        public void OverloadedMethod() { }

        public void OverloadedMethod(int param) { }
    }
}
| |
//
// TypeSystem.cs
//
// Author:
// Jb Evain (jbevain@gmail.com)
//
// Copyright (c) 2008 - 2011 Jb Evain
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using Mono.Cecil.Metadata;
namespace Mono.Cecil {
// Provides lazily-resolved, cached TypeReference instances for the core
// (System.*) types of a module. Two strategies exist: CoreTypeSystem resolves
// the types inside the module itself (used when the module *is* corlib),
// while CommonTypeSystem fabricates references scoped to an mscorlib
// assembly reference.
public abstract class TypeSystem {
// Type system for a module that is itself the core library: core types are
// looked up as definitions (or forwarders) within this very module.
sealed class CoreTypeSystem : TypeSystem {
public CoreTypeSystem (ModuleDefinition module)
: base (module)
{
}
// Resolves a core type by namespace/name from the module's own type
// definitions, falling back to its exported (forwarded) types. A corlib
// is expected to contain every core type, hence the throw on failure.
internal override TypeReference LookupType (string @namespace, string name)
{
var type = LookupTypeDefinition (@namespace, name) ?? LookupTypeForwarded (@namespace, name);
if (type != null)
return type;
throw new NotSupportedException ();
}
// Scans the module's type table for a matching definition, materializing
// TypeDefinition entries on demand as the scan advances.
TypeReference LookupTypeDefinition (string @namespace, string name)
{
var metadata = module.MetadataSystem;
if (metadata.Types == null)
Initialize (module.Types);
return module.Read (new Row<string, string> (@namespace, name), (row, reader) => {
var types = reader.metadata.Types;
for (int i = 0; i < types.Length; i++) {
if (types [i] == null)
types [i] = reader.GetTypeDefinition ((uint) i + 1); // metadata tokens are 1-based
var type = types [i];
if (type.Name == row.Col2 && type.Namespace == row.Col1)
return type;
}
return null;
});
}
// Falls back to the exported-types table for core types this corlib
// forwards to another assembly; null when no forwarder matches.
TypeReference LookupTypeForwarded (string @namespace, string name)
{
if (!module.HasExportedTypes)
return null;
var exported_types = module.ExportedTypes;
for (int i = 0; i < exported_types.Count; i++) {
var exported_type = exported_types [i];
if (exported_type.Name == name && exported_type.Namespace == @namespace)
return exported_type.CreateReference ();
}
return null;
}
// Intentionally empty: its only purpose is to force evaluation of the
// module.Types property passed as the argument (presumably that getter
// populates metadata.Types — confirm against ModuleDefinition).
static void Initialize (object obj)
{
}
}
// Type system for an ordinary module: core types become TypeReferences
// whose scope is an AssemblyNameReference to mscorlib.
sealed class CommonTypeSystem : TypeSystem {
// Cached mscorlib reference; resolved/synthesized on first use.
AssemblyNameReference corlib;
public CommonTypeSystem (ModuleDefinition module)
: base (module)
{
}
internal override TypeReference LookupType (string @namespace, string name)
{
return CreateTypeReference (@namespace, name);
}
// Returns the module's mscorlib assembly reference, reusing an existing
// entry when present; otherwise appends a synthesized reference carrying
// the canonical mscorlib public key token and a runtime-matched version.
public AssemblyNameReference GetCorlibReference ()
{
if (corlib != null)
return corlib;
const string mscorlib = "mscorlib";
var references = module.AssemblyReferences;
for (int i = 0; i < references.Count; i++) {
var reference = references [i];
if (reference.Name == mscorlib)
return corlib = reference;
}
corlib = new AssemblyNameReference {
Name = mscorlib,
Version = GetCorlibVersion (),
PublicKeyToken = new byte [] { 0xb7, 0x7a, 0x5c, 0x56, 0x19, 0x34, 0xe0, 0x89 },
};
references.Add (corlib);
return corlib;
}
// Maps the module's target runtime to the matching mscorlib version.
Version GetCorlibVersion ()
{
switch (module.Runtime) {
case TargetRuntime.Net_1_0:
case TargetRuntime.Net_1_1:
return new Version (1, 0, 0, 0);
case TargetRuntime.Net_2_0:
return new Version (2, 0, 0, 0);
case TargetRuntime.Net_4_0:
return new Version (4, 0, 0, 0);
default:
throw new NotSupportedException ();
}
}
TypeReference CreateTypeReference (string @namespace, string name)
{
return new TypeReference (@namespace, name, module, GetCorlibReference ());
}
}
readonly ModuleDefinition module;
// Per-core-type caches, filled lazily by LookupSystemType/
// LookupSystemValueType and read through the public properties below.
TypeReference type_object;
TypeReference type_void;
TypeReference type_bool;
TypeReference type_char;
TypeReference type_sbyte;
TypeReference type_byte;
TypeReference type_int16;
TypeReference type_uint16;
TypeReference type_int32;
TypeReference type_uint32;
TypeReference type_int64;
TypeReference type_uint64;
TypeReference type_single;
TypeReference type_double;
TypeReference type_intptr;
TypeReference type_uintptr;
TypeReference type_string;
TypeReference type_typedref;
// Private: instances are created only through CreateTypeSystem below.
TypeSystem (ModuleDefinition module)
{
this.module = module;
}
// Factory: selects the lookup strategy based on whether the module is
// itself the core library.
internal static TypeSystem CreateTypeSystem (ModuleDefinition module)
{
if (module.IsCorlib ())
return new CoreTypeSystem (module);
return new CommonTypeSystem (module);
}
internal abstract TypeReference LookupType (string @namespace, string name);
// Lazily resolves System.<name>, stamps its ElementType, and caches it in
// the supplied slot. Done under the module's SyncRoot, re-checking the
// slot inside the lock so concurrent callers agree on one instance.
TypeReference LookupSystemType (ref TypeReference reference, string name, ElementType element_type)
{
lock (module.SyncRoot) {
if (reference != null)
return reference;
var type = LookupType ("System", name);
type.etype = element_type;
return reference = type;
}
}
// Same as LookupSystemType, additionally marking the result as a value type.
TypeReference LookupSystemValueType (ref TypeReference typeRef, string name, ElementType element_type)
{
lock (module.SyncRoot) {
if (typeRef != null)
return typeRef;
var type = LookupType ("System", name);
type.etype = element_type;
type.IsValueType = true;
return typeRef = type;
}
}
// The metadata scope that core types resolve against: the module itself
// for a corlib, otherwise the mscorlib assembly reference.
public IMetadataScope Corlib {
get {
var common = this as CommonTypeSystem;
if (common == null)
return module;
return common.GetCorlibReference ();
}
}
// Cached core-type accessors. The null-coalescing read outside any lock is
// the fast path; on a miss, LookupSystem(Value)Type re-checks under the lock.
public TypeReference Object {
get { return type_object ?? (LookupSystemType (ref type_object, "Object", ElementType.Object)); }
}
public TypeReference Void {
get { return type_void ?? (LookupSystemType (ref type_void, "Void", ElementType.Void)); }
}
public TypeReference Boolean {
get { return type_bool ?? (LookupSystemValueType (ref type_bool, "Boolean", ElementType.Boolean)); }
}
public TypeReference Char {
get { return type_char ?? (LookupSystemValueType (ref type_char, "Char", ElementType.Char)); }
}
public TypeReference SByte {
get { return type_sbyte ?? (LookupSystemValueType (ref type_sbyte, "SByte", ElementType.I1)); }
}
public TypeReference Byte {
get { return type_byte ?? (LookupSystemValueType (ref type_byte, "Byte", ElementType.U1)); }
}
public TypeReference Int16 {
get { return type_int16 ?? (LookupSystemValueType (ref type_int16, "Int16", ElementType.I2)); }
}
public TypeReference UInt16 {
get { return type_uint16 ?? (LookupSystemValueType (ref type_uint16, "UInt16", ElementType.U2)); }
}
public TypeReference Int32 {
get { return type_int32 ?? (LookupSystemValueType (ref type_int32, "Int32", ElementType.I4)); }
}
public TypeReference UInt32 {
get { return type_uint32 ?? (LookupSystemValueType (ref type_uint32, "UInt32", ElementType.U4)); }
}
public TypeReference Int64 {
get { return type_int64 ?? (LookupSystemValueType (ref type_int64, "Int64", ElementType.I8)); }
}
public TypeReference UInt64 {
get { return type_uint64 ?? (LookupSystemValueType (ref type_uint64, "UInt64", ElementType.U8)); }
}
public TypeReference Single {
get { return type_single ?? (LookupSystemValueType (ref type_single, "Single", ElementType.R4)); }
}
public TypeReference Double {
get { return type_double ?? (LookupSystemValueType (ref type_double, "Double", ElementType.R8)); }
}
public TypeReference IntPtr {
get { return type_intptr ?? (LookupSystemValueType (ref type_intptr, "IntPtr", ElementType.I)); }
}
public TypeReference UIntPtr {
get { return type_uintptr ?? (LookupSystemValueType (ref type_uintptr, "UIntPtr", ElementType.U)); }
}
public TypeReference String {
get { return type_string ?? (LookupSystemType (ref type_string, "String", ElementType.String)); }
}
public TypeReference TypedReference {
get { return type_typedref ?? (LookupSystemValueType (ref type_typedref, "TypedReference", ElementType.TypedByRef)); }
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Sql
{
using Microsoft.Azure;
using Microsoft.Azure.Management;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Extension methods for SyncGroupsOperations.
/// </summary>
public static partial class SyncGroupsOperationsExtensions
{
/// <summary>
/// Gets a collection of sync database ids.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='locationName'>
/// The name of the region where the resource is located.
/// </param>
// AutoRest-generated synchronous wrapper: blocks the calling thread until
// the async call completes and returns the first page of results.
public static IPage<SyncDatabaseIdProperties> ListSyncDatabaseIds(this ISyncGroupsOperations operations, string locationName)
{
return operations.ListSyncDatabaseIdsAsync(locationName).GetAwaiter().GetResult();
}
/// <summary>
/// Gets a collection of sync database ids.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='locationName'>
/// The name of the region where the resource is located.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
// Awaits the underlying HTTP operation and unwraps the response body; the
// response envelope is disposed before returning.
public static async Task<IPage<SyncDatabaseIdProperties>> ListSyncDatabaseIdsAsync(this ISyncGroupsOperations operations, string locationName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListSyncDatabaseIdsWithHttpMessagesAsync(locationName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Refreshes a hub database schema.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
// AutoRest-generated synchronous wrapper: blocks until the refresh
// operation's async call completes; no result is returned.
public static void RefreshHubSchema(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName)
{
operations.RefreshHubSchemaAsync(resourceGroupName, serverName, databaseName, syncGroupName).GetAwaiter().GetResult();
}
/// <summary>
/// Refreshes a hub database schema.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
// Awaits the HTTP operation and disposes the response envelope; the call
// has no body to return.
public static async Task RefreshHubSchemaAsync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken = default(CancellationToken))
{
(await operations.RefreshHubSchemaWithHttpMessagesAsync(resourceGroupName, serverName, databaseName, syncGroupName, null, cancellationToken).ConfigureAwait(false)).Dispose();
}
/// <summary>
/// Gets a collection of hub database schemas.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
// AutoRest-generated synchronous wrapper: blocks until the async call
// completes and returns the first page of hub schema properties.
public static IPage<SyncFullSchemaProperties> ListHubSchemas(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName)
{
return operations.ListHubSchemasAsync(resourceGroupName, serverName, databaseName, syncGroupName).GetAwaiter().GetResult();
}
/// <summary>
/// Gets a collection of hub database schemas.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
// Awaits the underlying HTTP operation and unwraps the response body; the
// response envelope is disposed before returning.
public static async Task<IPage<SyncFullSchemaProperties>> ListHubSchemasAsync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListHubSchemasWithHttpMessagesAsync(resourceGroupName, serverName, databaseName, syncGroupName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Gets a collection of sync group logs.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='startTime'>
/// Get logs generated after this time.
/// </param>
/// <param name='endTime'>
/// Get logs generated before this time.
/// </param>
/// <param name='type'>
/// The types of logs to retrieve. Possible values include: 'All', 'Error',
/// 'Warning', 'Success'
/// </param>
/// <param name='continuationToken'>
/// The continuation token for this operation.
/// </param>
// AutoRest-generated synchronous wrapper: blocks until the async call
// completes and returns the first page of sync group log properties.
public static IPage<SyncGroupLogProperties> ListLogs(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, string startTime, string endTime, string type, string continuationToken = default(string))
{
return operations.ListLogsAsync(resourceGroupName, serverName, databaseName, syncGroupName, startTime, endTime, type, continuationToken).GetAwaiter().GetResult();
}
/// <summary>
/// Gets a collection of sync group logs.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='startTime'>
/// Get logs generated after this time.
/// </param>
/// <param name='endTime'>
/// Get logs generated before this time.
/// </param>
/// <param name='type'>
/// The types of logs to retrieve. Possible values include: 'All', 'Error',
/// 'Warning', 'Success'
/// </param>
/// <param name='continuationToken'>
/// The continuation token for this operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
// Awaits the underlying HTTP operation and unwraps the response body; the
// response envelope is disposed before returning.
public static async Task<IPage<SyncGroupLogProperties>> ListLogsAsync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, string startTime, string endTime, string type, string continuationToken = default(string), CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListLogsWithHttpMessagesAsync(resourceGroupName, serverName, databaseName, syncGroupName, startTime, endTime, type, continuationToken, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Cancels a sync group synchronization.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
// AutoRest-generated synchronous wrapper: blocks until the cancellation
// request's async call completes; no result is returned.
public static void CancelSync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName)
{
operations.CancelSyncAsync(resourceGroupName, serverName, databaseName, syncGroupName).GetAwaiter().GetResult();
}
/// <summary>
/// Cancels a sync group synchronization.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
// Awaits the HTTP operation and disposes the response envelope; the call
// has no body to return.
public static async Task CancelSyncAsync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken = default(CancellationToken))
{
(await operations.CancelSyncWithHttpMessagesAsync(resourceGroupName, serverName, databaseName, syncGroupName, null, cancellationToken).ConfigureAwait(false)).Dispose();
}
/// <summary>
/// Triggers a sync group synchronization.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
// AutoRest-generated synchronous wrapper: blocks until the trigger
// request's async call completes; no result is returned.
public static void TriggerSync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName)
{
operations.TriggerSyncAsync(resourceGroupName, serverName, databaseName, syncGroupName).GetAwaiter().GetResult();
}
/// <summary>
/// Triggers a sync group synchronization.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
// Awaits the HTTP operation and disposes the response envelope; the call
// has no body to return.
public static async Task TriggerSyncAsync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken = default(CancellationToken))
{
(await operations.TriggerSyncWithHttpMessagesAsync(resourceGroupName, serverName, databaseName, syncGroupName, null, cancellationToken).ConfigureAwait(false)).Dispose();
}
/// <summary>
/// Gets a sync group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
// AutoRest-generated synchronous wrapper: blocks until the async call
// completes and returns the requested sync group.
public static SyncGroup Get(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName)
{
return operations.GetAsync(resourceGroupName, serverName, databaseName, syncGroupName).GetAwaiter().GetResult();
}
/// <summary>
/// Gets a sync group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
// Awaits the underlying HTTP operation and unwraps the response body; the
// response envelope is disposed before returning.
public static async Task<SyncGroup> GetAsync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetWithHttpMessagesAsync(resourceGroupName, serverName, databaseName, syncGroupName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Creates or updates a sync group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='parameters'>
/// The requested sync group resource state.
/// </param>
// AutoRest-generated synchronous wrapper: blocks until the async call
// completes and returns the created or updated sync group.
public static SyncGroup CreateOrUpdate(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, SyncGroup parameters)
{
return operations.CreateOrUpdateAsync(resourceGroupName, serverName, databaseName, syncGroupName, parameters).GetAwaiter().GetResult();
}
/// <summary>
/// Creates or updates a sync group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='parameters'>
/// The requested sync group resource state.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
// Awaits the underlying HTTP operation and unwraps the response body; the
// response envelope is disposed before returning.
public static async Task<SyncGroup> CreateOrUpdateAsync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, SyncGroup parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.CreateOrUpdateWithHttpMessagesAsync(resourceGroupName, serverName, databaseName, syncGroupName, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Deletes a sync group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
// AutoRest-generated synchronous wrapper: blocks until the delete
// operation's async call completes; no result is returned.
public static void Delete(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName)
{
operations.DeleteAsync(resourceGroupName, serverName, databaseName, syncGroupName).GetAwaiter().GetResult();
}
/// <summary>
/// Deletes a sync group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
// Awaits the HTTP operation and disposes the response envelope; the call
// has no body to return.
public static async Task DeleteAsync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken = default(CancellationToken))
{
(await operations.DeleteWithHttpMessagesAsync(resourceGroupName, serverName, databaseName, syncGroupName, null, cancellationToken).ConfigureAwait(false)).Dispose();
}
/// <summary>
/// Updates a sync group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='parameters'>
/// The requested sync group resource state.
/// </param>
// AutoRest-generated synchronous wrapper: blocks until the async call
// completes and returns the updated sync group.
public static SyncGroup Update(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, SyncGroup parameters)
{
return operations.UpdateAsync(resourceGroupName, serverName, databaseName, syncGroupName, parameters).GetAwaiter().GetResult();
}
/// <summary>
/// Updates a sync group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='parameters'>
/// The requested sync group resource state.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<SyncGroup> UpdateAsync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, SyncGroup parameters, CancellationToken cancellationToken = default(CancellationToken))
{
    var _response = await operations.UpdateWithHttpMessagesAsync(resourceGroupName, serverName, databaseName, syncGroupName, parameters, null, cancellationToken).ConfigureAwait(false);
    using (_response)
    {
        // Unwrap the deserialized sync group from the HTTP envelope.
        return _response.Body;
    }
}
/// <summary>
/// Lists sync groups under a hub database.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
public static IPage<SyncGroup> ListByDatabase(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName)
{
    // Synchronous convenience wrapper: block until the async counterpart finishes.
    Task<IPage<SyncGroup>> listTask = operations.ListByDatabaseAsync(resourceGroupName, serverName, databaseName);
    return listTask.GetAwaiter().GetResult();
}
/// <summary>
/// Lists sync groups under a hub database.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<SyncGroup>> ListByDatabaseAsync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, CancellationToken cancellationToken = default(CancellationToken))
{
    var _response = await operations.ListByDatabaseWithHttpMessagesAsync(resourceGroupName, serverName, databaseName, null, cancellationToken).ConfigureAwait(false);
    using (_response)
    {
        // Unwrap the first page of sync groups from the HTTP envelope.
        return _response.Body;
    }
}
/// <summary>
/// Refreshes a hub database schema.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
public static void BeginRefreshHubSchema(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName)
{
    // Synchronous convenience wrapper: block until the async counterpart finishes.
    Task refreshTask = operations.BeginRefreshHubSchemaAsync(resourceGroupName, serverName, databaseName, syncGroupName);
    refreshTask.GetAwaiter().GetResult();
}
/// <summary>
/// Refreshes a hub database schema.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task BeginRefreshHubSchemaAsync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken = default(CancellationToken))
{
    // The refresh response carries no body; release the HTTP resources as soon as the call completes.
    var _response = await operations.BeginRefreshHubSchemaWithHttpMessagesAsync(resourceGroupName, serverName, databaseName, syncGroupName, null, cancellationToken).ConfigureAwait(false);
    _response.Dispose();
}
/// <summary>
/// Creates or updates a sync group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='parameters'>
/// The requested sync group resource state.
/// </param>
public static SyncGroup BeginCreateOrUpdate(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, SyncGroup parameters)
{
    // Synchronous convenience wrapper: block until the async counterpart finishes.
    Task<SyncGroup> createTask = operations.BeginCreateOrUpdateAsync(resourceGroupName, serverName, databaseName, syncGroupName, parameters);
    return createTask.GetAwaiter().GetResult();
}
/// <summary>
/// Creates or updates a sync group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='parameters'>
/// The requested sync group resource state.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<SyncGroup> BeginCreateOrUpdateAsync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, SyncGroup parameters, CancellationToken cancellationToken = default(CancellationToken))
{
    var _response = await operations.BeginCreateOrUpdateWithHttpMessagesAsync(resourceGroupName, serverName, databaseName, syncGroupName, parameters, null, cancellationToken).ConfigureAwait(false);
    using (_response)
    {
        // Unwrap the deserialized sync group from the HTTP envelope.
        return _response.Body;
    }
}
/// <summary>
/// Deletes a sync group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
public static void BeginDelete(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName)
{
    // Synchronous convenience wrapper: block until the async counterpart finishes.
    Task deleteTask = operations.BeginDeleteAsync(resourceGroupName, serverName, databaseName, syncGroupName);
    deleteTask.GetAwaiter().GetResult();
}
/// <summary>
/// Deletes a sync group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task BeginDeleteAsync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, CancellationToken cancellationToken = default(CancellationToken))
{
    // The delete response carries no body; release the HTTP resources as soon as the call completes.
    var _response = await operations.BeginDeleteWithHttpMessagesAsync(resourceGroupName, serverName, databaseName, syncGroupName, null, cancellationToken).ConfigureAwait(false);
    _response.Dispose();
}
/// <summary>
/// Updates a sync group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='parameters'>
/// The requested sync group resource state.
/// </param>
public static SyncGroup BeginUpdate(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, SyncGroup parameters)
{
    // Synchronous convenience wrapper: block until the async counterpart finishes.
    Task<SyncGroup> updateTask = operations.BeginUpdateAsync(resourceGroupName, serverName, databaseName, syncGroupName, parameters);
    return updateTask.GetAwaiter().GetResult();
}
/// <summary>
/// Updates a sync group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='parameters'>
/// The requested sync group resource state.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<SyncGroup> BeginUpdateAsync(this ISyncGroupsOperations operations, string resourceGroupName, string serverName, string databaseName, string syncGroupName, SyncGroup parameters, CancellationToken cancellationToken = default(CancellationToken))
{
    var _response = await operations.BeginUpdateWithHttpMessagesAsync(resourceGroupName, serverName, databaseName, syncGroupName, parameters, null, cancellationToken).ConfigureAwait(false);
    using (_response)
    {
        // Unwrap the deserialized sync group from the HTTP envelope.
        return _response.Body;
    }
}
/// <summary>
/// Gets a collection of sync database ids.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<SyncDatabaseIdProperties> ListSyncDatabaseIdsNext(this ISyncGroupsOperations operations, string nextPageLink)
{
    // Synchronous convenience wrapper: block until the async counterpart finishes.
    Task<IPage<SyncDatabaseIdProperties>> pageTask = operations.ListSyncDatabaseIdsNextAsync(nextPageLink);
    return pageTask.GetAwaiter().GetResult();
}
/// <summary>
/// Gets a collection of sync database ids.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<SyncDatabaseIdProperties>> ListSyncDatabaseIdsNextAsync(this ISyncGroupsOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
    var _response = await operations.ListSyncDatabaseIdsNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false);
    using (_response)
    {
        // Unwrap the next page of sync database ids from the HTTP envelope.
        return _response.Body;
    }
}
/// <summary>
/// Gets a collection of hub database schemas.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<SyncFullSchemaProperties> ListHubSchemasNext(this ISyncGroupsOperations operations, string nextPageLink)
{
    // Synchronous convenience wrapper: block until the async counterpart finishes.
    Task<IPage<SyncFullSchemaProperties>> pageTask = operations.ListHubSchemasNextAsync(nextPageLink);
    return pageTask.GetAwaiter().GetResult();
}
/// <summary>
/// Gets a collection of hub database schemas.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<SyncFullSchemaProperties>> ListHubSchemasNextAsync(this ISyncGroupsOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
    var _response = await operations.ListHubSchemasNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false);
    using (_response)
    {
        // Unwrap the next page of hub schemas from the HTTP envelope.
        return _response.Body;
    }
}
/// <summary>
/// Gets a collection of sync group logs.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<SyncGroupLogProperties> ListLogsNext(this ISyncGroupsOperations operations, string nextPageLink)
{
    // Synchronous convenience wrapper: block until the async counterpart finishes.
    Task<IPage<SyncGroupLogProperties>> pageTask = operations.ListLogsNextAsync(nextPageLink);
    return pageTask.GetAwaiter().GetResult();
}
/// <summary>
/// Gets a collection of sync group logs.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<SyncGroupLogProperties>> ListLogsNextAsync(this ISyncGroupsOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
    var _response = await operations.ListLogsNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false);
    using (_response)
    {
        // Unwrap the next page of sync group logs from the HTTP envelope.
        return _response.Body;
    }
}
/// <summary>
/// Lists sync groups under a hub database.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<SyncGroup> ListByDatabaseNext(this ISyncGroupsOperations operations, string nextPageLink)
{
    // Synchronous convenience wrapper: block until the async counterpart finishes.
    Task<IPage<SyncGroup>> pageTask = operations.ListByDatabaseNextAsync(nextPageLink);
    return pageTask.GetAwaiter().GetResult();
}
/// <summary>
/// Lists sync groups under a hub database.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<SyncGroup>> ListByDatabaseNextAsync(this ISyncGroupsOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
    var _response = await operations.ListByDatabaseNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false);
    using (_response)
    {
        // Unwrap the next page of sync groups from the HTTP envelope.
        return _response.Body;
    }
}
}
}
| |
namespace RRLab.PhysiologyWorkbench
{
partial class TestPulseSettingsBox
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Component Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.components = new System.ComponentModel.Container();
this.tableLayoutPanel1 = new System.Windows.Forms.TableLayoutPanel();
this.tbPulseTime = new System.Windows.Forms.TextBox();
this.TestPulseBindingSource = new System.Windows.Forms.BindingSource(this.components);
this.tbPulsePotential = new System.Windows.Forms.TextBox();
this.tbRestingPotential = new System.Windows.Forms.TextBox();
this.tbRestingTime = new System.Windows.Forms.TextBox();
this.label6 = new System.Windows.Forms.Label();
this.label4 = new System.Windows.Forms.Label();
this.label2 = new System.Windows.Forms.Label();
this.label1 = new System.Windows.Forms.Label();
this.label3 = new System.Windows.Forms.Label();
this.label5 = new System.Windows.Forms.Label();
this.tableLayoutPanel1.SuspendLayout();
((System.ComponentModel.ISupportInitialize)(this.TestPulseBindingSource)).BeginInit();
this.SuspendLayout();
//
// tableLayoutPanel1
//
this.tableLayoutPanel1.ColumnCount = 2;
this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle());
this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle());
this.tableLayoutPanel1.Controls.Add(this.tbPulseTime, 1, 4);
this.tableLayoutPanel1.Controls.Add(this.tbPulsePotential, 1, 5);
this.tableLayoutPanel1.Controls.Add(this.tbRestingPotential, 1, 2);
this.tableLayoutPanel1.Controls.Add(this.tbRestingTime, 1, 1);
this.tableLayoutPanel1.Controls.Add(this.label6, 0, 4);
this.tableLayoutPanel1.Controls.Add(this.label4, 0, 1);
this.tableLayoutPanel1.Controls.Add(this.label2, 0, 5);
this.tableLayoutPanel1.Controls.Add(this.label1, 0, 2);
this.tableLayoutPanel1.Controls.Add(this.label3, 0, 3);
this.tableLayoutPanel1.Controls.Add(this.label5, 0, 0);
this.tableLayoutPanel1.Dock = System.Windows.Forms.DockStyle.Fill;
this.tableLayoutPanel1.Location = new System.Drawing.Point(0, 0);
this.tableLayoutPanel1.Name = "tableLayoutPanel1";
this.tableLayoutPanel1.RowCount = 7;
this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F));
this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle());
this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle());
this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle());
this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle());
this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle());
this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle());
this.tableLayoutPanel1.Size = new System.Drawing.Size(129, 155);
this.tableLayoutPanel1.TabIndex = 19;
//
// tbPulseTime
//
this.tbPulseTime.DataBindings.Add(new System.Windows.Forms.Binding("Text", this.TestPulseBindingSource, "PulseLength", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
this.tbPulseTime.Dock = System.Windows.Forms.DockStyle.Fill;
this.tbPulseTime.Location = new System.Drawing.Point(87, 88);
this.tbPulseTime.Name = "tbPulseTime";
this.tbPulseTime.Size = new System.Drawing.Size(39, 20);
this.tbPulseTime.TabIndex = 25;
//
// TestPulseBindingSource
//
this.TestPulseBindingSource.DataSource = typeof(RRLab.PhysiologyWorkbench.Daq.TestPulseProtocol);
//
// tbPulsePotential
//
this.tbPulsePotential.DataBindings.Add(new System.Windows.Forms.Binding("Text", this.TestPulseBindingSource, "PulsePotential", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
this.tbPulsePotential.Dock = System.Windows.Forms.DockStyle.Fill;
this.tbPulsePotential.Location = new System.Drawing.Point(87, 114);
this.tbPulsePotential.Name = "tbPulsePotential";
this.tbPulsePotential.Size = new System.Drawing.Size(39, 20);
this.tbPulsePotential.TabIndex = 24;
//
// tbRestingPotential
//
this.tbRestingPotential.DataBindings.Add(new System.Windows.Forms.Binding("Text", this.TestPulseBindingSource, "RestingPotential", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
this.tbRestingPotential.Dock = System.Windows.Forms.DockStyle.Fill;
this.tbRestingPotential.Location = new System.Drawing.Point(87, 49);
this.tbRestingPotential.Name = "tbRestingPotential";
this.tbRestingPotential.Size = new System.Drawing.Size(39, 20);
this.tbRestingPotential.TabIndex = 23;
//
// tbRestingTime
//
this.tbRestingTime.DataBindings.Add(new System.Windows.Forms.Binding("Text", this.TestPulseBindingSource, "RestingLength", true, System.Windows.Forms.DataSourceUpdateMode.OnPropertyChanged));
this.tbRestingTime.Dock = System.Windows.Forms.DockStyle.Fill;
this.tbRestingTime.Location = new System.Drawing.Point(87, 23);
this.tbRestingTime.Name = "tbRestingTime";
this.tbRestingTime.Size = new System.Drawing.Size(39, 20);
this.tbRestingTime.TabIndex = 22;
//
// label6
//
this.label6.Dock = System.Windows.Forms.DockStyle.Fill;
this.label6.Location = new System.Drawing.Point(3, 85);
this.label6.Name = "label6";
this.label6.Size = new System.Drawing.Size(78, 26);
this.label6.TabIndex = 21;
this.label6.Text = "Length (ms)";
this.label6.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
//
// label4
//
this.label4.Dock = System.Windows.Forms.DockStyle.Fill;
this.label4.Location = new System.Drawing.Point(3, 20);
this.label4.Name = "label4";
this.label4.Size = new System.Drawing.Size(78, 26);
this.label4.TabIndex = 20;
this.label4.Text = "Length (ms)";
this.label4.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
//
// label2
//
this.label2.Dock = System.Windows.Forms.DockStyle.Fill;
this.label2.Location = new System.Drawing.Point(3, 111);
this.label2.Name = "label2";
this.label2.Size = new System.Drawing.Size(78, 26);
this.label2.TabIndex = 19;
this.label2.Text = "Potential (mV)";
this.label2.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
//
// label1
//
this.label1.Dock = System.Windows.Forms.DockStyle.Fill;
this.label1.Location = new System.Drawing.Point(3, 46);
this.label1.Name = "label1";
this.label1.Size = new System.Drawing.Size(78, 26);
this.label1.TabIndex = 18;
this.label1.Text = "Potential (mV)";
this.label1.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
//
// label3
//
this.label3.AutoSize = true;
this.tableLayoutPanel1.SetColumnSpan(this.label3, 2);
this.label3.Dock = System.Windows.Forms.DockStyle.Fill;
this.label3.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.label3.Location = new System.Drawing.Point(3, 72);
this.label3.Name = "label3";
this.label3.Size = new System.Drawing.Size(123, 13);
this.label3.TabIndex = 26;
this.label3.Text = "Pulse";
this.label3.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
//
// label5
//
this.label5.AutoSize = true;
this.tableLayoutPanel1.SetColumnSpan(this.label5, 2);
this.label5.Dock = System.Windows.Forms.DockStyle.Fill;
this.label5.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
this.label5.Location = new System.Drawing.Point(3, 0);
this.label5.Name = "label5";
this.label5.Size = new System.Drawing.Size(123, 20);
this.label5.TabIndex = 27;
this.label5.Text = "Rest";
this.label5.TextAlign = System.Drawing.ContentAlignment.MiddleLeft;
//
// TestPulseSettingsBox
//
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.Controls.Add(this.tableLayoutPanel1);
this.Name = "TestPulseSettingsBox";
this.Size = new System.Drawing.Size(129, 155);
this.tableLayoutPanel1.ResumeLayout(false);
this.tableLayoutPanel1.PerformLayout();
((System.ComponentModel.ISupportInitialize)(this.TestPulseBindingSource)).EndInit();
this.ResumeLayout(false);
}
#endregion
private System.Windows.Forms.TableLayoutPanel tableLayoutPanel1;
private System.Windows.Forms.TextBox tbPulseTime;
private System.Windows.Forms.TextBox tbPulsePotential;
private System.Windows.Forms.TextBox tbRestingPotential;
private System.Windows.Forms.TextBox tbRestingTime;
private System.Windows.Forms.Label label6;
private System.Windows.Forms.Label label4;
private System.Windows.Forms.Label label2;
private System.Windows.Forms.Label label1;
private System.Windows.Forms.Label label3;
private System.Windows.Forms.Label label5;
private System.Windows.Forms.BindingSource TestPulseBindingSource;
}
}
| |
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using System.IO;
using DiscUtils.Vhd;
using NUnit.Framework;
namespace DiscUtils
{
[TestFixture]
public class DiscFileSystemDirectoryTest
{
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void Create(NewFileSystemDelegate fsFactory)
{
    // Creating a directory through DiscDirectoryInfo makes it visible at the root.
    DiscFileSystem fileSystem = fsFactory();
    fileSystem.GetDirectoryInfo("SOMEDIR").Create();
    Assert.AreEqual(1, fileSystem.Root.GetDirectories().Length);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void CreateRecursive(NewFileSystemDelegate fsFactory)
{
    // Creating a nested path implicitly creates every intermediate directory.
    DiscFileSystem fileSystem = fsFactory();
    fileSystem.GetDirectoryInfo(@"SOMEDIR\CHILDDIR").Create();
    Assert.AreEqual(1, fileSystem.Root.GetDirectories().Length);
    DiscDirectoryInfo parent = fileSystem.GetDirectoryInfo(@"SOMEDIR");
    Assert.AreEqual(1, parent.GetDirectories().Length);
    Assert.AreEqual("CHILDDIR", parent.GetDirectories()[0].Name);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void CreateExisting(NewFileSystemDelegate fsFactory)
{
    // A second Create on an already-existing directory is a harmless no-op.
    DiscFileSystem fileSystem = fsFactory();
    DiscDirectoryInfo directory = fileSystem.GetDirectoryInfo("SOMEDIR");
    directory.Create();
    directory.Create();
    Assert.AreEqual(1, fileSystem.Root.GetDirectories().Length);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
[ExpectedException(typeof(IOException))]
[Category("ThrowsException")]
public void CreateInvalid_Long(NewFileSystemDelegate fsFactory)
{
    // A 256-character directory name is rejected with an IOException.
    DiscFileSystem fileSystem = fsFactory();
    string tooLongName = new String('X', 256);
    fileSystem.GetDirectoryInfo(tooLongName).Create();
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
[ExpectedException(typeof(IOException))]
[Category("ThrowsException")]
public void CreateInvalid_Characters(NewFileSystemDelegate fsFactory)
{
    // An embedded NUL character in the name is rejected with an IOException.
    DiscFileSystem fileSystem = fsFactory();
    fileSystem.GetDirectoryInfo("SOME\0DIR").Create();
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void Exists(NewFileSystemDelegate fsFactory)
{
    // Exists is true for the root and each created level (with or without a
    // trailing separator), and false for paths that were never created.
    DiscFileSystem fileSystem = fsFactory();
    fileSystem.GetDirectoryInfo(@"SOMEDIR\CHILDDIR").Create();
    Assert.IsTrue(fileSystem.GetDirectoryInfo(@"\").Exists);
    Assert.IsTrue(fileSystem.GetDirectoryInfo(@"SOMEDIR").Exists);
    Assert.IsTrue(fileSystem.GetDirectoryInfo(@"SOMEDIR\CHILDDIR").Exists);
    Assert.IsTrue(fileSystem.GetDirectoryInfo(@"SOMEDIR\CHILDDIR\").Exists);
    Assert.IsFalse(fileSystem.GetDirectoryInfo(@"NONDIR").Exists);
    Assert.IsFalse(fileSystem.GetDirectoryInfo(@"SOMEDIR\NONDIR").Exists);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void FullName(NewFileSystemDelegate fsFactory)
{
    // FullName always carries a trailing backslash, even for directories
    // that have not been created yet.
    DiscFileSystem fileSystem = fsFactory();
    Assert.AreEqual(@"\", fileSystem.Root.FullName);
    Assert.AreEqual(@"SOMEDIR\", fileSystem.GetDirectoryInfo(@"SOMEDIR").FullName);
    Assert.AreEqual(@"SOMEDIR\CHILDDIR\", fileSystem.GetDirectoryInfo(@"SOMEDIR\CHILDDIR").FullName);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void Delete(NewFileSystemDelegate fsFactory)
{
    // Deleting an empty directory removes it from the root listing.
    DiscFileSystem fileSystem = fsFactory();
    fileSystem.CreateDirectory(@"Fred");
    Assert.AreEqual(1, fileSystem.Root.GetDirectories().Length);
    DiscDirectoryInfo fred = fileSystem.Root.GetDirectories(@"Fred")[0];
    fred.Delete();
    Assert.AreEqual(0, fileSystem.Root.GetDirectories().Length);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void DeleteRecursive(NewFileSystemDelegate fsFactory)
{
    // Delete(true) removes a directory together with its contents.
    DiscFileSystem fileSystem = fsFactory();
    fileSystem.CreateDirectory(@"Fred\child");
    Assert.AreEqual(1, fileSystem.Root.GetDirectories().Length);
    DiscDirectoryInfo fred = fileSystem.Root.GetDirectories(@"Fred")[0];
    fred.Delete(true);
    Assert.AreEqual(0, fileSystem.Root.GetDirectories().Length);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
[ExpectedException(typeof(IOException))]
[Category("ThrowsException")]
public void DeleteRoot(NewFileSystemDelegate fsFactory)
{
    // The root directory can never be deleted; expect an IOException.
    DiscFileSystem fileSystem = fsFactory();
    fileSystem.Root.Delete();
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
[ExpectedException(typeof(IOException))]
[Category("ThrowsException")]
public void DeleteNonEmpty_NonRecursive(NewFileSystemDelegate fsFactory)
{
    // A plain Delete on a non-empty directory must fail with an IOException.
    DiscFileSystem fileSystem = fsFactory();
    fileSystem.CreateDirectory(@"Fred\child");
    fileSystem.Root.GetDirectories(@"Fred")[0].Delete();
}
[TestCaseSource(typeof(FileSystemSource), "QuickReadWriteFileSystems")]
[Category("SlowTest")]
public void CreateDeleteLeakTest(NewFileSystemDelegate fsFactory)
{
    // Repeatedly create and delete a directory to check the file system does
    // not leak structures, first at the root and then one level down.
    DiscFileSystem fileSystem = fsFactory();
    for (int iteration = 0; iteration < 2000; ++iteration)
    {
        fileSystem.CreateDirectory(@"Fred");
        fileSystem.Root.GetDirectories(@"Fred")[0].Delete();
    }
    fileSystem.CreateDirectory(@"SOMEDIR");
    DiscDirectoryInfo someDirectory = fileSystem.GetDirectoryInfo(@"SOMEDIR");
    Assert.IsNotNull(someDirectory);
    for (int iteration = 0; iteration < 2000; ++iteration)
    {
        fileSystem.CreateDirectory(@"SOMEDIR\Fred");
        someDirectory.GetDirectories(@"Fred")[0].Delete();
    }
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void Move(NewFileSystemDelegate fsFactory)
{
    // MoveTo relocates the subtree: NEWDIR appears at the root and SOMEDIR is left empty.
    DiscFileSystem fileSystem = fsFactory();
    fileSystem.CreateDirectory(@"SOMEDIR\CHILD\GCHILD");
    fileSystem.GetDirectoryInfo(@"SOMEDIR\CHILD").MoveTo("NEWDIR");
    Assert.AreEqual(2, fileSystem.Root.GetDirectories().Length);
    Assert.AreEqual(0, fileSystem.Root.GetDirectories("SOMEDIR")[0].GetDirectories().Length);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void Extension(NewFileSystemDelegate fsFactory)
{
    // Extension is the text after the final dot (without the dot), empty when there is none.
    DiscFileSystem fileSystem = fsFactory();
    Assert.AreEqual("dir", fileSystem.GetDirectoryInfo("fred.dir").Extension);
    Assert.AreEqual("", fileSystem.GetDirectoryInfo("fred").Extension);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void GetDirectories(NewFileSystemDelegate fsFactory)
{
    // Exercises pattern matching, case-insensitive lookup, and SearchOption scoping.
    DiscFileSystem fileSystem = fsFactory();
    fileSystem.CreateDirectory(@"SOMEDIR\CHILD\GCHILD");
    fileSystem.CreateDirectory(@"A.DIR");
    Assert.AreEqual(2, fileSystem.Root.GetDirectories().Length);
    // Lookup by name is case-insensitive, but the stored name keeps its case.
    DiscDirectoryInfo someDirectory = fileSystem.Root.GetDirectories(@"SoMeDir")[0];
    Assert.AreEqual(1, fileSystem.Root.GetDirectories("SOMEDIR").Length);
    Assert.AreEqual("SOMEDIR", someDirectory.Name);
    Assert.AreEqual(1, someDirectory.GetDirectories("*.*").Length);
    Assert.AreEqual("CHILD", someDirectory.GetDirectories("*.*")[0].Name);
    Assert.AreEqual(2, someDirectory.GetDirectories("*.*", SearchOption.AllDirectories).Length);
    Assert.AreEqual(4, fileSystem.Root.GetDirectories("*.*", SearchOption.AllDirectories).Length);
    Assert.AreEqual(2, fileSystem.Root.GetDirectories("*.*", SearchOption.TopDirectoryOnly).Length);
    Assert.AreEqual(1, fileSystem.Root.GetDirectories("*.DIR", SearchOption.AllDirectories).Length);
    Assert.AreEqual(@"A.DIR\", fileSystem.Root.GetDirectories("*.DIR", SearchOption.AllDirectories)[0].FullName);
    Assert.AreEqual(1, fileSystem.Root.GetDirectories("GCHILD", SearchOption.AllDirectories).Length);
    Assert.AreEqual(@"SOMEDIR\CHILD\GCHILD\", fileSystem.Root.GetDirectories("GCHILD", SearchOption.AllDirectories)[0].FullName);
}
[ExpectedException(typeof(DirectoryNotFoundException))]
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void GetDirectories_BadPath(NewFileSystemDelegate fsFactory)
{
    // Listing a path that does not exist must raise DirectoryNotFoundException.
    DiscFileSystem fileSystem = fsFactory();
    fileSystem.GetDirectories(@"\baddir");
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void GetFiles(NewFileSystemDelegate fsFactory)
{
    var fs = fsFactory();
    fs.CreateDirectory(@"SOMEDIR\CHILD\GCHILD");
    fs.CreateDirectory(@"AAA.DIR");

    // Create (and immediately close) a handful of empty files.
    using (fs.OpenFile(@"FOO.TXT", FileMode.Create)) { }
    using (fs.OpenFile(@"SOMEDIR\CHILD.TXT", FileMode.Create)) { }
    using (fs.OpenFile(@"SOMEDIR\FOO.TXT", FileMode.Create)) { }
    using (fs.OpenFile(@"SOMEDIR\CHILD\GCHILD\BAR.TXT", FileMode.Create)) { }

    var rootFiles = fs.Root.GetFiles();
    Assert.AreEqual(1, rootFiles.Length);
    Assert.AreEqual("FOO.TXT", rootFiles[0].FullName);
    Assert.AreEqual(2, fs.Root.GetDirectories("SOMEDIR")[0].GetFiles("*.TXT").Length);
    Assert.AreEqual(4, fs.Root.GetFiles("*.TXT", SearchOption.AllDirectories).Length);
    // Directories are not returned by GetFiles, even when the pattern matches.
    Assert.AreEqual(0, fs.Root.GetFiles("*.DIR", SearchOption.AllDirectories).Length);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void GetFileSystemInfos(NewFileSystemDelegate fsFactory)
{
    var fs = fsFactory();
    fs.CreateDirectory(@"SOMEDIR\CHILD\GCHILD");
    fs.CreateDirectory(@"AAA.EXT");

    using (fs.OpenFile(@"FOO.TXT", FileMode.Create)) { }
    using (fs.OpenFile(@"SOMEDIR\CHILD.EXT", FileMode.Create)) { }
    using (fs.OpenFile(@"SOMEDIR\FOO.TXT", FileMode.Create)) { }
    using (fs.OpenFile(@"SOMEDIR\CHILD\GCHILD\BAR.TXT", FileMode.Create)) { }

    // Root holds SOMEDIR, AAA.EXT and FOO.TXT.
    Assert.AreEqual(3, fs.Root.GetFileSystemInfos().Length);
    // Patterns apply to both files and directories.
    Assert.AreEqual(1, fs.Root.GetFileSystemInfos("*.EXT").Length);
    Assert.AreEqual(2, fs.Root.GetFileSystemInfos("*.?XT").Length);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void Parent(NewFileSystemDelegate fsFactory)
{
    // A first-level directory's parent is the root directory.
    var fs = fsFactory();
    fs.CreateDirectory("SOMEDIR");

    var someDir = fs.Root.GetDirectories("SOMEDIR")[0];
    Assert.AreEqual(fs.Root, someDir.Parent);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void Parent_Root(NewFileSystemDelegate fsFactory)
{
    // The root directory has no parent.
    var fs = fsFactory();

    Assert.IsNull(fs.Root.Parent);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void CreationTimeUtc(NewFileSystemDelegate fsFactory)
{
    var fs = fsFactory();
    fs.CreateDirectory("DIR");

    // Creation time should be 'now' (UTC), give or take ten seconds.
    var creationTime = fs.Root.GetDirectories("DIR")[0].CreationTimeUtc;
    Assert.GreaterOrEqual(DateTime.UtcNow, creationTime);
    Assert.LessOrEqual(DateTime.UtcNow.Subtract(TimeSpan.FromSeconds(10)), creationTime);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void CreationTime(NewFileSystemDelegate fsFactory)
{
    var fs = fsFactory();
    fs.CreateDirectory("DIR");

    // Creation time should be 'now' (local time), give or take ten seconds.
    var creationTime = fs.Root.GetDirectories("DIR")[0].CreationTime;
    Assert.GreaterOrEqual(DateTime.Now, creationTime);
    Assert.LessOrEqual(DateTime.Now.Subtract(TimeSpan.FromSeconds(10)), creationTime);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void LastAccessTime(NewFileSystemDelegate fsFactory)
{
    var fs = fsFactory();
    fs.CreateDirectory("DIR");
    var dirInfo = fs.GetDirectoryInfo("DIR");

    // Back-date the access time, then touch the directory by adding a child;
    // the access time must move forward past the back-dated value.
    var backdated = DateTime.Now - TimeSpan.FromDays(2);
    dirInfo.LastAccessTime = backdated;
    fs.CreateDirectory(@"DIR\CHILD");

    Assert.Less(backdated, dirInfo.LastAccessTime);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void LastWriteTime(NewFileSystemDelegate fsFactory)
{
    var fs = fsFactory();
    fs.CreateDirectory("DIR");
    var dirInfo = fs.GetDirectoryInfo("DIR");

    // Back-date the write time, then modify the directory by adding a child;
    // the write time must move forward past the back-dated value.
    var backdated = DateTime.Now - TimeSpan.FromMinutes(10);
    dirInfo.LastWriteTime = backdated;
    fs.CreateDirectory(@"DIR\CHILD");

    Assert.Less(backdated, dirInfo.LastWriteTime);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void Equals(NewFileSystemDelegate fsFactory)
{
    // Two DiscDirectoryInfo instances for the same path compare equal.
    var fs = fsFactory();

    var first = fs.GetDirectoryInfo("foo");
    var second = fs.GetDirectoryInfo("foo");
    Assert.AreEqual(first, second);
}
[TestCaseSource(typeof(FileSystemSource), "ReadWriteFileSystems")]
public void RootBehaviour(NewFileSystemDelegate fsFactory)
{
    DiscFileSystem fs = fsFactory();

    // Not all file systems can modify the root directory, so we just make
    // sure 'get' and 'no-op' change work. The self-assignments below are
    // intentional: each one reads a root attribute and writes it back
    // unchanged, verifying both accessors without altering state.
    fs.Root.Attributes = fs.Root.Attributes;
    fs.Root.CreationTimeUtc = fs.Root.CreationTimeUtc;
    fs.Root.LastAccessTimeUtc = fs.Root.LastAccessTimeUtc;
    fs.Root.LastWriteTimeUtc = fs.Root.LastWriteTimeUtc;
}
}
}
| |
// Python Tools for Visual Studio
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.PythonTools.Debugger;
using Microsoft.PythonTools.Debugger.DebugEngine;
using Microsoft.PythonTools.Infrastructure;
using Microsoft.PythonTools.Intellisense;
using Microsoft.PythonTools.Options;
using Microsoft.PythonTools.Parsing;
using Microsoft.VisualStudio.InteractiveWindow;
using Microsoft.VisualStudio.InteractiveWindow.Commands;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Utilities;
using Microsoft.VisualStudioTools;
namespace Microsoft.PythonTools.Repl {
[InteractiveWindowRole("Debug")]
[ContentType(PythonCoreConstants.ContentType)]
[ContentType(PredefinedInteractiveCommandsContentTypes.InteractiveCommandContentTypeName)]
internal class PythonDebugReplEvaluator :
IInteractiveEvaluator,
IPythonInteractiveEvaluator,
IMultipleScopeEvaluator,
IPythonInteractiveIntellisense {
private PythonDebugProcessReplEvaluator _activeEvaluator;
private readonly Dictionary<int, PythonDebugProcessReplEvaluator> _evaluators = new Dictionary<int, PythonDebugProcessReplEvaluator>(); // process id to evaluator
private readonly Dictionary<int, Task> _attachingTasks = new Dictionary<int, Task>();
private EnvDTE.DebuggerEvents _debuggerEvents;
private readonly PythonToolsService _pyService;
private readonly IServiceProvider _serviceProvider;
private IInteractiveWindowCommands _commands;
private Uri _documentUri;
private static readonly string currentPrefix = Strings.DebugReplCurrentIndicator;
private static readonly string notCurrentPrefix = Strings.DebugReplNotCurrentIndicator;
// Wires this evaluator to the debug engine's attach/detach events and, when
// running inside VS, to the DTE debugger break-mode notifications.
public PythonDebugReplEvaluator(IServiceProvider serviceProvider) {
    _serviceProvider = serviceProvider;
    _pyService = serviceProvider.GetPythonToolsService();
    AD7Engine.EngineAttached += new EventHandler<AD7EngineEventArgs>(OnEngineAttached);
    AD7Engine.EngineDetaching += new EventHandler<AD7EngineEventArgs>(OnEngineDetaching);
    var dte = _serviceProvider.GetDTE();
    if (dte != null) {
        // DTE is null when running outside of VS (e.g. tests); only hook
        // debugger events when it is available.
        _debuggerEvents = dte.Events.DebuggerEvents;
        _debuggerEvents.OnEnterBreakMode += new EnvDTE._dispDebuggerEvents_OnEnterBreakModeEventHandler(OnEnterBreakMode);
    }
}
// Options governing the Debug Interactive window behavior.
internal PythonInteractiveOptions CurrentOptions => _pyService.DebugInteractiveOptions;
// True when the debugger is stopped at a break. When DTE is unavailable
// (running outside of VS, e.g. tests) we report true so the REPL stays usable.
private bool IsInDebugBreakMode() {
    var dte = _serviceProvider.GetDTE();
    return dte == null || dte.Debugger.CurrentMode == EnvDTE.dbgDebugMode.dbgBreakMode;
}
// Fire-and-forget wrapper; exceptions are routed to the standard handler.
private void OnReadyForInput() {
    OnReadyForInputAsync().HandleAllExceptions(_serviceProvider, GetType()).DoNotWait();
}
// Attach to any debugged Python process we have not seen yet, but only while
// the debugger is stopped at a break.
private async Task OnReadyForInputAsync() {
    if (!IsInDebugBreakMode()) {
        return;
    }
    foreach (var engine in AD7Engine.GetEngines()) {
        var process = engine.Process;
        if (process != null && !_evaluators.ContainsKey(process.Id)) {
            await AttachProcessAsync(process, engine);
        }
    }
}
// DTE event handler: when the debugger breaks, attach to the broken process
// (if not already attached) and make its broken thread the REPL's current one.
private void OnEnterBreakMode(EnvDTE.dbgEventReason Reason, ref EnvDTE.dbgExecutionAction ExecutionAction) {
    int activeProcessId = _serviceProvider.GetDTE().Debugger.CurrentProcess.ProcessID;
    // Find the Python engine (if any) debugging the process that broke.
    AD7Engine engine = AD7Engine.GetEngines().SingleOrDefault(target => target.Process != null && target.Process.Id == activeProcessId);
    if (engine != null) {
        // Map the VS (native) thread id to the corresponding Python thread id.
        long? activeThreadId = ((IThreadIdMapper)engine).GetPythonThreadId((uint)_serviceProvider.GetDTE().Debugger.CurrentThread.ID);
        if (activeThreadId != null) {
            // Attach first, then switch to the broken thread (non-verbose).
            AttachProcessAsync(engine.Process, engine).ContinueWith(t => {
                ChangeActiveThread(activeThreadId.Value, false);
            }).HandleAllExceptions(_serviceProvider, GetType()).DoNotWait();
        }
    }
}
// Intentionally empty: no action is needed when the language buffer changes.
public void ActiveLanguageBufferChanged(ITextBuffer currentBuffer, ITextBuffer previousBuffer) {
}
// Decides whether the current input is a complete, executable submission.
public bool CanExecuteCode(string text) {
    // REPL window commands are always executable.
    if (_commands.InCommand) {
        return true;
    }
    var evaluator = _activeEvaluator;
    if (evaluator != null) {
        return evaluator.CanExecuteCode(text);
    }
    if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        return CanExecuteCodeExperimental(text);
    }
    return true;
}
// Determines whether 'text' forms a complete, executable chunk of Python
// code by parsing it directly. Used with the experimental debug adapter,
// where no per-process evaluator is available.
// Fix: removed the dead stores to the local parse result ('pr' was
// initialized to Complete and reassigned to Empty without ever being read).
bool CanExecuteCodeExperimental(string text) {
    if (string.IsNullOrEmpty(text)) {
        return true;
    }
    // Whitespace-only input terminated by a newline is an empty, executable
    // submission.
    if (string.IsNullOrWhiteSpace(text) && text.EndsWithOrdinal("\n")) {
        return true;
    }
    var parser = Parser.CreateParser(new StringReader(text), PythonLanguageVersion.None);
    ParseResult pr;
    parser.ParseInteractiveCode(out pr);
    if (pr == ParseResult.IncompleteStatement || pr == ParseResult.Empty) {
        // Incomplete statements only execute once the user ends with a newline.
        return text.EndsWithOrdinal("\n");
    }
    // An incomplete token (e.g. unterminated string) can never be executed.
    return pr != ParseResult.IncompleteToken;
}
// Executes user input: window commands take precedence; otherwise the code is
// sent to the active process evaluator, or (experimental debugger only)
// evaluated through the debug adapter protocol.
public Task<ExecutionResult> ExecuteCodeAsync(string text) {
    var res = _commands.TryExecuteCommand();
    if (res != null) {
        return res;
    }
    // Code can only run while the debuggee is stopped in the debugger.
    if (!IsInDebugBreakMode()) {
        NoExecutionIfNotStoppedInDebuggerError();
        return ExecutionResult.Succeeded;
    }
    if (_activeEvaluator != null) {
        return _activeEvaluator.ExecuteCodeAsync(text);
    } else {
        if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
            // No per-process evaluator: evaluate on the current debugger
            // thread via the adapter and echo the result/error to the window.
            var tid = _serviceProvider.GetDTE().Debugger.CurrentThread.ID;
            (bool isSuccessful, string message) result = CustomDebugAdapterProtocolExtension.EvaluateReplRequest(text, tid);
            if (!result.isSuccessful) {
                CurrentWindow.WriteError(result.message);
                return ExecutionResult.Failed;
            }
            CurrentWindow.Write(result.message);
        }
    }
    return ExecutionResult.Succeeded;
}
// Runs a file in the debuggee. Only available with a live process evaluator
// while the debugger is stopped; all other paths report and return true.
public async Task<bool> ExecuteFileAsync(string filename, string extraArgs) {
    if (!IsInDebugBreakMode()) {
        NoExecutionIfNotStoppedInDebuggerError();
        return true;
    }
    if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        NotSupported();
        return true;
    }
    var evaluator = _activeEvaluator;
    if (evaluator == null) {
        return true;
    }
    return await evaluator.ExecuteFileAsync(filename, extraArgs);
}
// Aborting a debug REPL evaluation is not supported; report to the user.
public void AbortExecution() {
    CurrentWindow.WriteErrorLine(Strings.DebugReplAbortNotSupported);
}

// Resetting the debug REPL is not supported; report and still succeed.
public Task<ExecutionResult> Reset() {
    CurrentWindow.WriteErrorLine(Strings.DebugReplResetNotSupported);
    return ExecutionResult.Succeeded;
}

// Delegates clipboard formatting to the shared interactive evaluator helper.
public string FormatClipboard() {
    return PythonCommonInteractiveEvaluator.FormatClipboard(_serviceProvider, CurrentWindow);
}

// Nothing to release here; evaluator lifetime is managed via attach/detach.
public void Dispose() {
}
// Returns the "current frame" pseudo-scope plus the active evaluator's
// scopes, or an empty sequence when no process is attached.
public IEnumerable<string> GetAvailableScopes() {
    if (_activeEvaluator == null) {
        return Enumerable.Empty<string>();
    }
    return new[] { Strings.DebugReplCurrentFrameScope }.Concat(_activeEvaluator.GetAvailableScopes());
}
// Raised when the set of scopes returned by GetAvailableScopes changes.
public event EventHandler<EventArgs> AvailableScopesChanged;
// Raised when EnableMultipleScopes changes.
public event EventHandler<EventArgs> MultipleScopeSupportChanged;

public void SetScope(string scopeName) {
    if (_activeEvaluator != null) {
        _activeEvaluator.SetScope(scopeName);
    } else {
        // No active evaluator: scope changes are silently ignored.
    }
}

// Scope information is proxied from the active evaluator; empty when detached.
public string CurrentScopeName => _activeEvaluator?.CurrentScopeName ?? "";
public string CurrentScopePath => _activeEvaluator?.CurrentScopePath ?? "";
public bool EnableMultipleScopes => _activeEvaluator?.EnableMultipleScopes ?? false;

public bool LiveCompletionsOnly {
    get { return CurrentOptions.LiveCompletionsOnly; }
}
public IInteractiveWindow CurrentWindow { get; set; }

// Analyzer of the active process's evaluator, or null when detached.
public VsProjectAnalyzer Analyzer => _activeEvaluator?.Analyzer;

public Task<VsProjectAnalyzer> GetAnalyzerAsync() {
    if (_activeEvaluator != null) {
        return _activeEvaluator.GetAnalyzerAsync();
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        // With the experimental adapter, resolve the analyzer from the
        // project that owns the file of the current stack frame.
        var tid = _serviceProvider.GetDTE().Debugger.CurrentThread.ID;
        var currentFrameFilename = CustomDebugAdapterProtocolExtension.GetCurrentFrameFilename(tid);
        var project = _serviceProvider.GetProjectContainingFile(currentFrameFilename);
        if (project != null) {
            return project.GetAnalyzerAsync();
        }
    }
    // No analyzer available.
    return Task.FromResult<VsProjectAnalyzer>(null);
}
public Uri DocumentUri {
    get {
        if (_activeEvaluator != null) {
            return _activeEvaluator.DocumentUri;
        }
        // Lazily create and cache a synthetic URI for the window's own
        // buffer when no process is attached.
        if (_documentUri == null) {
            _documentUri = new Uri($"repl://{Guid.NewGuid()}/repl.py");
        }
        return _documentUri;
    }
}
public Uri NextDocumentUri() => _activeEvaluator?.NextDocumentUri();
// Treated as disconnected whenever no process evaluator is active.
public bool IsDisconnected => _activeEvaluator?.IsDisconnected ?? true;
public bool IsExecuting => _activeEvaluator?.IsExecuting ?? false;
public string DisplayName => Strings.DebugReplDisplayName;
// Scope/path pairs from the active evaluator; empty when detached.
public IEnumerable<KeyValuePair<string, string>> GetAvailableScopesAndPaths() {
    var evaluator = _activeEvaluator;
    return evaluator != null
        ? evaluator.GetAvailableScopesAndPaths()
        : Enumerable.Empty<KeyValuePair<string, string>>();
}
// Returns completion candidates for 'text'. With a process evaluator the
// request is forwarded directly; with the experimental debug adapter we
// evaluate "':'.join(dir(<text>))" in the debuggee and split the result.
// Fix: the adapter request previously sent the raw 'text' instead of the
// constructed dir() expression, leaving 'expression' unused and returning
// the evaluated value of 'text' rather than its member list.
public CompletionResult[] GetMemberNames(string text) {
    if (_activeEvaluator != null) {
        return _activeEvaluator.GetMemberNames(text);
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        var expression = string.Format(CultureInfo.InvariantCulture, "':'.join(dir({0}))", text ?? "");
        var tid = _serviceProvider.GetDTE().Debugger.CurrentThread.ID;
        (bool isSuccessful, string message) result = CustomDebugAdapterProtocolExtension.EvaluateReplRequest(expression, tid);
        if (result.isSuccessful) {
            var completionResults = result.message
                .Split(':')
                .Where(r => !string.IsNullOrEmpty(r))
                .Select(r => new CompletionResult(r, Interpreter.PythonMemberType.Field))
                .ToArray();
            return completionResults;
        }
    }
    return new CompletionResult[0];
}
// Signature help is only available with a live process evaluator.
public OverloadDoc[] GetSignatureDocumentation(string text) {
    var evaluator = _activeEvaluator;
    return evaluator != null ? evaluator.GetSignatureDocumentation(text) : new OverloadDoc[0];
}
// Steps the debugger out of the current frame and returns focus to the REPL.
internal void StepOut() {
    if (_activeEvaluator != null) {
        _activeEvaluator.StepOut();
        CurrentWindow.TextView.VisualElement.Focus();
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        // No evaluator: drive the VS debugger for the current thread directly.
        _serviceProvider.GetDTE().Debugger.CurrentThread.Parent.StepOut();
        CurrentWindow.TextView.VisualElement.Focus();
    } else {
        NoProcessError();
    }
}

// Steps the debugger into the next statement.
internal void StepInto() {
    if (_activeEvaluator != null) {
        _activeEvaluator.StepInto();
        CurrentWindow.TextView.VisualElement.Focus();
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        _serviceProvider.GetDTE().Debugger.CurrentThread.Parent.StepInto();
        CurrentWindow.TextView.VisualElement.Focus();
    } else {
        NoProcessError();
    }
}

// Steps the debugger over the next statement.
internal void StepOver() {
    if (_activeEvaluator != null) {
        _activeEvaluator.StepOver();
        CurrentWindow.TextView.VisualElement.Focus();
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        _serviceProvider.GetDTE().Debugger.CurrentThread.Parent.StepOver();
        CurrentWindow.TextView.VisualElement.Focus();
    } else {
        NoProcessError();
    }
}

// Resumes execution of the debuggee.
internal void Resume() {
    if (_activeEvaluator != null) {
        _activeEvaluator.Resume();
        CurrentWindow.TextView.VisualElement.Focus();
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        _serviceProvider.GetDTE().Debugger.CurrentThread.Parent.Go();
        CurrentWindow.TextView.VisualElement.Focus();
    } else {
        NoProcessError();
    }
}
// Moves one frame up (towards the caller) in the active evaluator's stack.
internal void FrameUp() {
    if (_activeEvaluator != null) {
        _activeEvaluator.FrameUp();
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        // Frame navigation is not available with the experimental debugger.
        NotSupported();
    } else {
        NoProcessError();
    }
}

// Moves one frame down (towards the callee) in the active evaluator's stack.
internal void FrameDown() {
    if (_activeEvaluator != null) {
        _activeEvaluator.FrameDown();
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        NotSupported();
    } else {
        NoProcessError();
    }
}
// Prints the id of the process currently driving the REPL, or "None".
internal void DisplayActiveProcess() {
    if (_activeEvaluator != null) {
        _activeEvaluator.WriteOutput(_activeEvaluator.ProcessId.ToString());
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        NotSupported();
    } else {
        CurrentWindow.WriteLine("None" + Environment.NewLine);
    }
}

// Prints the Python thread id the REPL is currently bound to.
internal void DisplayActiveThread() {
    if (_activeEvaluator != null) {
        _activeEvaluator.WriteOutput(_activeEvaluator.ThreadId.ToString());
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        NotSupported();
    } else {
        NoProcessError();
    }
}

// Prints the current frame id within the active thread.
internal void DisplayActiveFrame() {
    if (_activeEvaluator != null) {
        _activeEvaluator.WriteOutput(_activeEvaluator.FrameId.ToString());
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        NotSupported();
    } else {
        NoProcessError();
    }
}
// Makes the process with the given id the active REPL target.
// Fix: use TryGetValue instead of Keys.Contains followed by the indexer,
// avoiding the double dictionary lookup.
internal void ChangeActiveProcess(int id, bool verbose) {
    if (_evaluators.TryGetValue(id, out PythonDebugProcessReplEvaluator evaluator)) {
        SwitchProcess(evaluator.Process, verbose);
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        NotSupported();
    } else {
        CurrentWindow.WriteErrorLine(Strings.DebugReplInvalidProcessId.FormatUI(id));
    }
}
// Switches the REPL to the thread with the given Python thread id.
internal void ChangeActiveThread(long id, bool verbose) {
    if (_activeEvaluator != null) {
        // Thread ids are unique, so SingleOrDefault finds the thread or null.
        PythonThread thread = _activeEvaluator.GetThreads().SingleOrDefault(target => target.Id == id);
        if (thread != null) {
            _activeEvaluator.SwitchThread(thread, verbose);
        } else {
            CurrentWindow.WriteErrorLine(Strings.DebugReplInvalidThreadId.FormatUI(id));
        }
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        NotSupported();
    } else {
        NoProcessError();
    }
}

// Switches the REPL to the stack frame with the given frame id.
internal void ChangeActiveFrame(int id) {
    if (_activeEvaluator != null) {
        PythonStackFrame frame = _activeEvaluator.GetFrames().SingleOrDefault(target => target.FrameId == id);
        if (frame != null) {
            _activeEvaluator.SwitchFrame(frame);
        } else {
            CurrentWindow.WriteErrorLine(Strings.DebugReplInvalidFrameId.FormatUI(id));
        }
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        NotSupported();
    } else {
        NoProcessError();
    }
}
// Lists every attached process, marking the active one with currentPrefix.
internal void DisplayProcesses() {
    if (_activeEvaluator != null) {
        foreach (var target in _evaluators.Values) {
            if (target.Process != null) {
                _activeEvaluator.WriteOutput(Strings.DebugReplProcessesOutput.FormatUI(target.Process.Id, target.Process.LanguageVersion, target.Process.Id == _activeEvaluator.ProcessId ? currentPrefix : notCurrentPrefix));
            }
        }
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        NotSupported();
    }
}

// Lists the active process's threads, marking the current one.
internal void DisplayThreads() {
    if (_activeEvaluator != null) {
        foreach (var target in _activeEvaluator.GetThreads()) {
            _activeEvaluator.WriteOutput(Strings.DebugReplThreadsOutput.FormatUI(target.Id, target.Name, target.Id == _activeEvaluator.ThreadId ? currentPrefix : notCurrentPrefix));
        }
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        NotSupported();
    } else {
        NoProcessError();
    }
}

// Lists the current thread's stack frames, marking the current one.
internal void DisplayFrames() {
    if (_activeEvaluator != null) {
        foreach (var target in _activeEvaluator.GetFrames()) {
            _activeEvaluator.WriteOutput(Strings.DebugReplFramesOutput.FormatUI(target.FrameId, target.FunctionName, target.FrameId == _activeEvaluator.FrameId ? currentPrefix : notCurrentPrefix));
        }
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        NotSupported();
    } else {
        NoProcessError();
    }
}
// Engine attach notification: marshal to the UI thread (attach bookkeeping
// asserts UI-thread affinity) and attach asynchronously.
private void OnEngineAttached(object sender, AD7EngineEventArgs e) {
    _serviceProvider.GetUIThread().InvokeAsync(async () => {
        await AttachProcessAsync(e.Engine.Process, e.Engine);
    }).HandleAllExceptions(_serviceProvider, GetType()).DoNotWait();
}

// Engine detach notification: remove the process's evaluator on the UI thread.
private void OnEngineDetaching(object sender, AD7EngineEventArgs e) {
    _serviceProvider.GetUIThread().InvokeAsync(async () => {
        await DetachProcessAsync(e.Engine.Process);
    }).HandleAllExceptions(_serviceProvider, GetType()).DoNotWait();
}

// A debuggee exiting is treated the same as a detach for REPL purposes.
private void OnProcessExited(object sender, ProcessExitedEventArgs e) {
    _serviceProvider.GetUIThread().InvokeAsync(async () => {
        await DetachProcessAsync((PythonProcess)sender);
    }).HandleAllExceptions(_serviceProvider, GetType()).DoNotWait();
}
// Makes the given (already attached) process's evaluator the active one and
// notifies listeners. Indexing assumes the process is present in _evaluators.
internal void SwitchProcess(PythonProcess process, bool verbose) {
    var newEvaluator = _evaluators[process.Id];
    if (newEvaluator != _activeEvaluator) {
        _activeEvaluator = newEvaluator;
        ActiveProcessChanged();
        if (verbose) {
            CurrentWindow.WriteLine(Strings.DebugReplSwitchProcessOutput.FormatUI(process.Id));
        }
    } else if (CustomDebugAdapterProtocolExtension.CanUseExperimental()) {
        // NOTE(review): this branch runs when the target evaluator is already
        // active AND the experimental debugger is available; reporting "not
        // supported" here looks like a copy/paste artifact — confirm intent.
        NotSupported();
    }
}
// Creates (or reuses) a REPL evaluator for 'process' and makes it active.
internal async Task AttachProcessAsync(PythonProcess process, IThreadIdMapper threadIdMapper) {
    // The fact that this is only called from UI thread is no guarantee
    // that there won't be more than one "instance" of this method in
    // progress at any time (though only one is executing while others are paused).
    // It's possible because this is an async method with await(s), and
    // UI thread execution will temporarily continue somewhere else when
    // await is used, and that somewhere else can be code that calls
    // into this method with the same process id!
    // The more relevant trigger for this cooperative multitasking is
    // the await on evaluator.InitializeAsync, and when that happens
    // the evaluator is not in the _evaluators dictionary yet.
    // If a second caller comes in (on the same UI thread) during that
    // await, it gets past the first check because it's not in _evaluators,
    // and then checks the _attachingTasks dictionary and know to wait
    // for that instead of creating a new evaluator.
    // Note that adding the uninitialized evaluator to the _evaluators
    // dictionary would be a potentially bug prone solution, as other
    // code may try to use it before it's fully initialized.
    _serviceProvider.GetUIThread().MustBeCalledFromUIThread();
    if (_evaluators.ContainsKey(process.Id)) {
        // Process is already attached, so just switch to it if needed
        SwitchProcess(process, false);
        return;
    }
    // Keep track of evaluators that are in progress of attaching, and if
    // we are getting called to attach for one that is already in progress,
    // just wait for it to finish before returning.
    // Important: dictionary must be checked (and updated) before any
    // await call to avoid race condition.
    Task attachingTask;
    TaskCompletionSource<object> attachingTcs = null;
    if (_attachingTasks.TryGetValue(process.Id, out attachingTask)) {
        await attachingTask;
        return;
    } else {
        attachingTcs = new TaskCompletionSource<object>();
        _attachingTasks.Add(process.Id, attachingTcs.Task);
    }
    // Detach automatically when the debuggee exits.
    process.ProcessExited += new EventHandler<ProcessExitedEventArgs>(OnProcessExited);
    var evaluator = new PythonDebugProcessReplEvaluator(_serviceProvider, process, threadIdMapper) {
        CurrentWindow = CurrentWindow
    };
    evaluator.AvailableScopesChanged += new EventHandler<EventArgs>(evaluator_AvailableScopesChanged);
    evaluator.MultipleScopeSupportChanged += new EventHandler<EventArgs>(evaluator_MultipleScopeSupportChanged);
    await evaluator.InitializeAsync();
    _evaluators.Add(process.Id, evaluator);
    _activeEvaluator = evaluator;
    // Only refresh available scopes after the active evaluator has
    // been changed, because that's where the UI will look.
    await evaluator.RefreshAvailableScopes();
    // Release any callers waiting on this attach, then clear the entry.
    attachingTcs.SetResult(null);
    _attachingTasks.Remove(process.Id);
}
// Removes the evaluator for 'process', clearing it if it was the active one.
internal async Task DetachProcessAsync(PythonProcess process) {
    _serviceProvider.GetUIThread().MustBeCalledFromUIThread();
    int id = process.Id;
    PythonDebugProcessReplEvaluator evaluator;
    if (_evaluators.TryGetValue(id, out evaluator)) {
        // Unsubscribe before dropping the evaluator to avoid stale callbacks.
        evaluator.AvailableScopesChanged -= new EventHandler<EventArgs>(evaluator_AvailableScopesChanged);
        evaluator.MultipleScopeSupportChanged -= new EventHandler<EventArgs>(evaluator_MultipleScopeSupportChanged);
        _evaluators.Remove(id);
        if (_activeEvaluator == evaluator) {
            _activeEvaluator = null;
        }
        ActiveProcessChanged();
    }
}
// Forwards an evaluator's scope-support change to this evaluator's listeners.
private void evaluator_MultipleScopeSupportChanged(object sender, EventArgs e) =>
    MultipleScopeSupportChanged?.Invoke(this, EventArgs.Empty);

// Forwards an evaluator's available-scopes change to this evaluator's listeners.
private void evaluator_AvailableScopesChanged(object sender, EventArgs e) =>
    AvailableScopesChanged?.Invoke(this, EventArgs.Empty);

// A process switch may change both scope support and the set of scopes.
private void ActiveProcessChanged() {
    MultipleScopeSupportChanged?.Invoke(this, EventArgs.Empty);
    AvailableScopesChanged?.Invoke(this, EventArgs.Empty);
}

private void NoProcessError() =>
    CurrentWindow.WriteErrorLine(Strings.DebugReplNoProcessError);

private void NotSupported() =>
    CurrentWindow.WriteError(Strings.DebugReplFeatureNotSupportedWithExperimentalDebugger);

private void NoExecutionIfNotStoppedInDebuggerError() =>
    CurrentWindow.WriteErrorLine(Strings.DebugReplNoExecutionIfNotStoppedInDebuggerError);
// One-time window setup: commands, Intellisense hookup, options and the
// ready-for-input handler that attaches to debugged processes.
public Task<ExecutionResult> InitializeAsync() {
    _commands = PythonInteractiveEvaluator.GetInteractiveCommands(_serviceProvider, CurrentWindow, this);
    var langBuffer = CurrentWindow.CurrentLanguageBuffer;
    if (langBuffer != null) {
        // Reinitializing, and our new language buffer does not automatically
        // get connected to the Intellisense controller. Let's fix that.
        var controller = IntellisenseControllerProvider.GetController(CurrentWindow.TextView);
        controller?.ConnectSubjectBuffer(langBuffer);
    }
    CurrentWindow.TextView.Options.SetOptionValue(InteractiveWindowOptions.SmartUpDown, CurrentOptions.UseSmartHistory);
    CurrentWindow.WriteLine(Strings.DebugReplHelpMessage);
    // Attach to debugged processes whenever the window becomes ready for input.
    CurrentWindow.ReadyForInput += OnReadyForInput;
    return ExecutionResult.Succeeded;
}
// Reset is unsupported for the debug REPL; delegate to Reset() which reports it.
public Task<ExecutionResult> ResetAsync(bool initialize = true) => Reset();

// Prompt comes from the active evaluator; null when no process is attached.
public string GetPrompt() => _activeEvaluator?.GetPrompt();
}
internal static class PythonDebugReplEvaluatorExtensions {
    // Resolves the window's evaluator as a PythonDebugReplEvaluator, looking
    // through a SelectableReplEvaluator wrapper if necessary; null otherwise.
    public static PythonDebugReplEvaluator GetPythonDebugReplEvaluator(this IInteractiveWindow window) {
        return window?.Evaluator as PythonDebugReplEvaluator
            ?? (window?.Evaluator as SelectableReplEvaluator)?.Evaluator as PythonDebugReplEvaluator;
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.Azure.AcceptanceTestsAzureSpecials
{
using System.Linq;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// SubscriptionInCredentialsOperations operations.
/// </summary>
internal partial class SubscriptionInCredentialsOperations : Microsoft.Rest.IServiceOperations<AutoRestAzureSpecialParametersTestClient>, ISubscriptionInCredentialsOperations
{
/// <summary>
/// Initializes a new instance of the SubscriptionInCredentialsOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
internal SubscriptionInCredentialsOperations(AutoRestAzureSpecialParametersTestClient client)
{
    if (client == null)
    {
        // nameof keeps the reported parameter name correct under renames.
        throw new System.ArgumentNullException(nameof(client));
    }
    this.Client = client;
}
/// <summary>
/// Gets a reference to the AutoRestAzureSpecialParametersTestClient
/// </summary>
public AutoRestAzureSpecialParametersTestClient Client { get; private set; }
/// <summary>
/// POST method with subscriptionId modeled in credentials. Set the
/// credential subscriptionId to '1234-5678-9012-3456' to succeed
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse> PostMethodGlobalValidWithHttpMessagesAsync(System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
    if (this.Client.SubscriptionId == null)
    {
        throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // Tracing
    bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
        System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
        tracingParameters.Add("cancellationToken", cancellationToken);
        Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "PostMethodGlobalValid", tracingParameters);
    }
    // Construct URL
    var _baseUrl = this.Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/subscriptionId/method/string/none/path/global/1234-5678-9012-3456/{subscriptionId}").ToString();
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(this.Client.SubscriptionId));
    // This operation has no query parameters; the list and the check below
    // are part of the generated-code template.
    System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
    System.Net.Http.HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new System.Net.Http.HttpMethod("POST");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (this.Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Custom headers replace (not merge with) any header of the same name.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null;
    // Set Credentials
    if (this.Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // Only HTTP 200 is a success for this operation; anything else throws
    // ErrorException with the (optionally deserialized) error body attached.
    if ((int)_statusCode != 200)
    {
        var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex.Body = _errorBody;
            }
        }
        catch (Newtonsoft.Json.JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_shouldTrace)
        {
            Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new Microsoft.Rest.Azure.AzureOperationResponse();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    if (_shouldTrace)
    {
        Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// POST method with subscriptionId modeled in credentials. Set the
/// credential subscriptionId to null, and client-side validation should
/// prevent you from making this call
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse> PostMethodGlobalNullWithHttpMessagesAsync(System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
// NOTE(review): this appears to be auto-generated (AutoRest-style) client code — confirm
// before hand-editing; regeneration would overwrite manual changes here.
if (this.Client.SubscriptionId == null)
{
throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// Tracing
bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
tracingParameters.Add("cancellationToken", cancellationToken);
Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "PostMethodGlobalNull", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/subscriptionId/method/string/none/path/global/null/{subscriptionId}").ToString();
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(this.Client.SubscriptionId));
// This operation defines no query parameters; the list stays empty and the branch below is never taken.
System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
System.Net.Http.HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new System.Net.Http.HttpMethod("POST");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (this.Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
}
// Custom headers override any header of the same name set above.
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
// POST with an empty body: _requestContent stays null and is only passed to
// HttpRequestMessageWrapper below for error diagnostics.
string _requestContent = null;
// Set Credentials
if (this.Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// Any status other than 200 is a failure: wrap request/response in the exception,
// trace, dispose the transport objects, and throw.
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (Newtonsoft.Json.JsonException)
{
// Ignore the exception
}
ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new Microsoft.Rest.Azure.AzureOperationResponse();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// POST method with subscriptionId modeled in credentials. Set the
/// credential subscriptionId to '1234-5678-9012-3456' to succeed
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse> PostMethodGlobalNotProvidedValidWithHttpMessagesAsync(System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
// NOTE(review): this appears to be auto-generated (AutoRest-style) client code — confirm
// before hand-editing; regeneration would overwrite manual changes here.
if (this.Client.SubscriptionId == null)
{
throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
if (this.Client.ApiVersion == null)
{
throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
// Tracing
bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
tracingParameters.Add("cancellationToken", cancellationToken);
Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "PostMethodGlobalNotProvidedValid", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/subscriptionId/method/string/none/path/globalNotProvided/1234-5678-9012-3456/{subscriptionId}").ToString();
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(this.Client.SubscriptionId));
// api-version is the only query parameter for this operation.
System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
// ApiVersion was already validated non-null above; this guard is redundant but harmless.
if (this.Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(this.Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
System.Net.Http.HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new System.Net.Http.HttpMethod("POST");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (this.Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
}
// Custom headers override any header of the same name set above.
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
// POST with an empty body: _requestContent stays null and is only passed to
// HttpRequestMessageWrapper below for error diagnostics.
string _requestContent = null;
// Set Credentials
if (this.Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// Any status other than 200 is a failure: wrap request/response in the exception,
// trace, dispose the transport objects, and throw.
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (Newtonsoft.Json.JsonException)
{
// Ignore the exception
}
ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new Microsoft.Rest.Azure.AzureOperationResponse();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// POST method with subscriptionId modeled in credentials. Set the
/// credential subscriptionId to '1234-5678-9012-3456' to succeed
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse> PostPathGlobalValidWithHttpMessagesAsync(System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
// NOTE(review): this appears to be auto-generated (AutoRest-style) client code — confirm
// before hand-editing; regeneration would overwrite manual changes here.
if (this.Client.SubscriptionId == null)
{
throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// Tracing
bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
tracingParameters.Add("cancellationToken", cancellationToken);
Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "PostPathGlobalValid", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/subscriptionId/path/string/none/path/global/1234-5678-9012-3456/{subscriptionId}").ToString();
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(this.Client.SubscriptionId));
// This operation defines no query parameters; the list stays empty and the branch below is never taken.
System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
System.Net.Http.HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new System.Net.Http.HttpMethod("POST");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (this.Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
}
// Custom headers override any header of the same name set above.
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
// POST with an empty body: _requestContent stays null and is only passed to
// HttpRequestMessageWrapper below for error diagnostics.
string _requestContent = null;
// Set Credentials
if (this.Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// Any status other than 200 is a failure: wrap request/response in the exception,
// trace, dispose the transport objects, and throw.
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (Newtonsoft.Json.JsonException)
{
// Ignore the exception
}
ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new Microsoft.Rest.Azure.AzureOperationResponse();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// POST method with subscriptionId modeled in credentials. Set the
/// credential subscriptionId to '1234-5678-9012-3456' to succeed
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="ErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async System.Threading.Tasks.Task<Microsoft.Rest.Azure.AzureOperationResponse> PostSwaggerGlobalValidWithHttpMessagesAsync(System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> customHeaders = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
// NOTE(review): this appears to be auto-generated (AutoRest-style) client code — confirm
// before hand-editing; regeneration would overwrite manual changes here.
if (this.Client.SubscriptionId == null)
{
throw new Microsoft.Rest.ValidationException(Microsoft.Rest.ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// Tracing
bool _shouldTrace = Microsoft.Rest.ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = Microsoft.Rest.ServiceClientTracing.NextInvocationId.ToString();
System.Collections.Generic.Dictionary<string, object> tracingParameters = new System.Collections.Generic.Dictionary<string, object>();
tracingParameters.Add("cancellationToken", cancellationToken);
Microsoft.Rest.ServiceClientTracing.Enter(_invocationId, this, "PostSwaggerGlobalValid", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "azurespecials/subscriptionId/swagger/string/none/path/global/1234-5678-9012-3456/{subscriptionId}").ToString();
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(this.Client.SubscriptionId));
// This operation defines no query parameters; the list stays empty and the branch below is never taken.
System.Collections.Generic.List<string> _queryParameters = new System.Collections.Generic.List<string>();
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
System.Net.Http.HttpRequestMessage _httpRequest = new System.Net.Http.HttpRequestMessage();
System.Net.Http.HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new System.Net.Http.HttpMethod("POST");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (this.Client.GenerateClientRequestId != null && this.Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (this.Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", this.Client.AcceptLanguage);
}
// Custom headers override any header of the same name set above.
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
// POST with an empty body: _requestContent stays null and is only passed to
// HttpRequestMessageWrapper below for error diagnostics.
string _requestContent = null;
// Set Credentials
if (this.Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await this.Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// Any status other than 200 is a failure: wrap request/response in the exception,
// trace, dispose the transport objects, and throw.
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (Newtonsoft.Json.JsonException)
{
// Ignore the exception
}
ex.Request = new Microsoft.Rest.HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new Microsoft.Rest.HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new Microsoft.Rest.Azure.AzureOperationResponse();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
Microsoft.Rest.ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Claims;
using System.Threading;
using System.Threading.Tasks;
using Athene.Inventory.Web.Models;
using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Identity.EntityFrameworkCore;
using Athene.Inventory.Abstractions;
using Athene.Inventory.Abstractions.Models;
using Athene.Inventory.Data;
namespace Athene.Inventory.Web.Models
{
public class InMemoryStore<TUser, TRole> :
IUserStore<TUser>,
IUserLoginStore<TUser>,
IUserClaimStore<TUser>,
IUserRoleStore<TUser>,
IUserPasswordStore<TUser>,
IQueryableRoleStore<TRole>,
IUserEmailStore<TUser>,
IUserRepository<TUser>
where TRole : IdentityRole
where TUser : User
{
// External-login index: "loginProvider|providerKey" -> user (key built by GetLoginKey).
private readonly Dictionary<string, TUser> _logins = new Dictionary<string, TUser>();
// Users keyed by User.Id.
private readonly Dictionary<string, TUser> _users = new Dictionary<string, TUser>();
// Roles keyed by IdentityRole.Id.
private readonly Dictionary<string, TRole> _roles = new Dictionary<string, TRole>();
/// <summary>All stored users as a queryable view over the backing dictionary.</summary>
public IQueryable<TUser> Users => _users.Values.AsQueryable();
/// <summary>All stored roles as a queryable view over the backing dictionary.</summary>
public IQueryable<TRole> Roles => _roles.Values.AsQueryable();
/// <summary>Builds the composite dictionary key for an external login entry.</summary>
private string GetLoginKey(string loginProvider, string providerKey)
{
    return string.Concat(loginProvider, "|", providerKey);
}
/// <summary>Records an external login on the user and indexes it for FindByLoginAsync.</summary>
public virtual Task AddLoginAsync(TUser user, UserLoginInfo login,
    CancellationToken cancellationToken = default(CancellationToken))
{
    var entry = new IdentityUserLogin<string>
    {
        UserId = user.Id,
        ProviderKey = login.ProviderKey,
        LoginProvider = login.LoginProvider,
        ProviderDisplayName = login.ProviderDisplayName
    };
    user.Logins.Add(entry);
    // Overwrites any existing mapping for the same provider/key pair.
    var key = GetLoginKey(login.LoginProvider, login.ProviderKey);
    _logins[key] = user;
    return Task.FromResult(0);
}
/// <summary>Stores the user keyed by its Id (overwriting any existing entry); always succeeds.</summary>
public Task<IdentityResult> CreateAsync(TUser user, CancellationToken cancellationToken = default(CancellationToken))
{
    var key = user.Id;
    _users[key] = user;
    return Task.FromResult(IdentityResult.Success);
}
/// <summary>Replaces the stored user under its Id (upsert semantics); always succeeds.</summary>
public Task<IdentityResult> UpdateAsync(TUser user, CancellationToken cancellationToken = default(CancellationToken))
{
    var key = user.Id;
    _users[key] = user;
    return Task.FromResult(IdentityResult.Success);
}
/// <summary>Removes the user; throws InvalidOperationException for null or unknown users.</summary>
public Task<IdentityResult> DeleteAsync(TUser user, CancellationToken cancellationToken = default(CancellationToken))
{
    // Remove returns false when the key is absent, covering the "unknown user" case.
    if (user != null && _users.Remove(user.Id))
    {
        return Task.FromResult(IdentityResult.Success);
    }
    throw new InvalidOperationException("Unknown user");
}
/// <summary>
/// Looks up a user by Id. Returns null when no user is stored under that Id;
/// throws ArgumentNullException when <paramref name="userId"/> is null (same as before).
/// </summary>
public Task<TUser> FindByIdAsync(string userId, CancellationToken cancellationToken = default(CancellationToken))
{
    // Single TryGetValue lookup instead of the original ContainsKey + indexer double lookup.
    TUser user;
    _users.TryGetValue(userId, out user);
    return Task.FromResult(user);
}
/// <summary>
/// Finds the user registered under the given external login, or null when none is indexed.
/// </summary>
public Task<TUser> FindByLoginAsync(string loginProvider, string providerKey, CancellationToken cancellationToken = default(CancellationToken))
{
    string key = GetLoginKey(loginProvider, providerKey);
    // Single TryGetValue lookup instead of the original ContainsKey + indexer double lookup.
    // Also returns null for keys whose value was nulled out, matching the old behavior.
    TUser user;
    _logins.TryGetValue(key, out user);
    return Task.FromResult(user);
}
/// <summary>
/// Finds a user by name. Despite the parameter name, the match is against
/// NormalizedUserName, mirroring how ASP.NET Core Identity invokes the store.
/// </summary>
public Task<TUser> FindByNameAsync(string userName, CancellationToken cancellationToken = default(CancellationToken))
{
    var match = _users.Values.FirstOrDefault(u => u.NormalizedUserName == userName);
    return Task.FromResult(match);
}
/// <summary>Projects the user's stored login entities into UserLoginInfo values.</summary>
public Task<IList<UserLoginInfo>> GetLoginsAsync(TUser user, CancellationToken cancellationToken = default(CancellationToken))
{
    var infos = new List<UserLoginInfo>();
    foreach (var entry in user.Logins)
    {
        infos.Add(new UserLoginInfo(entry.LoginProvider, entry.ProviderKey, entry.ProviderDisplayName));
    }
    return Task.FromResult<IList<UserLoginInfo>>(infos);
}
/// <summary>Returns the user's normalized (lookup) user name.</summary>
public Task<string> GetNormalizedUserNameAsync(TUser user, CancellationToken cancellationToken = default(CancellationToken))
    => Task.FromResult(user.NormalizedUserName);
/// <summary>Returns the user's store key (Id).</summary>
public Task<string> GetUserIdAsync(TUser user, CancellationToken cancellationToken = default(CancellationToken))
    => Task.FromResult(user.Id);
/// <summary>Returns the user's display user name.</summary>
public Task<string> GetUserNameAsync(TUser user, CancellationToken cancellationToken = default(CancellationToken))
    => Task.FromResult(user.UserName);
/// <summary>
/// Removes the matching login entity from the user and drops its lookup-index entry.
/// </summary>
public Task RemoveLoginAsync(TUser user, string loginProvider, string providerKey,
    CancellationToken cancellationToken = default(CancellationToken))
{
    var loginEntity =
        user.Logins.SingleOrDefault(
            l =>
                l.ProviderKey == providerKey && l.LoginProvider == loginProvider &&
                l.UserId == user.Id);
    if (loginEntity != null)
    {
        user.Logins.Remove(loginEntity);
    }
    // Fix: the original assigned null to the key instead of removing it, leaking a
    // stale entry whose ContainsKey stayed true. FindByLoginAsync still returns null
    // for the login either way, so callers observe the same results.
    _logins.Remove(GetLoginKey(loginProvider, providerKey));
    return Task.FromResult(0);
}
/// <summary>Stores the normalized user name on the user entity.</summary>
public Task SetNormalizedUserNameAsync(TUser user, string userName, CancellationToken cancellationToken = default(CancellationToken))
{
    user.NormalizedUserName = userName;
    return Task.FromResult(0);
}
/// <summary>Stores the display user name on the user entity.</summary>
public Task SetUserNameAsync(TUser user, string userName, CancellationToken cancellationToken = default(CancellationToken))
{
    user.UserName = userName;
    return Task.FromResult(0);
}
// Required by the store interfaces (IUserStore<TUser> etc.); this in-memory
// implementation holds no unmanaged or disposable state, so there is nothing to release.
public void Dispose()
{
}
// RoleId == roleName for InMemory
/// <summary>Assigns the user to the named role; unknown role names are silently ignored.</summary>
public Task AddToRoleAsync(TUser user, string role, CancellationToken cancellationToken = default(CancellationToken))
{
    var match = _roles.Values.SingleOrDefault(r => r.NormalizedName == role);
    if (match == null)
    {
        return Task.FromResult(0);
    }
    user.Roles.Add(new IdentityUserRole<string> { RoleId = match.Id, UserId = user.Id });
    return Task.FromResult(0);
}
// RoleId == roleName for InMemory
/// <summary>
/// Removes the user's assignment to the named role. Unknown role names and
/// unassigned roles are no-ops.
/// </summary>
public Task RemoveFromRoleAsync(TUser user, string role, CancellationToken cancellationToken = default(CancellationToken))
{
    var roleObject = _roles.Values.SingleOrDefault(r => r.NormalizedName == role);
    // Fix: the original dereferenced roleObject.Id without a null check and threw
    // NullReferenceException whenever the role name did not exist in the store.
    if (roleObject == null)
    {
        return Task.FromResult(0);
    }
    var roleEntity = user.Roles.SingleOrDefault(ur => ur.RoleId == roleObject.Id);
    if (roleEntity != null)
    {
        user.Roles.Remove(roleEntity);
    }
    return Task.FromResult(0);
}
/// <summary>
/// Resolves the user's role assignments to role names.
/// NOTE(review): like the original indexer lookup, this throws KeyNotFoundException
/// if an assigned role has since been deleted from the store — confirm that is intended.
/// </summary>
public Task<IList<string>> GetRolesAsync(TUser user, CancellationToken cancellationToken = default(CancellationToken))
{
    IList<string> roles = user.Roles
        .Select(ur => _roles[ur.RoleId].Name)
        .ToList();
    return Task.FromResult(roles);
}
/// <summary>True when the named role exists and the user holds an assignment to it.</summary>
public Task<bool> IsInRoleAsync(TUser user, string role, CancellationToken cancellationToken = default(CancellationToken))
{
    var match = _roles.Values.SingleOrDefault(r => r.NormalizedName == role);
    if (match == null)
    {
        return Task.FromResult(false);
    }
    return Task.FromResult(user.Roles.Any(ur => ur.RoleId == match.Id));
}
// RoleId == rolename for inmemory store tests
/// <summary>
/// Returns every user assigned to the named role; an empty list when the role
/// does not exist. Throws ArgumentNullException for a null or empty role name.
/// </summary>
public Task<IList<TUser>> GetUsersInRoleAsync(string roleName, CancellationToken cancellationToken = default(CancellationToken))
{
    if (String.IsNullOrEmpty(roleName))
    {
        throw new ArgumentNullException(nameof(roleName));
    }
    var role = _roles.Values.SingleOrDefault(x => x.NormalizedName.Equals(roleName));
    if (role == null)
    {
        return Task.FromResult<IList<TUser>>(new List<TUser>());
    }
    // Any() instead of Where(...).Count() > 0 (no full count needed), and the
    // redundant Select(x => x) dropped.
    IList<TUser> members = _users.Values
        .Where(u => u.Roles.Any(x => x.RoleId == role.Id))
        .ToList();
    return Task.FromResult(members);
}
/// <summary>Stores the role keyed by its Id (overwriting any existing entry); always succeeds.</summary>
public Task<IdentityResult> CreateAsync(TRole role, CancellationToken cancellationToken = default(CancellationToken))
{
    var key = role.Id;
    _roles[key] = role;
    return Task.FromResult(IdentityResult.Success);
}
/// <summary>Replaces the stored role under its Id (upsert semantics); always succeeds.</summary>
public Task<IdentityResult> UpdateAsync(TRole role, CancellationToken cancellationToken = default(CancellationToken))
{
    var key = role.Id;
    _roles[key] = role;
    return Task.FromResult(IdentityResult.Success);
}
/// <summary>Removes the role; throws InvalidOperationException for null or unknown roles.</summary>
public Task<IdentityResult> DeleteAsync(TRole role, CancellationToken cancellationToken = default(CancellationToken))
{
    // Remove returns false when the key is absent, covering the "unknown role" case.
    if (role != null && _roles.Remove(role.Id))
    {
        return Task.FromResult(IdentityResult.Success);
    }
    throw new InvalidOperationException("Unknown role");
}
/// <summary>Returns the role's store key (Id).</summary>
public Task<string> GetRoleIdAsync(TRole role, CancellationToken cancellationToken = default(CancellationToken))
    => Task.FromResult(role.Id);
/// <summary>Returns the role's display name.</summary>
public Task<string> GetRoleNameAsync(TRole role, CancellationToken cancellationToken = default(CancellationToken))
    => Task.FromResult(role.Name);
/// <summary>Stores the display name on the role entity.</summary>
public Task SetRoleNameAsync(TRole role, string roleName, CancellationToken cancellationToken = default(CancellationToken))
{
    role.Name = roleName;
    return Task.FromResult(0);
}
/// <summary>Returns the role's normalized (lookup) name.</summary>
public Task<string> GetNormalizedRoleNameAsync(TRole role, CancellationToken cancellationToken = default(CancellationToken))
    => Task.FromResult(role.NormalizedName);
/// <summary>Stores the normalized name on the role entity.</summary>
public Task SetNormalizedRoleNameAsync(TRole role, string normalizedName, CancellationToken cancellationToken = default(CancellationToken))
{
    role.NormalizedName = normalizedName;
    return Task.FromResult(0);
}
/// <summary>
/// Looks up a role by Id. Returns null when no role is stored under that Id;
/// throws ArgumentNullException when <paramref name="roleId"/> is null (same as before).
/// </summary>
Task<TRole> IRoleStore<TRole>.FindByIdAsync(string roleId, CancellationToken cancellationToken)
{
    // Single TryGetValue lookup instead of the original ContainsKey + indexer double lookup.
    TRole role;
    _roles.TryGetValue(roleId, out role);
    return Task.FromResult(role);
}
/// <summary>
/// Finds a role by normalized name, case-insensitively. Returns null when absent;
/// throws InvalidOperationException if more than one role matches (SingleOrDefault).
/// </summary>
Task<TRole> IRoleStore<TRole>.FindByNameAsync(string roleName, CancellationToken cancellationToken)
{
    var match = _roles.Values.SingleOrDefault(
        r => String.Equals(r.NormalizedName, roleName, StringComparison.OrdinalIgnoreCase));
    return Task.FromResult(match);
}
/// <summary>Stores the already-hashed password on the user entity.</summary>
public Task SetPasswordHashAsync(TUser user, string passwordHash, CancellationToken cancellationToken = default(CancellationToken))
{
    user.PasswordHash = passwordHash;
    return Task.FromResult(0);
}
/// <summary>Returns the stored password hash (null when no password is set).</summary>
public Task<string> GetPasswordHashAsync(TUser user, CancellationToken cancellationToken = default(CancellationToken))
    => Task.FromResult(user.PasswordHash);
/// <summary>True when a password hash has been stored for the user.</summary>
public Task<bool> HasPasswordAsync(TUser user, CancellationToken cancellationToken = default(CancellationToken))
    => Task.FromResult(user.PasswordHash != null);
/// <summary>Stores the email address on the user entity.</summary>
public Task SetEmailAsync(TUser user, string email, CancellationToken cancellationToken)
{
    user.Email = email;
    return Task.FromResult(0);
}
/// <summary>Returns the user's email address.</summary>
public Task<string> GetEmailAsync(TUser user, CancellationToken cancellationToken)
    => Task.FromResult(user.Email);
/// <summary>Returns whether the user's e-mail address has been confirmed.</summary>
public Task<bool> GetEmailConfirmedAsync(TUser user, CancellationToken cancellationToken)
    => Task.FromResult(user.EmailConfirmed);
/// <summary>Marks the user's e-mail address as confirmed or unconfirmed.</summary>
public Task SetEmailConfirmedAsync(TUser user, bool confirmed, CancellationToken cancellationToken)
{
    user.EmailConfirmed = confirmed;
    // Task.CompletedTask avoids allocating a Task<int> just to signal completion.
    return Task.CompletedTask;
}
/// <summary>
/// Looks a user up by normalized e-mail address; resolves to null when
/// no match exists (normalized e-mails are expected to be unique).
/// </summary>
public Task<TUser> FindByEmailAsync(string normalizedEmail, CancellationToken cancellationToken)
{
    var user = Users.SingleOrDefault(u => u.NormalizedEmail == normalizedEmail);
    return Task.FromResult(user);
}
/// <summary>Returns the user's normalized (lookup) e-mail address.</summary>
public Task<string> GetNormalizedEmailAsync(TUser user, CancellationToken cancellationToken)
    => Task.FromResult(user.NormalizedEmail);
/// <summary>Stores the user's normalized (lookup) e-mail address.</summary>
public Task SetNormalizedEmailAsync(TUser user, string normalizedEmail, CancellationToken cancellationToken)
{
    user.NormalizedEmail = normalizedEmail;
    // Task.CompletedTask avoids allocating a Task<int> just to signal completion.
    return Task.CompletedTask;
}
/// <summary>Returns the user's stored claims projected to framework <see cref="Claim"/> instances.</summary>
public Task<IList<Claim>> GetClaimsAsync(TUser user, CancellationToken cancellationToken)
{
    IList<Claim> claims = user.Claims.Select(c => c.ToClaim()).ToList();
    return Task.FromResult(claims);
}
/// <summary>Adds the given claims to the user's in-memory claim collection.</summary>
public Task AddClaimsAsync(TUser user, IEnumerable<Claim> claims, CancellationToken cancellationToken)
{
    foreach (var claim in claims)
    {
        user.Claims.Add(new IdentityUserClaim<string> { ClaimType = claim.Type, ClaimValue = claim.Value, UserId = user.Id });
    }
    // Task.CompletedTask avoids allocating a Task<int> just to signal completion.
    return Task.CompletedTask;
}
/// <summary>Rewrites every stored claim matching <paramref name="claim"/> to <paramref name="newClaim"/>.</summary>
public Task ReplaceClaimAsync(TUser user, Claim claim, Claim newClaim, CancellationToken cancellationToken)
{
    // ToList snapshots the matches so mutating the entities cannot
    // interfere with the enumeration.
    var matchedClaims = user.Claims.Where(uc => uc.ClaimValue == claim.Value && uc.ClaimType == claim.Type).ToList();
    foreach (var matchedClaim in matchedClaims)
    {
        matchedClaim.ClaimValue = newClaim.Value;
        matchedClaim.ClaimType = newClaim.Type;
    }
    // Task.CompletedTask avoids allocating a Task<int> just to signal completion.
    return Task.CompletedTask;
}
/// <summary>Removes the given claims from the user's claim collection.</summary>
public Task RemoveClaimsAsync(TUser user, IEnumerable<Claim> claims, CancellationToken cancellationToken)
{
    foreach (var claim in claims)
    {
        // Remove at most one stored claim per requested claim (FirstOrDefault),
        // so duplicates are removed one at a time, mirroring how they are added.
        var entity =
            user.Claims.FirstOrDefault(
                uc => uc.UserId == user.Id && uc.ClaimType == claim.Type && uc.ClaimValue == claim.Value);
        if (entity != null)
        {
            user.Claims.Remove(entity);
        }
    }
    // Task.CompletedTask avoids allocating a Task<int> just to signal completion.
    return Task.CompletedTask;
}
/// <summary>Returns all users that hold a claim with the given type and value.</summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="claim"/> is null.</exception>
public Task<IList<TUser>> GetUsersForClaimAsync(Claim claim, CancellationToken cancellationToken)
{
    if (claim == null)
    {
        throw new ArgumentNullException(nameof(claim));
    }
    // Any() expresses the membership test directly, instead of materializing
    // a claim entity just to compare it against null (Where(...).FirstOrDefault() != null).
    var matches = Users
        .Where(user => user.Claims.Any(x => x.ClaimType == claim.Type && x.ClaimValue == claim.Value))
        .ToList();
    return Task.FromResult<IList<TUser>>(matches);
}
/// <summary>
/// Finds users whose full name or student id contains the given matchcode,
/// ignoring case.
/// </summary>
public IEnumerable<TUser> FindByMatchcode(string matchcode)
{
    // Use an ordinal case-insensitive substring test for BOTH fields.
    // The original lowered only the matchcode (culture-sensitively), so
    // StudentId values containing upper-case characters could never match.
    return Users
        .Where(u =>
            u.FullName.IndexOf(matchcode, StringComparison.OrdinalIgnoreCase) >= 0 ||
            (u.StudentId != null &&
             u.StudentId.IndexOf(matchcode, StringComparison.OrdinalIgnoreCase) >= 0))
        .ToArray();
}
/// <summary>Looks a user up by id; returns null when no user matches.</summary>
public TUser FindByUserId(string userId)
{
    // Id is already a string (see Add, which assigns Guid.NewGuid().ToString()),
    // so comparing directly avoids a redundant ToString() call and a
    // NullReferenceException for users whose Id is null.
    return Users.SingleOrDefault(u => u.Id == userId);
}
/// <summary>
/// Returns the users whose ids appear in <paramref name="userIds"/>.
/// The result is a deferred query; it is re-evaluated on each enumeration.
/// </summary>
public IEnumerable<TUser> FindByUserIds(IEnumerable<string> userIds)
{
    return from u in Users
           where userIds.Contains(u.Id)
           select u;
}
/// <summary>
/// Adds (or replaces) a user in the store, generating a new Guid id
/// only when the user does not already have one.
/// </summary>
public void Add(TUser user)
{
    if (user.Id == null)
        user.Id = Guid.NewGuid().ToString();
    // user is already a TUser; the original (TUser) cast was redundant.
    _users[user.Id] = user;
}
/// <summary>Adds each of the given users to the store.</summary>
public void AddRange(IEnumerable<TUser> users)
{
    foreach (var user in users)
    {
        // Delegate to Add so a caller-supplied Id is preserved, consistent
        // with the single-user overload (the original unconditionally
        // overwrote any existing Id with a fresh Guid).
        Add(user);
    }
}
}
}
| |
#region
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
#endregion
namespace Secomba
{
/// <summary>
/// Selects which code-point base is used for the 12-bit payload region:
/// V1 uses the legacy base (0x05000), V2 the current base (0x06000).
/// </summary>
public enum Base4KVersion {
V1,
V2
}
/// <summary>
/// Encodes/decodes binary data as "Base4k": each 12 bits of input are mapped
/// to one Unicode code point inside a 0x1000-wide region, and any trailing
/// 4 or 8 bits are mapped into a separate flag region starting at 0x04000.
/// </summary>
public class Base4K
{
    // Base addresses for mapping regions
    //
    private const int BASE_FLAG_START = 0x04000;   // start of the trailing partial-chunk ("flag") region
    private const int BASE1_START = 0x06000;       // start of the 12-bit payload region (current V2 format)
    private const int BASE1_START_LEGACY = 0x05000; // start of the payload region in the legacy V1 format
    // Sizes of each mapping region
    //
    private const int BASE_FLAG_SIZE = 0x100;  // flag region spans one byte of values (0x00-0xff)
    private const int BASE1_SIZE = 0x01000;    // payload region spans 12 bits of values (0x000-0xfff)
    // Which payload base Encode uses; Decode tries both, so it is version-agnostic.
    private readonly Base4KVersion _version;
    public Base4K(Base4KVersion version = Base4KVersion.V2) {
        _version = version;
    }
    /// <summary>
    /// Encodes the specified raw data as Base4k.
    /// </summary>
    /// <param name="raw">The raw bytes to encode.</param>
    /// <returns>The Base4k-encoded string.</returns>
    public string Encode(byte[] raw)
    {
        Encoding enc = new UTF8Encoding(true, true);
        using(var buffer = new MemoryStream()) {
            int offset;
            // Walk the input in 12-bit chunks; i counts nibbles (raw.Length*2),
            // advancing three nibbles per chunk.
            for(var i = 0; i < raw.Length*2 - 2; i += 3) {
                if(i%2 == 0) {
                    // Chunk starts on a byte boundary: 8 bits from raw[i/2], top nibble of the next byte.
                    offset = ((raw[i/2] << 4) | ((raw[i/2 + 1] >> 4) & 0x0f)) & 0x0fff; // In Java original >>
                } else {
                    // Chunk starts mid-byte: low nibble of raw[i/2], all 8 bits of the next byte.
                    offset = ((raw[i/2] << 8) | (raw[i/2 + 1] & 0xff)) & 0x0fff;
                }
                offset += _version == Base4KVersion.V1 ? BASE1_START_LEGACY : BASE1_START;
                // now that offset is a valid unicode character: code it to utf-8
                var utfByets = ToUtf8(offset);
                buffer.Write(utfByets, 0, utfByets.Length);
            }
            // Leftover bits that do not fill a full 12-bit chunk are written
            // as a single code point in the flag region.
            if((raw.Length*2)%3 == 2) {
                // One full byte (8 bits) remains.
                offset = (raw[raw.Length - 1] & 0xff) + BASE_FLAG_START;
                var utfByets = ToUtf8(offset);
                buffer.Write(utfByets, 0, utfByets.Length);
            } else if((raw.Length*2)%3 == 1) {
                // One nibble (4 bits) remains.
                offset = ((raw[raw.Length - 1] & 0x0f)) + BASE_FLAG_START;
                var utfByets = ToUtf8(offset);
                buffer.Write(utfByets, 0, utfByets.Length);
            }
            return enc.GetString(buffer.ToArray());
        }
    }
    /// <summary>
    /// Decodes the specified Base4k-encoded string - returns null if decoding failed.
    /// Tries the current (V2) payload base first, then falls back to the legacy (V1) base.
    /// </summary>
    /// <param name="encoded">The encoded string.</param>
    /// <returns>The decoded bytes, or null on failure.</returns>
    public byte[] Decode(string encoded) {
        byte[] result = DecodeInternal(encoded, BASE1_START);
        if (result == null) {
            result = DecodeInternal(encoded, BASE1_START_LEGACY);
        }
        return result;
    }
    /// <summary>
    /// Decodes the specified Base4k string against one payload base - returns null if decoding failed.
    /// </summary>
    /// <param name="encoded">The encoded string.</param>
    /// <param name="base1Start">Start of the payload region to decode against.</param>
    /// <returns>The decoded bytes, or null on failure.</returns>
    private byte[] DecodeInternal(string encoded, int base1Start)
    {
        int code;
        int nrOfBytes;
        Encoding enc = new UTF8Encoding(true, true);
        byte[] encBytes = enc.GetBytes(encoded);
        using(var buffer = new MemoryStream()) {
            var intCollector = new List<int>();
            // First pass: decode the UTF-8 byte stream back into code points,
            // validating that each one falls into the expected region.
            for(var i = 0; i < encBytes.Length;) {
                // Determine the UTF-8 sequence length from the lead byte's high bits.
                if((encBytes[i] & 0x80) == 0) {
                    // 1 byte
                    nrOfBytes = 1;
                } else if((encBytes[i] & 0x20) == 0) {
                    // 2 bytes
                    nrOfBytes = 2;
                } else if((encBytes[i] & 0x10) == 0) {
                    // 3 bytes
                    nrOfBytes = 3;
                } else {
                    // 4 bytes
                    nrOfBytes = 4;
                }
                code = ToCode(encBytes, i, nrOfBytes);
                i += nrOfBytes;
                // A code point outside the payload region is only legal as the
                // final character, and only within the flag region.
                if(!(code >= base1Start && code < base1Start + BASE1_SIZE)) {
                    if(i < encBytes.Length || !(code >= BASE_FLAG_START && code < BASE_FLAG_START + BASE_FLAG_SIZE)) {
                        return null;
                    }
                }
                intCollector.Add(code);
            }
            var tempCodeBuffer = intCollector.ToArray();
            // Second pass: strip the region bases and repack the 12-bit values
            // (and any trailing flag value) into bytes.
            for(var i = 0; i < tempCodeBuffer.Length; i++) {
                if(tempCodeBuffer[i] >= base1Start) {
                    tempCodeBuffer[i] -= base1Start;
                } else {
                    // Flag region: this is the trailing partial chunk; emit it and stop.
                    tempCodeBuffer[i] -= BASE_FLAG_START;
                    if(i%2 == 0) {
                        buffer.WriteByte((byte)tempCodeBuffer[i]);
                    } else {
                        buffer.WriteByte((byte)(((tempCodeBuffer[i - 1] << 4) | ((tempCodeBuffer[i] & 0x0f)) & 0xff)));
                    }
                    break;
                }
                if(i%2 == 0) {
                    // Even chunk: its top 8 bits form a byte; the low nibble is
                    // combined with the next chunk on the following iteration.
                    buffer.WriteByte((byte)(tempCodeBuffer[i] >> 4)); // In Java original >>>
                } else {
                    // Odd chunk: previous chunk's low nibble + this chunk's top nibble,
                    // then this chunk's low 8 bits.
                    buffer.WriteByte((byte)(((tempCodeBuffer[i - 1] << 4) | ((tempCodeBuffer[i] & 0x0f00) >> 8)) & 0xff)); // In Java original >>>
                    buffer.WriteByte((byte)(tempCodeBuffer[i] & 0xff));
                }
            }
            return buffer.ToArray();
        }
    }
    /// <summary>
    /// Converts a code point to its UTF-8 byte sequence (1-4 bytes).
    /// </summary>
    /// <param name="code">The code point.</param>
    /// <returns>The UTF-8 encoding of the code point.</returns>
    private byte[] ToUtf8(int code)
    {
        byte[] result;
        // test for big codes first, since small ones generally don't occur.
        if(code > 0xffff) {
            result = new byte[4];
            result[0] = (byte)(0xf0 | ((code >> 18) & 0x07));
            result[1] = (byte)(0x80 | ((code >> 12) & 0x3f));
            result[2] = (byte)(0x80 | ((code >> 6) & 0x3f));
            result[3] = (byte)(0x80 | (code & 0x3f));
        } else if(code > 0x7ff) {
            result = new byte[3];
            result[0] = (byte)(0xe0 | ((code >> 12) & 0x0f));
            result[1] = (byte)(0x80 | ((code >> 6) & 0x3f));
            result[2] = (byte)(0x80 | (code & 0x3f));
        } else if(code > 0x7f) {
            result = new byte[2];
            result[0] = (byte)(0xc0 | ((code >> 6) & 0x1f));
            result[1] = (byte)(0x80 | (code & 0x3f));
        } else {
            result = new byte[1];
            result[0] = (byte)(0x00 | (code & 0x7f));
        }
        return result;
    }
    /// <summary>
    /// Converts a UTF-8 byte sequence back to its code point.
    /// </summary>
    /// <param name="utf8Char">Buffer holding the UTF-8 bytes.</param>
    /// <param name="offset">Index of the lead byte.</param>
    /// <param name="length">Sequence length in bytes (1-4).</param>
    /// <returns>The decoded code point (0 for an unsupported length).</returns>
    private int ToCode(IList<byte> utf8Char, int offset, int length)
    {
        var result = 0;
        switch(length) {
            case 1:
                result |= utf8Char[offset];
                break;
            case 2:
                result |= (utf8Char[offset + 0] & 0x1f) << 6;
                result |= (utf8Char[offset + 1] & 0x3f);
                break;
            case 3:
                result |= (utf8Char[offset + 0] & 0x0f) << 12;
                result |= (utf8Char[offset + 1] & 0x3f) << 6;
                result |= (utf8Char[offset + 2] & 0x3f);
                break;
            case 4:
                result |= (utf8Char[offset + 0] & 0x07) << 18;
                result |= (utf8Char[offset + 1] & 0x3f) << 12;
                result |= (utf8Char[offset + 2] & 0x3f) << 6;
                result |= (utf8Char[offset + 3] & 0x3f);
                break;
        }
        return result;
    }
}
}
| |
// <copyright file="V89Network.cs" company="WebDriver Committers">
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading.Tasks;
using OpenQA.Selenium.DevTools.V89.Fetch;
using OpenQA.Selenium.DevTools.V89.Network;
namespace OpenQA.Selenium.DevTools.V89
{
/// <summary>
/// Class providing functionality for manipulating network calls using version 89 of the DevTools Protocol
/// </summary>
public class V89Network : DevTools.Network
{
    private FetchAdapter fetch;
    private NetworkAdapter network;

    /// <summary>
    /// Initializes a new instance of the <see cref="V89Network"/> class.
    /// </summary>
    /// <param name="network">The adapter for the Network domain.</param>
    /// <param name="fetch">The adapter for the Fetch domain.</param>
    public V89Network(NetworkAdapter network, FetchAdapter fetch)
    {
        this.network = network;
        this.fetch = fetch;
        fetch.AuthRequired += OnFetchAuthRequired;
        fetch.RequestPaused += OnFetchRequestPaused;
        network.ResponseReceived += OnNetworkResponseReceived;
    }

    /// <summary>
    /// Asynchronously disables network caching.
    /// </summary>
    /// <returns>A task that represents the asynchronous operation.</returns>
    public override async Task DisableNetworkCaching()
    {
        await network.SetCacheDisabled(new SetCacheDisabledCommandSettings() { CacheDisabled = true });
    }

    /// <summary>
    /// Asynchronously enables network caching.
    /// </summary>
    /// <returns>A task that represents the asynchronous operation.</returns>
    public override async Task EnableNetworkCaching()
    {
        await network.SetCacheDisabled(new SetCacheDisabledCommandSettings() { CacheDisabled = false });
    }

    /// <summary>
    /// Asynchronously enables the Network domain.
    /// </summary>
    /// <returns>A task that represents the asynchronous operation.</returns>
    public override async Task EnableNetwork()
    {
        await network.Enable(new Network.EnableCommandSettings());
    }

    /// <summary>
    /// Asynchronously disables the Network domain.
    /// </summary>
    /// <returns>A task that represents the asynchronous operation.</returns>
    public override async Task DisableNetwork()
    {
        await network.Disable();
    }

    /// <summary>
    /// Asynchronously enables the fetch domain for all URL patterns.
    /// </summary>
    /// <returns>A task that represents the asynchronous operation.</returns>
    public override async Task EnableFetchForAllPatterns()
    {
        await fetch.Enable(new OpenQA.Selenium.DevTools.V89.Fetch.EnableCommandSettings()
        {
            Patterns = new OpenQA.Selenium.DevTools.V89.Fetch.RequestPattern[]
            {
                new OpenQA.Selenium.DevTools.V89.Fetch.RequestPattern() { UrlPattern = "*" }
            },
            HandleAuthRequests = true
        });
    }

    /// <summary>
    /// Asynchronously disables the fetch domain.
    /// </summary>
    /// <returns>A task that represents the asynchronous operation.</returns>
    public override async Task DisableFetch()
    {
        await fetch.Disable();
    }

    /// <summary>
    /// Asynchronously continues an intercepted network request.
    /// </summary>
    /// <param name="requestData">The <see cref="HttpRequestData"/> of the request.</param>
    /// <returns>A task that represents the asynchronous operation.</returns>
    public override async Task ContinueRequest(HttpRequestData requestData)
    {
        var commandSettings = new ContinueRequestCommandSettings()
        {
            RequestId = requestData.RequestId,
            Method = requestData.Method,
            Url = requestData.Url,
        };

        if (requestData.Headers.Count > 0)
        {
            List<HeaderEntry> headers = new List<HeaderEntry>();
            foreach (KeyValuePair<string, string> headerPair in requestData.Headers)
            {
                headers.Add(new HeaderEntry() { Name = headerPair.Key, Value = headerPair.Value });
            }

            commandSettings.Headers = headers.ToArray();
        }

        if (!string.IsNullOrEmpty(requestData.PostData))
        {
            commandSettings.PostData = requestData.PostData;
        }

        await fetch.ContinueRequest(commandSettings);
    }

    /// <summary>
    /// Asynchronously continues an intercepted network request.
    /// </summary>
    /// <param name="requestData">The <see cref="HttpRequestData"/> of the request.</param>
    /// <param name="responseData">The <see cref="HttpResponseData"/> with which to respond to the request</param>
    /// <returns>A task that represents the asynchronous operation.</returns>
    public override async Task ContinueRequestWithResponse(HttpRequestData requestData, HttpResponseData responseData)
    {
        var commandSettings = new FulfillRequestCommandSettings()
        {
            RequestId = requestData.RequestId,
            ResponseCode = responseData.StatusCode,
        };

        if (responseData.Headers.Count > 0)
        {
            List<HeaderEntry> headers = new List<HeaderEntry>();
            foreach (KeyValuePair<string, string> headerPair in responseData.Headers)
            {
                headers.Add(new HeaderEntry() { Name = headerPair.Key, Value = headerPair.Value });
            }

            commandSettings.ResponseHeaders = headers.ToArray();
        }

        if (!string.IsNullOrEmpty(responseData.Body))
        {
            // Fetch.fulfillRequest expects the body as base64.
            commandSettings.Body = Convert.ToBase64String(Encoding.UTF8.GetBytes(responseData.Body));
        }

        await fetch.FulfillRequest(commandSettings);
    }

    /// <summary>
    /// Asynchronously continues an intercepted network call without modification.
    /// </summary>
    /// <param name="requestData">The <see cref="HttpRequestData"/> of the network call.</param>
    /// <returns>A task that represents the asynchronous operation.</returns>
    public override async Task ContinueWithoutModification(HttpRequestData requestData)
    {
        await fetch.ContinueRequest(new ContinueRequestCommandSettings() { RequestId = requestData.RequestId });
    }

    /// <summary>
    /// Asynchronously continues an intercepted network call using authentication.
    /// </summary>
    /// <param name="requestId">The ID of the network request for which to continue with authentication.</param>
    /// <param name="userName">The user name with which to authenticate.</param>
    /// <param name="password">The password with which to authenticate.</param>
    /// <returns>A task that represents the asynchronous operation.</returns>
    public override async Task ContinueWithAuth(string requestId, string userName, string password)
    {
        await fetch.ContinueWithAuth(new ContinueWithAuthCommandSettings()
        {
            RequestId = requestId,
            AuthChallengeResponse = new V89.Fetch.AuthChallengeResponse()
            {
                Response = V89.Fetch.AuthChallengeResponseResponseValues.ProvideCredentials,
                Username = userName,
                Password = password
            }
        });
    }

    /// <summary>
    /// Asynchronously cancels authorization of an intercepted network request.
    /// </summary>
    /// <param name="requestId">The ID of the network request for which to cancel authentication.</param>
    /// <returns>A task that represents the asynchronous operation.</returns>
    public override async Task CancelAuth(string requestId)
    {
        await fetch.ContinueWithAuth(new ContinueWithAuthCommandSettings()
        {
            RequestId = requestId,
            AuthChallengeResponse = new OpenQA.Selenium.DevTools.V89.Fetch.AuthChallengeResponse()
            {
                Response = V89.Fetch.AuthChallengeResponseResponseValues.CancelAuth
            }
        });
    }

    // Wraps a Fetch.authRequired event into the version-independent event type.
    private void OnFetchAuthRequired(object sender, Fetch.AuthRequiredEventArgs e)
    {
        AuthRequiredEventArgs wrapped = new AuthRequiredEventArgs()
        {
            RequestId = e.RequestId,
            Uri = e.Request.Url
        };
        this.OnAuthRequired(wrapped);
    }

    // Wraps a Fetch.requestPaused event into the version-independent event type.
    // RequestData is only populated for the request stage (no response status or
    // error reason present yet).
    private void OnFetchRequestPaused(object sender, Fetch.RequestPausedEventArgs e)
    {
        RequestPausedEventArgs wrapped = new RequestPausedEventArgs();
        if (e.ResponseErrorReason == null && e.ResponseStatusCode == null)
        {
            wrapped.RequestData = new HttpRequestData()
            {
                RequestId = e.RequestId,
                Method = e.Request.Method,
                Url = e.Request.Url,
                PostData = e.Request.PostData,
                Headers = new Dictionary<string, string>(e.Request.Headers)
            };
        }

        this.OnRequestPaused(wrapped);
    }

    // Wraps a Network.responseReceived event, fetching the response body so
    // subscribers receive it alongside the response metadata.
    private async void OnNetworkResponseReceived(object sender, Network.ResponseReceivedEventArgs e)
    {
        HttpResponseData responseData = new HttpResponseData()
        {
            StatusCode = e.Response.Status,
            Url = e.Response.Url,
            ResourceType = e.Type.ToString()
        };

        foreach (var header in e.Response.Headers)
        {
            responseData.Headers.Add(header.Key, header.Value);
        }

        // Network.getResponseBody can fail for requests whose body is not
        // retrievable (e.g. redirects or cancelled/evicted responses). This is
        // an async void event handler, so an unhandled exception here would
        // bring down the process; treat a failed body fetch as "no body".
        try
        {
            var body = await network.GetResponseBody(new Network.GetResponseBodyCommandSettings() { RequestId = e.RequestId });
            if (body.Base64Encoded)
            {
                responseData.Body = Encoding.UTF8.GetString(Convert.FromBase64String(body.Body));
            }
            else
            {
                responseData.Body = body.Body;
            }
        }
        catch (Exception)
        {
            responseData.Body = null;
        }

        ResponseReceivedEventArgs wrapped = new ResponseReceivedEventArgs()
        {
            RequestId = e.RequestId,
            ResponseData = responseData
        };
        this.OnResponseReceived(wrapped);
    }
}
}
| |
using System;
using Stream = System.IO.Stream;
using TextReader = System.IO.TextReader;
using StringBuilder = System.Text.StringBuilder;
using Hashtable = System.Collections.Hashtable;
using Assembly = System.Reflection.Assembly;
using EventHandlerList = System.ComponentModel.EventHandlerList;
using BitSet = antlr.collections.impl.BitSet;
using antlr.debug;
namespace antlr
{
/*ANTLR Translator Generator
* Project led by Terence Parr at http://www.jGuru.com
* Software rights: http://www.antlr.org/license.html
*
* $Id:$
*/
//
// ANTLR C# Code Generator by Micheal Jordan
// Kunle Odutola : kunle UNDERSCORE odutola AT hotmail DOT com
// Anthony Oguntimehin
//
// With many thanks to Eric V. Smith from the ANTLR list.
//
public abstract class CharScanner : TokenStream, ICharScannerDebugSubject
{
// Sentinel for "no character"; EOF is represented by Char.MaxValue.
internal const char NO_CHAR = (char)(0);
public static readonly char EOF_CHAR = Char.MaxValue;
// Used to store event delegates
private EventHandlerList events_ = new EventHandlerList();
protected internal EventHandlerList Events
{
    get { return events_; }
}
// The unique keys for each event that CharScanner [objects] can generate
internal static readonly object EnterRuleEventKey = new object();
internal static readonly object ExitRuleEventKey = new object();
internal static readonly object DoneEventKey = new object();
internal static readonly object ReportErrorEventKey = new object();
internal static readonly object ReportWarningEventKey = new object();
internal static readonly object NewLineEventKey = new object();
internal static readonly object MatchEventKey = new object();
internal static readonly object MatchNotEventKey = new object();
internal static readonly object MisMatchEventKey = new object();
internal static readonly object MisMatchNotEventKey = new object();
internal static readonly object ConsumeEventKey = new object();
internal static readonly object LAEventKey = new object();
internal static readonly object SemPredEvaluatedEventKey = new object();
internal static readonly object SynPredStartedEventKey = new object();
internal static readonly object SynPredFailedEventKey = new object();
internal static readonly object SynPredSucceededEventKey = new object();
protected internal StringBuilder text; // text of current token
protected bool saveConsumedInput = true; // does consume() save characters?
/// <summary>Used for creating Token instances.</summary>
protected TokenCreator tokenCreator;
/// <summary>Used for caching lookahead characters.</summary>
protected char cached_LA1;
protected char cached_LA2;
// When false, LA()/consume() fold input to lower case via toLower().
protected bool caseSensitive = true;
protected bool caseSensitiveLiterals = true;
protected Hashtable literals; // set by subclass; maps literal text -> token type (boxed int)
/*Tab chars are handled by tab() according to this value; override
* method to do anything weird with tabs.
*/
protected internal int tabsize = 8;
protected internal IToken returnToken_ = null; // used to return tokens w/o using return val.
protected internal LexerSharedInputState inputState;
/*Used during filter mode to indicate that path is desired.
* A subsequent scan error will report an error as usual if
* acceptPath=true;
*/
protected internal bool commitToPath = false;
/*Used to keep track of indentdepth for traceIn/Out */
protected internal int traceDepth = 0;
/// <summary>
/// Creates a scanner with an empty text buffer and the default
/// <see cref="CommonToken"/> creator; no input is attached yet.
/// </summary>
public CharScanner()
{
    text = new StringBuilder();
    setTokenCreator(new CommonToken.CommonTokenCreator());
}
/// <summary>Creates a scanner reading from the given input buffer.</summary>
public CharScanner(InputBuffer cb)
    : this()
{
    inputState = new LexerSharedInputState(cb);
    // Prime the two-character lookahead cache from the new input.
    cached_LA2 = inputState.input.LA(2);
    cached_LA1 = inputState.input.LA(1);
}
/// <summary>Creates a scanner that shares lexer input state (e.g. with another lexer).</summary>
public CharScanner(LexerSharedInputState sharedState)
    : this()
{
    inputState = sharedState;
    if (inputState != null)
    {
        // Prime the two-character lookahead cache from the shared input.
        cached_LA2 = inputState.input.LA(2);
        cached_LA1 = inputState.input.LA(1);
    }
}
// Debug/trace events. Each event stores its handlers in the shared
// EventHandlerList under the corresponding unique key object declared above.
public event TraceEventHandler EnterRule
{
    add { Events.AddHandler(EnterRuleEventKey, value); }
    remove { Events.RemoveHandler(EnterRuleEventKey, value); }
}
public event TraceEventHandler ExitRule
{
    add { Events.AddHandler(ExitRuleEventKey, value); }
    remove { Events.RemoveHandler(ExitRuleEventKey, value); }
}
public event TraceEventHandler Done
{
    add { Events.AddHandler(DoneEventKey, value); }
    remove { Events.RemoveHandler(DoneEventKey, value); }
}
public event MessageEventHandler ErrorReported
{
    add { Events.AddHandler(ReportErrorEventKey, value); }
    remove { Events.RemoveHandler(ReportErrorEventKey, value); }
}
public event MessageEventHandler WarningReported
{
    add { Events.AddHandler(ReportWarningEventKey, value); }
    remove { Events.RemoveHandler(ReportWarningEventKey, value); }
}
public event NewLineEventHandler HitNewLine
{
    add { Events.AddHandler(NewLineEventKey, value); }
    remove { Events.RemoveHandler(NewLineEventKey, value); }
}
public event MatchEventHandler MatchedChar
{
    add { Events.AddHandler(MatchEventKey, value); }
    remove { Events.RemoveHandler(MatchEventKey, value); }
}
public event MatchEventHandler MatchedNotChar
{
    add { Events.AddHandler(MatchNotEventKey, value); }
    remove { Events.RemoveHandler(MatchNotEventKey, value); }
}
public event MatchEventHandler MisMatchedChar
{
    add { Events.AddHandler(MisMatchEventKey, value); }
    remove { Events.RemoveHandler(MisMatchEventKey, value); }
}
public event MatchEventHandler MisMatchedNotChar
{
    add { Events.AddHandler(MisMatchNotEventKey, value); }
    remove { Events.RemoveHandler(MisMatchNotEventKey, value); }
}
public event TokenEventHandler ConsumedChar
{
    add { Events.AddHandler(ConsumeEventKey, value); }
    remove { Events.RemoveHandler(ConsumeEventKey, value); }
}
public event TokenEventHandler CharLA
{
    add { Events.AddHandler(LAEventKey, value); }
    remove { Events.RemoveHandler(LAEventKey, value); }
}
public event SemanticPredicateEventHandler SemPredEvaluated
{
    add { Events.AddHandler(SemPredEvaluatedEventKey, value); }
    remove { Events.RemoveHandler(SemPredEvaluatedEventKey, value); }
}
public event SyntacticPredicateEventHandler SynPredStarted
{
    add { Events.AddHandler(SynPredStartedEventKey, value); }
    remove { Events.RemoveHandler(SynPredStartedEventKey, value); }
}
public event SyntacticPredicateEventHandler SynPredFailed
{
    add { Events.AddHandler(SynPredFailedEventKey, value); }
    remove { Events.RemoveHandler(SynPredFailedEventKey, value); }
}
public event SyntacticPredicateEventHandler SynPredSucceeded
{
    add { Events.AddHandler(SynPredSucceededEventKey, value); }
    remove { Events.RemoveHandler(SynPredSucceededEventKey, value); }
}
// From interface TokenStream
/// <summary>Default token fetch; generated lexers override this.</summary>
public virtual IToken nextToken() { return null; }
/// <summary>Appends a character to the current token text, unless saving is disabled.</summary>
public virtual void append(char c)
{
    if (!saveConsumedInput)
        return;
    text.Append(c);
}
/// <summary>Appends a string to the current token text, unless saving is disabled.</summary>
public virtual void append(string s)
{
    if (!saveConsumedInput)
        return;
    text.Append(s);
}
/// <summary>Commits all marked positions in the underlying input buffer.</summary>
public virtual void commit()
{
    inputState.input.commit();
}
/// <summary>Error recovery: skip one character, then sync to the given follow set.</summary>
public virtual void recover(RecognitionException ex, BitSet tokenSet)
{
    consume();
    consumeUntil(tokenSet);
}
/// <summary>
/// Consumes one character: records it in the token text and updates the
/// column (unless inside a syntactic-predicate guess), then advances the
/// input and re-primes the two-character lookahead cache.
/// </summary>
public virtual void consume()
{
    // Only record text and track position when not guessing.
    if (inputState.guessing == 0)
    {
        if (caseSensitive)
        {
            append(cached_LA1);
        }
        else
        {
            // use input.LA(), not LA(), to get original case
            // CharScanner.LA() would toLower it.
            append(inputState.input.LA(1));
        }
        if (cached_LA1 == '\t')
        {
            tab();
        }
        else
        {
            inputState.column++;
        }
    }
    // Advance the input; the cache is folded to lower case when the
    // scanner is case-insensitive.
    if (caseSensitive)
    {
        cached_LA1 = inputState.input.consume();
        cached_LA2 = inputState.input.LA(2);
    }
    else
    {
        cached_LA1 = toLower(inputState.input.consume());
        cached_LA2 = toLower(inputState.input.LA(2));
    }
}
/*Consume chars until one matches the given char */
public virtual void consumeUntil(int c)
{
    for (;;)
    {
        if (cached_LA1 == EOF_CHAR || cached_LA1 == c)
            break;
        consume();
    }
}
/*Consume chars until one matches the given set */
public virtual void consumeUntil(BitSet bset)
{
    for (;;)
    {
        if (cached_LA1 == EOF_CHAR || bset.member(cached_LA1))
            break;
        consume();
    }
}
/// <summary>Whether the scanner preserves input case (see toLower()).</summary>
public virtual bool getCaseSensitive()
{
    return caseSensitive;
}
/// <summary>Whether literal-table lookups are case sensitive.</summary>
public bool getCaseSensitiveLiterals()
{
    return caseSensitiveLiterals;
}
/// <summary>Current column in the input (1-based; see newline()/tab()).</summary>
public virtual int getColumn()
{
    return inputState.column;
}
public virtual void setColumn(int c)
{
    inputState.column = c;
}
/// <summary>Filter-mode flag: whether the current path has been committed to.</summary>
public virtual bool getCommitToPath()
{
    return commitToPath;
}
public virtual string getFilename()
{
    return inputState.filename;
}
public virtual InputBuffer getInputBuffer()
{
    return inputState.input;
}
public virtual LexerSharedInputState getInputState()
{
    return inputState;
}
public virtual void setInputState(LexerSharedInputState state)
{
    inputState = state;
}
public virtual int getLine()
{
    return inputState.line;
}
/*return a copy of the current text buffer */
public virtual string getText()
{
    return text.ToString();
}
/// <summary>The last token produced, returned without using a return value.</summary>
public virtual IToken getTokenObject()
{
    return returnToken_;
}
/// <summary>
/// Lookahead: returns the i-th upcoming character. The first two
/// characters come from the cache; deeper lookahead reads the input
/// (folded to lower case when the scanner is case-insensitive).
/// </summary>
public virtual char LA(int i)
{
    switch (i)
    {
        case 1:
            return cached_LA1;
        case 2:
            return cached_LA2;
        default:
            if (caseSensitive)
            {
                return inputState.input.LA(i);
            }
            return toLower(inputState.input.LA(i));
    }
}
/// <summary>
/// Creates a token of type <paramref name="t"/> via the configured
/// <see cref="TokenCreator"/> and stamps it with the token-start
/// position and filename.
/// </summary>
protected internal virtual IToken makeToken(int t)
{
    IToken newToken = null;
    bool typeCreated;
    try
    {
        newToken = tokenCreator.Create();
        if (newToken != null)
        {
            newToken.Type = t;
            newToken.setColumn(inputState.tokenStartColumn);
            newToken.setLine(inputState.tokenStartLine);
            // tracking real start line now: newToken.setLine(inputState.line);
            newToken.setFilename(inputState.filename);
        }
        typeCreated = true;
    }
    catch
    {
        typeCreated = false;
    }
    if (!typeCreated)
    {
        // NOTE(review): the default panic(string) throws, so the badToken
        // fallback below is only reachable if a subclass overrides panic
        // with a non-throwing implementation — confirm before relying on it.
        panic("Can't create Token object '" + tokenCreator.TokenTypeName + "'");
        newToken = Token.badToken;
    }
    return newToken;
}
/// <summary>Marks the current input position for a later rewind().</summary>
public virtual int mark()
{
    return inputState.input.mark();
}
/// <summary>Matches a single expected character (char overload).</summary>
public virtual void match(char c)
{
    match((int)c);
}
/// <summary>Matches a single expected character; consumes on success.</summary>
public virtual void match(int c)
{
    if (cached_LA1 != c)
    {
        throw new MismatchedCharException(cached_LA1, Convert.ToChar(c), false, this);
    }
    consume();
}
/// <summary>Matches any character in the given set; consumes on success.</summary>
public virtual void match(BitSet b)
{
    if (!b.member(cached_LA1))
    {
        throw new MismatchedCharException(cached_LA1, b, false, this);
    }
    consume();
}
/// <summary>Matches each character of the given string in sequence.</summary>
public virtual void match(string s)
{
    foreach (char expected in s)
    {
        if (cached_LA1 != expected)
        {
            throw new MismatchedCharException(cached_LA1, expected, false, this);
        }
        consume();
    }
}
/// <summary>Matches any character except the given one (char overload).</summary>
public virtual void matchNot(char c)
{
    matchNot((int)c);
}
/// <summary>Matches any character except the given one; consumes on success.</summary>
public virtual void matchNot(int c)
{
    if (cached_LA1 != c)
    {
        consume();
        return;
    }
    throw new MismatchedCharException(cached_LA1, Convert.ToChar(c), true, this);
}
/// <summary>Matches a character in the inclusive range [c1, c2].</summary>
public virtual void matchRange(int c1, int c2)
{
    if (c1 <= cached_LA1 && cached_LA1 <= c2)
    {
        consume();
        return;
    }
    throw new MismatchedCharException(cached_LA1, Convert.ToChar(c1), Convert.ToChar(c2), false, this);
}
/// <summary>Matches a character in the inclusive range [c1, c2] (char overload).</summary>
public virtual void matchRange(char c1, char c2)
{
    matchRange((int)c1, (int)c2);
}
/// <summary>Records a newline: next line, column reset to 1.</summary>
public virtual void newline()
{
    inputState.line += 1;
    inputState.column = 1;
}
/*advance the current column number by an appropriate amount
* according to tab size. This method is called from consume().
*/
public virtual void tab()
{
    int current = getColumn();
    // Index of the next tab stop, then convert back to a 1-based column.
    int nextStop = ((current - 1) / tabsize) + 1;
    setColumn(nextStop * tabsize + 1);
}
/// <summary>Sets the tab width used by tab().</summary>
public virtual void setTabSize(int size)
{
    tabsize = size;
}
/// <summary>Returns the tab width used by tab().</summary>
public virtual int getTabSize()
{
    return tabsize;
}
/// <summary>Signals an unrecoverable scanner state with an empty message.</summary>
public virtual void panic()
{
    panic("");
}
/// <summary>
/// This method is executed by ANTLR internally when it detected an illegal
/// state that cannot be recovered from.
/// The previous implementation of this method called <see cref="Environment.Exit"/>
/// and writes directly to <see cref="Console.Error"/>, which is usually not
/// appropriate when a translator is embedded into a larger application.
/// </summary>
/// <param name="s">Error message.</param>
public virtual void panic(string s)
{
    throw new ANTLRPanicException("CharScanner::panic: " + s);
}
/*Parser error-reporting function can be overridden in subclass */
public virtual void reportError(RecognitionException ex)
{
    Console.Error.WriteLine(ex);
}
/*Parser error-reporting function can be overridden in subclass */
public virtual void reportError(string s)
{
    // Prefix the message with the filename when one is known.
    Console.Error.WriteLine(getFilename() == null
        ? "error: " + s
        : getFilename() + ": error: " + s);
}
/*Parser warning-reporting function can be overridden in subclass */
public virtual void reportWarning(string s)
{
    // Prefix the message with the filename when one is known.
    Console.Error.WriteLine(getFilename() == null
        ? "warning: " + s
        : getFilename() + ": warning: " + s);
}
/// <summary>
/// Re-primes the two-character lookahead cache from the current input
/// position, folding to lower case when the scanner is case-insensitive.
/// </summary>
public virtual void refresh()
{
    cached_LA2 = caseSensitive ? inputState.input.LA(2) : toLower(inputState.input.LA(2));
    cached_LA1 = caseSensitive ? inputState.input.LA(1) : toLower(inputState.input.LA(1));
}
/// <summary>
/// Resets the scanner to read from a new input buffer: clears any
/// partially-built token text and trace depth, rebinds the input,
/// and re-primes the lookahead cache.
/// </summary>
public virtual void resetState(InputBuffer ib)
{
    text.Length = 0;
    traceDepth = 0;
    inputState.resetInput(ib);
    refresh();
}
/// <summary>Resets the scanner to read from a byte stream.</summary>
public void resetState(Stream s)
{
    resetState(new ByteBuffer(s));
}
/// <summary>Resets the scanner to read from a text reader.</summary>
public void resetState(TextReader tr)
{
    resetState(new CharBuffer(tr));
}
/// <summary>
/// Clears the token text buffer and records the current position as the
/// start of the next token.
/// </summary>
public virtual void resetText()
{
    text.Length = 0;
    inputState.tokenStartColumn = inputState.column;
    inputState.tokenStartLine = inputState.line;
}
/// <summary>
/// Rewinds the input to a position previously returned by mark() and
/// re-primes the two-character lookahead cache.
/// </summary>
public virtual void rewind(int pos)
{
    inputState.input.rewind(pos);
    // refresh() performs exactly the cache re-priming that was duplicated
    // inline here (and in setCaseSensitive); reuse it.
    refresh();
}
/// <summary>
/// Switches case sensitivity and re-primes the lookahead cache so the
/// cached characters reflect the new mode.
/// </summary>
public virtual void setCaseSensitive(bool t)
{
    caseSensitive = t;
    // refresh() contains the cache re-priming logic that was previously
    // duplicated inline here; reuse it.
    refresh();
}
/// <summary>Filter mode: commits to (or releases) the current scan path.</summary>
public virtual void setCommitToPath(bool commit)
{
    commitToPath = commit;
}
public virtual void setFilename(string f)
{
    inputState.filename = f;
}
public virtual void setLine(int line)
{
    inputState.line = line;
}
/// <summary>Replaces the current token text (also resets the token start position).</summary>
public virtual void setText(string s)
{
    resetText();
    text.Append(s);
}
/// <summary>Selects the token type to create by its fully qualified type name (reflection).</summary>
public virtual void setTokenObjectClass(string cl)
{
    this.tokenCreator = new ReflectionBasedTokenCreator(this, cl);
}
/// <summary>Selects the factory used to create token objects.</summary>
public virtual void setTokenCreator(TokenCreator tokenCreator)
{
    this.tokenCreator = tokenCreator;
}
// Test the token text against the literals table
// Override this method to perform a different literals test
/// <summary>
/// Returns the token type registered for the current token text in the
/// literals table, or <paramref name="ttype"/> when the text is empty or
/// not a known literal.
/// </summary>
public virtual int testLiteralsTable(int ttype)
{
    string tokenText = text.ToString();
    // string.IsNullOrEmpty replaces the separate null / string.Empty comparisons.
    if (string.IsNullOrEmpty(tokenText))
        return ttype;
    object typeAsObject = literals[tokenText];
    return (typeAsObject == null) ? ttype : ((int)typeAsObject);
}
/*Test the text passed in against the literals table
* Override this method to perform a different literals test
* This is used primarily when you want to test a portion of
* a token.
*/
public virtual int testLiteralsTable(string someText, int ttype)
{
    if (string.IsNullOrEmpty(someText))
        return ttype;
    object typeAsObject = literals[someText];
    return (typeAsObject == null) ? ttype : ((int)typeAsObject);
}
// Lower-cases a character code using the invariant culture.
// Override this method to get more specific case handling.
public virtual char toLower(int c)
{
    char ch = Convert.ToChar(c);
    return Char.ToLower(ch, System.Globalization.CultureInfo.InvariantCulture);
}
// Writes one space per nesting level to align trace output.
public virtual void traceIndent()
{
    for (int level = 0; level < traceDepth; level++)
    {
        Console.Out.Write(" ");
    }
}

// Logs entry into a lexer rule, increasing the trace nesting depth.
public virtual void traceIn(string rname)
{
    traceDepth++;
    traceIndent();
    Console.Out.WriteLine("> lexer " + rname + "; c==" + LA(1));
}

// Logs exit from a lexer rule, decreasing the trace nesting depth.
public virtual void traceOut(string rname)
{
    traceIndent();
    Console.Out.WriteLine("< lexer " + rname + "; c==" + LA(1));
    traceDepth--;
}
/*This method is called by YourLexer.nextToken() when the lexer has
 * hit EOF condition. EOF is NOT a character.
 * This method is not called if EOF is reached during
 * syntactic predicate evaluation or during evaluation
 * of normal lexical rules, which presumably would be
 * an IOException. This traps the "normal" EOF condition.
 *
 * uponEOF() is called after the complete evaluation of
 * the previous token and only if your parser asks
 * for another token beyond that last non-EOF token.
 *
 * You might want to throw token or char stream exceptions
 * like: "Heh, premature eof" or a retry stream exception
 * ("I found the end of this file, go back to referencing file").
 */
public virtual void uponEOF()
{
    // Intentionally empty: the default lexer needs no EOF bookkeeping.
    // Subclasses override this to raise end-of-input exceptions.
}
/// <summary>
/// TokenCreator that instantiates tokens by reflection from a fully
/// qualified type name, resolved against every loaded assembly.
/// </summary>
private class ReflectionBasedTokenCreator : TokenCreator
{
    protected ReflectionBasedTokenCreator() { }

    public ReflectionBasedTokenCreator(CharScanner owner, string tokenTypeName)
    {
        this.owner = owner;
        SetTokenType(tokenTypeName);
    }

#pragma warning disable 0414
    // Stored at construction but never read afterwards; kept for compatibility.
    private CharScanner owner;
#pragma warning restore 0414

    /// <summary>
    /// The fully qualified name of the Token type to create.
    /// </summary>
    private string tokenTypeName;

    /// <summary>
    /// Type object used as a template for creating tokens by reflection.
    /// </summary>
    private Type tokenTypeObject;

    /// <summary>
    /// Resolves <paramref name="tokenTypeName"/> against every assembly in
    /// the current AppDomain and stores the resulting <see cref="Type"/>.
    /// Throws <see cref="TypeLoadException"/> when the type cannot be found.
    /// </summary>
    private void SetTokenType(string tokenTypeName)
    {
        this.tokenTypeName = tokenTypeName;
        foreach (Assembly assem in AppDomain.CurrentDomain.GetAssemblies())
        {
            try
            {
                tokenTypeObject = assem.GetType(tokenTypeName);
                if (tokenTypeObject != null)
                {
                    break;
                }
            }
            catch
            {
                // NOTE(review): a failure in any single assembly aborts the
                // whole scan instead of trying the remaining assemblies —
                // confirm this is the intended behavior.
                throw new TypeLoadException("Unable to load Type for Token class '" + tokenTypeName + "'");
            }
        }
        if (tokenTypeObject == null)
            throw new TypeLoadException("Unable to load Type for Token class '" + tokenTypeName + "'");
    }

    /// <summary>
    /// Returns the fully qualified name of the Token type that this
    /// class creates.
    /// </summary>
    public override string TokenTypeName
    {
        get
        {
            return tokenTypeName;
        }
    }

    /// <summary>
    /// Constructs a <see cref="Token"/> instance; returns null when the
    /// resolved type cannot be instantiated.
    /// </summary>
    public override IToken Create()
    {
        IToken newToken = null;
        try
        {
            newToken = (Token)Activator.CreateInstance(tokenTypeObject);
        }
        catch
        {
            // suppress exception: the caller receives null on failure
        }
        return newToken;
    }
}
}
}
| |
using System;
using System.CodeDom.Compiler;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Globalization;
using System.Linq;
using System.Text;
namespace EduHub.Data.Entities
{
/// <summary>
/// Payroll Transaction History Data Set
/// </summary>
[GeneratedCode("EduHub Data", "0.9")]
public sealed partial class PEFHDataSet : EduHubDataSet<PEFH>
{
/// <inheritdoc />
public override string Name => "PEFH";

/// <inheritdoc />
public override bool SupportsEntityLastModified => true;
/// <summary>
/// Creates the PEFH data set and wires up its lazily-built lookup indexes.
/// </summary>
/// <param name="Context">The owning EduHub context</param>
internal PEFHDataSet(EduHubContext Context)
    : base(Context)
{
    // Each index is materialized on first use from the loaded data set.
    Index_CODE = new Lazy<Dictionary<string, IReadOnlyList<PEFH>>>(() => this.ToGroupedDictionary(i => i.CODE));
    Index_INITIATIVE = new Lazy<NullDictionary<string, IReadOnlyList<PEFH>>>(() => this.ToGroupedNullDictionary(i => i.INITIATIVE));
    Index_PAY_STEP = new Lazy<NullDictionary<short?, IReadOnlyList<PEFH>>>(() => this.ToGroupedNullDictionary(i => i.PAY_STEP));
    Index_PAYITEM = new Lazy<NullDictionary<short?, IReadOnlyList<PEFH>>>(() => this.ToGroupedNullDictionary(i => i.PAYITEM));
    Index_SUBPROGRAM = new Lazy<NullDictionary<string, IReadOnlyList<PEFH>>>(() => this.ToGroupedNullDictionary(i => i.SUBPROGRAM));
    Index_TID = new Lazy<Dictionary<int, PEFH>>(() => this.ToDictionary(i => i.TID));
    Index_TRCENTRE = new Lazy<NullDictionary<string, IReadOnlyList<PEFH>>>(() => this.ToGroupedNullDictionary(i => i.TRCENTRE));
}
/// <summary>
/// Matches CSV file headers to actions, used to deserialize <see cref="PEFH" />
/// </summary>
/// <param name="Headers">The CSV column headers</param>
/// <returns>An array of actions which deserialize <see cref="PEFH" /> fields for each CSV column header</returns>
internal override Action<PEFH, string>[] BuildMapper(IReadOnlyList<string> Headers)
{
    var mapper = new Action<PEFH, string>[Headers.Count];
    for (var i = 0; i < Headers.Count; i++) {
        switch (Headers[i]) {
            case "TID":
                // Numeric fields are machine-generated CSV data, so parse with the
                // invariant culture for consistency with the DateTime.ParseExact
                // calls below (CA1305).
                mapper[i] = (e, v) => e.TID = int.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "CODE":
                mapper[i] = (e, v) => e.CODE = v;
                break;
            case "PAYITEM":
                mapper[i] = (e, v) => e.PAYITEM = v == null ? (short?)null : short.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "TRCENTRE":
                mapper[i] = (e, v) => e.TRCENTRE = v;
                break;
            case "TRTYPE":
                mapper[i] = (e, v) => e.TRTYPE = v;
                break;
            case "TRBATCH":
                mapper[i] = (e, v) => e.TRBATCH = v == null ? (int?)null : int.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "TRPAYCODE":
                mapper[i] = (e, v) => e.TRPAYCODE = v == null ? (short?)null : short.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "TRPAYPERD":
                mapper[i] = (e, v) => e.TRPAYPERD = v == null ? (int?)null : int.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "TRPERD":
                mapper[i] = (e, v) => e.TRPERD = v == null ? (int?)null : int.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "TRDATE":
                mapper[i] = (e, v) => e.TRDATE = v == null ? (DateTime?)null : DateTime.ParseExact(v, "d/MM/yyyy h:mm:ss tt", CultureInfo.InvariantCulture);
                break;
            case "TRREF":
                mapper[i] = (e, v) => e.TRREF = v;
                break;
            case "TRCOST":
                mapper[i] = (e, v) => e.TRCOST = v == null ? (double?)null : double.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "TRQTY":
                mapper[i] = (e, v) => e.TRQTY = v == null ? (double?)null : double.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "TRAMT":
                mapper[i] = (e, v) => e.TRAMT = v == null ? (decimal?)null : decimal.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "TRPITYPE":
                mapper[i] = (e, v) => e.TRPITYPE = v;
                break;
            case "TRUNIT":
                mapper[i] = (e, v) => e.TRUNIT = v;
                break;
            case "TRDET":
                mapper[i] = (e, v) => e.TRDET = v;
                break;
            case "TRNEXTPAYDATE":
                mapper[i] = (e, v) => e.TRNEXTPAYDATE = v == null ? (DateTime?)null : DateTime.ParseExact(v, "d/MM/yyyy h:mm:ss tt", CultureInfo.InvariantCulture);
                break;
            case "TRNEXTPAYPERD":
                mapper[i] = (e, v) => e.TRNEXTPAYPERD = v == null ? (int?)null : int.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "TRPAYSPAN":
                mapper[i] = (e, v) => e.TRPAYSPAN = v == null ? (short?)null : short.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "TRTAXSPAN":
                mapper[i] = (e, v) => e.TRTAXSPAN = v == null ? (double?)null : double.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "PNNEXTPAYDATE":
                mapper[i] = (e, v) => e.PNNEXTPAYDATE = v == null ? (DateTime?)null : DateTime.ParseExact(v, "d/MM/yyyy h:mm:ss tt", CultureInfo.InvariantCulture);
                break;
            case "SUPER_FUND":
                mapper[i] = (e, v) => e.SUPER_FUND = v;
                break;
            case "SUPER_MEMBER":
                mapper[i] = (e, v) => e.SUPER_MEMBER = v;
                break;
            case "WORKED_HOURS":
                mapper[i] = (e, v) => e.WORKED_HOURS = v == null ? (double?)null : double.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "PAY_STEP":
                mapper[i] = (e, v) => e.PAY_STEP = v == null ? (short?)null : short.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "TRNETT":
                mapper[i] = (e, v) => e.TRNETT = v == null ? (decimal?)null : decimal.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "GST_AMOUNT":
                mapper[i] = (e, v) => e.GST_AMOUNT = v == null ? (decimal?)null : decimal.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "TRGROSS":
                mapper[i] = (e, v) => e.TRGROSS = v == null ? (decimal?)null : decimal.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "PAYSIGNTYPE":
                mapper[i] = (e, v) => e.PAYSIGNTYPE = v;
                break;
            case "SYSTEM_TAX":
                mapper[i] = (e, v) => e.SYSTEM_TAX = v;
                break;
            case "LINE_NO":
                mapper[i] = (e, v) => e.LINE_NO = v == null ? (int?)null : int.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "FLAG":
                mapper[i] = (e, v) => e.FLAG = v == null ? (int?)null : int.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "SUBPROGRAM":
                mapper[i] = (e, v) => e.SUBPROGRAM = v;
                break;
            case "GLPROGRAM":
                mapper[i] = (e, v) => e.GLPROGRAM = v;
                break;
            case "INITIATIVE":
                mapper[i] = (e, v) => e.INITIATIVE = v;
                break;
            case "SPLIT_PERCENT":
                mapper[i] = (e, v) => e.SPLIT_PERCENT = v == null ? (double?)null : double.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "ALTER_TRQTY":
                mapper[i] = (e, v) => e.ALTER_TRQTY = v == null ? (double?)null : double.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "ANNUALISED_LOADING":
                mapper[i] = (e, v) => e.ANNUALISED_LOADING = v == null ? (double?)null : double.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "LW_DATE":
                mapper[i] = (e, v) => e.LW_DATE = v == null ? (DateTime?)null : DateTime.ParseExact(v, "d/MM/yyyy h:mm:ss tt", CultureInfo.InvariantCulture);
                break;
            case "LW_TIME":
                mapper[i] = (e, v) => e.LW_TIME = v == null ? (short?)null : short.Parse(v, CultureInfo.InvariantCulture);
                break;
            case "LW_USER":
                mapper[i] = (e, v) => e.LW_USER = v;
                break;
            default:
                // Unknown columns are ignored rather than treated as errors.
                mapper[i] = MapperNoOp;
                break;
        }
    }

    return mapper;
}
/// <summary>
/// Merges <see cref="PEFH" /> delta entities
/// </summary>
/// <param name="Entities">Iterator for base <see cref="PEFH" /> entities</param>
/// <param name="DeltaEntities">List of delta <see cref="PEFH" /> entities</param>
/// <returns>A merged <see cref="IEnumerable{PEFH}"/> of entities</returns>
internal override IEnumerable<PEFH> ApplyDeltaEntities(IEnumerable<PEFH> Entities, List<PEFH> DeltaEntities)
{
    // TIDs present in the delta; a base entity with a matching TID is
    // considered overwritten and suppressed from the output.
    HashSet<int> Index_TID = new HashSet<int>(DeltaEntities.Select(i => i.TID));

    using (var deltaIterator = DeltaEntities.GetEnumerator())
    {
        using (var entityIterator = Entities.GetEnumerator())
        {
            while (deltaIterator.MoveNext())
            {
                // NOTE(review): the merge assumes both sequences are ordered by
                // CODE (the clustered key) — confirm against the loaders.
                var deltaClusteredKey = deltaIterator.Current.CODE;
                bool yieldEntity = false;
                // Emit base entities at or before the delta's key, skipping any
                // the delta replaces (same TID).
                while (entityIterator.MoveNext())
                {
                    var entity = entityIterator.Current;
                    bool overwritten = Index_TID.Remove(entity.TID);
                    if (entity.CODE.CompareTo(deltaClusteredKey) <= 0)
                    {
                        if (!overwritten)
                        {
                            yield return entity;
                        }
                    }
                    else
                    {
                        // First base entity past the delta key: remember to emit
                        // it after the delta entity (unless it was overwritten).
                        yieldEntity = !overwritten;
                        break;
                    }
                }
                yield return deltaIterator.Current;
                if (yieldEntity)
                {
                    yield return entityIterator.Current;
                }
            }
            // Emit any base entities remaining after the last delta.
            while (entityIterator.MoveNext())
            {
                yield return entityIterator.Current;
            }
        }
    }
}
#region Index Fields
// Lazily-built lookup indexes; populated on first access (see constructor).
private Lazy<Dictionary<string, IReadOnlyList<PEFH>>> Index_CODE;
private Lazy<NullDictionary<string, IReadOnlyList<PEFH>>> Index_INITIATIVE;
private Lazy<NullDictionary<short?, IReadOnlyList<PEFH>>> Index_PAY_STEP;
private Lazy<NullDictionary<short?, IReadOnlyList<PEFH>>> Index_PAYITEM;
private Lazy<NullDictionary<string, IReadOnlyList<PEFH>>> Index_SUBPROGRAM;
private Lazy<Dictionary<int, PEFH>> Index_TID;
private Lazy<NullDictionary<string, IReadOnlyList<PEFH>>> Index_TRCENTRE;
#endregion
#region Index Methods
/// <summary>
/// Find PEFH by CODE field
/// </summary>
/// <param name="CODE">CODE value used to find PEFH</param>
/// <returns>List of related PEFH entities</returns>
/// <exception cref="KeyNotFoundException">No match was found</exception>
public IReadOnlyList<PEFH> FindByCODE(string CODE) => Index_CODE.Value[CODE];

/// <summary>
/// Attempt to find PEFH by CODE field
/// </summary>
/// <param name="CODE">CODE value used to find PEFH</param>
/// <param name="Value">List of related PEFH entities</param>
/// <returns>True if the list of related PEFH entities is found</returns>
public bool TryFindByCODE(string CODE, out IReadOnlyList<PEFH> Value) => Index_CODE.Value.TryGetValue(CODE, out Value);

/// <summary>
/// Attempt to find PEFH by CODE field
/// </summary>
/// <param name="CODE">CODE value used to find PEFH</param>
/// <returns>List of related PEFH entities, or null if not found</returns>
public IReadOnlyList<PEFH> TryFindByCODE(string CODE)
{
    IReadOnlyList<PEFH> result;
    return Index_CODE.Value.TryGetValue(CODE, out result) ? result : null;
}
/// <summary>
/// Find PEFH by INITIATIVE field
/// </summary>
/// <param name="INITIATIVE">INITIATIVE value used to find PEFH</param>
/// <returns>List of related PEFH entities</returns>
public IReadOnlyList<PEFH> FindByINITIATIVE(string INITIATIVE) => Index_INITIATIVE.Value[INITIATIVE];

/// <summary>
/// Attempt to find PEFH by INITIATIVE field
/// </summary>
/// <param name="INITIATIVE">INITIATIVE value used to find PEFH</param>
/// <param name="Value">List of related PEFH entities</param>
/// <returns>True if the list of related PEFH entities is found</returns>
public bool TryFindByINITIATIVE(string INITIATIVE, out IReadOnlyList<PEFH> Value) => Index_INITIATIVE.Value.TryGetValue(INITIATIVE, out Value);

/// <summary>
/// Attempt to find PEFH by INITIATIVE field
/// </summary>
/// <param name="INITIATIVE">INITIATIVE value used to find PEFH</param>
/// <returns>List of related PEFH entities, or null if not found</returns>
public IReadOnlyList<PEFH> TryFindByINITIATIVE(string INITIATIVE)
{
    IReadOnlyList<PEFH> result;
    return Index_INITIATIVE.Value.TryGetValue(INITIATIVE, out result) ? result : null;
}
/// <summary>
/// Find PEFH by PAY_STEP field
/// </summary>
/// <param name="PAY_STEP">PAY_STEP value used to find PEFH</param>
/// <returns>List of related PEFH entities</returns>
public IReadOnlyList<PEFH> FindByPAY_STEP(short? PAY_STEP) => Index_PAY_STEP.Value[PAY_STEP];

/// <summary>
/// Attempt to find PEFH by PAY_STEP field
/// </summary>
/// <param name="PAY_STEP">PAY_STEP value used to find PEFH</param>
/// <param name="Value">List of related PEFH entities</param>
/// <returns>True if the list of related PEFH entities is found</returns>
public bool TryFindByPAY_STEP(short? PAY_STEP, out IReadOnlyList<PEFH> Value) => Index_PAY_STEP.Value.TryGetValue(PAY_STEP, out Value);

/// <summary>
/// Attempt to find PEFH by PAY_STEP field
/// </summary>
/// <param name="PAY_STEP">PAY_STEP value used to find PEFH</param>
/// <returns>List of related PEFH entities, or null if not found</returns>
public IReadOnlyList<PEFH> TryFindByPAY_STEP(short? PAY_STEP)
{
    IReadOnlyList<PEFH> result;
    return Index_PAY_STEP.Value.TryGetValue(PAY_STEP, out result) ? result : null;
}
/// <summary>
/// Find PEFH by PAYITEM field
/// </summary>
/// <param name="PAYITEM">PAYITEM value used to find PEFH</param>
/// <returns>List of related PEFH entities</returns>
public IReadOnlyList<PEFH> FindByPAYITEM(short? PAYITEM) => Index_PAYITEM.Value[PAYITEM];

/// <summary>
/// Attempt to find PEFH by PAYITEM field
/// </summary>
/// <param name="PAYITEM">PAYITEM value used to find PEFH</param>
/// <param name="Value">List of related PEFH entities</param>
/// <returns>True if the list of related PEFH entities is found</returns>
public bool TryFindByPAYITEM(short? PAYITEM, out IReadOnlyList<PEFH> Value) => Index_PAYITEM.Value.TryGetValue(PAYITEM, out Value);

/// <summary>
/// Attempt to find PEFH by PAYITEM field
/// </summary>
/// <param name="PAYITEM">PAYITEM value used to find PEFH</param>
/// <returns>List of related PEFH entities, or null if not found</returns>
public IReadOnlyList<PEFH> TryFindByPAYITEM(short? PAYITEM)
{
    IReadOnlyList<PEFH> result;
    return Index_PAYITEM.Value.TryGetValue(PAYITEM, out result) ? result : null;
}
/// <summary>
/// Find PEFH by SUBPROGRAM field
/// </summary>
/// <param name="SUBPROGRAM">SUBPROGRAM value used to find PEFH</param>
/// <returns>List of related PEFH entities</returns>
public IReadOnlyList<PEFH> FindBySUBPROGRAM(string SUBPROGRAM) => Index_SUBPROGRAM.Value[SUBPROGRAM];

/// <summary>
/// Attempt to find PEFH by SUBPROGRAM field
/// </summary>
/// <param name="SUBPROGRAM">SUBPROGRAM value used to find PEFH</param>
/// <param name="Value">List of related PEFH entities</param>
/// <returns>True if the list of related PEFH entities is found</returns>
public bool TryFindBySUBPROGRAM(string SUBPROGRAM, out IReadOnlyList<PEFH> Value) => Index_SUBPROGRAM.Value.TryGetValue(SUBPROGRAM, out Value);

/// <summary>
/// Attempt to find PEFH by SUBPROGRAM field
/// </summary>
/// <param name="SUBPROGRAM">SUBPROGRAM value used to find PEFH</param>
/// <returns>List of related PEFH entities, or null if not found</returns>
public IReadOnlyList<PEFH> TryFindBySUBPROGRAM(string SUBPROGRAM)
{
    IReadOnlyList<PEFH> result;
    return Index_SUBPROGRAM.Value.TryGetValue(SUBPROGRAM, out result) ? result : null;
}
/// <summary>
/// Find PEFH by TID field
/// </summary>
/// <param name="TID">TID value used to find PEFH</param>
/// <returns>Related PEFH entity</returns>
/// <exception cref="KeyNotFoundException">No match was found</exception>
public PEFH FindByTID(int TID) => Index_TID.Value[TID];

/// <summary>
/// Attempt to find PEFH by TID field
/// </summary>
/// <param name="TID">TID value used to find PEFH</param>
/// <param name="Value">Related PEFH entity</param>
/// <returns>True if the related PEFH entity is found</returns>
public bool TryFindByTID(int TID, out PEFH Value) => Index_TID.Value.TryGetValue(TID, out Value);

/// <summary>
/// Attempt to find PEFH by TID field
/// </summary>
/// <param name="TID">TID value used to find PEFH</param>
/// <returns>Related PEFH entity, or null if not found</returns>
public PEFH TryFindByTID(int TID)
{
    PEFH result;
    return Index_TID.Value.TryGetValue(TID, out result) ? result : null;
}
/// <summary>
/// Find PEFH by TRCENTRE field
/// </summary>
/// <param name="TRCENTRE">TRCENTRE value used to find PEFH</param>
/// <returns>List of related PEFH entities</returns>
public IReadOnlyList<PEFH> FindByTRCENTRE(string TRCENTRE) => Index_TRCENTRE.Value[TRCENTRE];

/// <summary>
/// Attempt to find PEFH by TRCENTRE field
/// </summary>
/// <param name="TRCENTRE">TRCENTRE value used to find PEFH</param>
/// <param name="Value">List of related PEFH entities</param>
/// <returns>True if the list of related PEFH entities is found</returns>
public bool TryFindByTRCENTRE(string TRCENTRE, out IReadOnlyList<PEFH> Value) => Index_TRCENTRE.Value.TryGetValue(TRCENTRE, out Value);

/// <summary>
/// Attempt to find PEFH by TRCENTRE field
/// </summary>
/// <param name="TRCENTRE">TRCENTRE value used to find PEFH</param>
/// <returns>List of related PEFH entities, or null if not found</returns>
public IReadOnlyList<PEFH> TryFindByTRCENTRE(string TRCENTRE)
{
    IReadOnlyList<PEFH> result;
    return Index_TRCENTRE.Value.TryGetValue(TRCENTRE, out result) ? result : null;
}
#endregion
#region SQL Integration
/// <summary>
/// Returns a <see cref="SqlCommand"/> which checks for the existence of a PEFH table, and if not found, creates the table and associated indexes.
/// </summary>
/// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
/// <returns>A <see cref="SqlCommand"/> which (when executed) creates the PEFH table and its indexes if missing</returns>
public override SqlCommand GetSqlCreateTableCommand(SqlConnection SqlConnection)
{
    // Column types mirror the CSV mapper: datetime for date fields, money for
    // decimal fields, float for double fields. TID is the non-clustered PK;
    // CODE carries the clustered index (matching ApplyDeltaEntities ordering).
    return new SqlCommand(
        connection: SqlConnection,
        cmdText:
@"IF NOT EXISTS (SELECT * FROM dbo.sysobjects WHERE id = OBJECT_ID(N'[dbo].[PEFH]') AND OBJECTPROPERTY(id, N'IsUserTable') = 1)
BEGIN
CREATE TABLE [dbo].[PEFH](
[TID] int IDENTITY NOT NULL,
[CODE] varchar(10) NOT NULL,
[PAYITEM] smallint NULL,
[TRCENTRE] varchar(10) NULL,
[TRTYPE] varchar(1) NULL,
[TRBATCH] int NULL,
[TRPAYCODE] smallint NULL,
[TRPAYPERD] int NULL,
[TRPERD] int NULL,
[TRDATE] datetime NULL,
[TRREF] varchar(10) NULL,
[TRCOST] float NULL,
[TRQTY] float NULL,
[TRAMT] money NULL,
[TRPITYPE] varchar(1) NULL,
[TRUNIT] varchar(3) NULL,
[TRDET] varchar(30) NULL,
[TRNEXTPAYDATE] datetime NULL,
[TRNEXTPAYPERD] int NULL,
[TRPAYSPAN] smallint NULL,
[TRTAXSPAN] float NULL,
[PNNEXTPAYDATE] datetime NULL,
[SUPER_FUND] varchar(10) NULL,
[SUPER_MEMBER] varchar(20) NULL,
[WORKED_HOURS] float NULL,
[PAY_STEP] smallint NULL,
[TRNETT] money NULL,
[GST_AMOUNT] money NULL,
[TRGROSS] money NULL,
[PAYSIGNTYPE] varchar(1) NULL,
[SYSTEM_TAX] varchar(1) NULL,
[LINE_NO] int NULL,
[FLAG] int NULL,
[SUBPROGRAM] varchar(4) NULL,
[GLPROGRAM] varchar(3) NULL,
[INITIATIVE] varchar(3) NULL,
[SPLIT_PERCENT] float NULL,
[ALTER_TRQTY] float NULL,
[ANNUALISED_LOADING] float NULL,
[LW_DATE] datetime NULL,
[LW_TIME] smallint NULL,
[LW_USER] varchar(128) NULL,
CONSTRAINT [PEFH_Index_TID] PRIMARY KEY NONCLUSTERED (
[TID] ASC
)
);
CREATE CLUSTERED INDEX [PEFH_Index_CODE] ON [dbo].[PEFH]
(
[CODE] ASC
);
CREATE NONCLUSTERED INDEX [PEFH_Index_INITIATIVE] ON [dbo].[PEFH]
(
[INITIATIVE] ASC
);
CREATE NONCLUSTERED INDEX [PEFH_Index_PAY_STEP] ON [dbo].[PEFH]
(
[PAY_STEP] ASC
);
CREATE NONCLUSTERED INDEX [PEFH_Index_PAYITEM] ON [dbo].[PEFH]
(
[PAYITEM] ASC
);
CREATE NONCLUSTERED INDEX [PEFH_Index_SUBPROGRAM] ON [dbo].[PEFH]
(
[SUBPROGRAM] ASC
);
CREATE NONCLUSTERED INDEX [PEFH_Index_TRCENTRE] ON [dbo].[PEFH]
(
[TRCENTRE] ASC
);
END");
}
/// <summary>
/// Returns a <see cref="SqlCommand"/> which disables all non-clustered table indexes.
/// Typically called before <see cref="SqlBulkCopy"/> to improve performance.
/// <see cref="GetSqlRebuildIndexesCommand(SqlConnection)"/> should be called to rebuild and enable indexes after performance sensitive work is completed.
/// </summary>
/// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
/// <returns>A <see cref="SqlCommand"/> which (when executed) will disable all non-clustered table indexes</returns>
public override SqlCommand GetSqlDisableIndexesCommand(SqlConnection SqlConnection)
{
    // NOTE(review): the clustered index (PEFH_Index_CODE) is deliberately not
    // listed — presumably because disabling a clustered index would make the
    // table inaccessible; confirm against the other generated data sets.
    return new SqlCommand(
        connection: SqlConnection,
        cmdText:
@"IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PEFH]') AND name = N'PEFH_Index_INITIATIVE')
    ALTER INDEX [PEFH_Index_INITIATIVE] ON [dbo].[PEFH] DISABLE;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PEFH]') AND name = N'PEFH_Index_PAY_STEP')
    ALTER INDEX [PEFH_Index_PAY_STEP] ON [dbo].[PEFH] DISABLE;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PEFH]') AND name = N'PEFH_Index_PAYITEM')
    ALTER INDEX [PEFH_Index_PAYITEM] ON [dbo].[PEFH] DISABLE;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PEFH]') AND name = N'PEFH_Index_SUBPROGRAM')
    ALTER INDEX [PEFH_Index_SUBPROGRAM] ON [dbo].[PEFH] DISABLE;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PEFH]') AND name = N'PEFH_Index_TID')
    ALTER INDEX [PEFH_Index_TID] ON [dbo].[PEFH] DISABLE;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PEFH]') AND name = N'PEFH_Index_TRCENTRE')
    ALTER INDEX [PEFH_Index_TRCENTRE] ON [dbo].[PEFH] DISABLE;
");
}
/// <summary>
/// Returns a <see cref="SqlCommand"/> which rebuilds and enables all non-clustered table indexes.
/// </summary>
/// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
/// <returns>A <see cref="SqlCommand"/> which (when executed) will rebuild and enable all non-clustered table indexes</returns>
public override SqlCommand GetSqlRebuildIndexesCommand(SqlConnection SqlConnection)
{
    // Counterpart of GetSqlDisableIndexesCommand: REBUILD re-enables each
    // disabled index; the same index list is used in both commands.
    return new SqlCommand(
        connection: SqlConnection,
        cmdText:
@"IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PEFH]') AND name = N'PEFH_Index_INITIATIVE')
    ALTER INDEX [PEFH_Index_INITIATIVE] ON [dbo].[PEFH] REBUILD PARTITION = ALL;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PEFH]') AND name = N'PEFH_Index_PAY_STEP')
    ALTER INDEX [PEFH_Index_PAY_STEP] ON [dbo].[PEFH] REBUILD PARTITION = ALL;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PEFH]') AND name = N'PEFH_Index_PAYITEM')
    ALTER INDEX [PEFH_Index_PAYITEM] ON [dbo].[PEFH] REBUILD PARTITION = ALL;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PEFH]') AND name = N'PEFH_Index_SUBPROGRAM')
    ALTER INDEX [PEFH_Index_SUBPROGRAM] ON [dbo].[PEFH] REBUILD PARTITION = ALL;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PEFH]') AND name = N'PEFH_Index_TID')
    ALTER INDEX [PEFH_Index_TID] ON [dbo].[PEFH] REBUILD PARTITION = ALL;
IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[PEFH]') AND name = N'PEFH_Index_TRCENTRE')
    ALTER INDEX [PEFH_Index_TRCENTRE] ON [dbo].[PEFH] REBUILD PARTITION = ALL;
");
}
/// <summary>
/// Returns a <see cref="SqlCommand"/> which deletes the <see cref="PEFH"/> entities passed
/// </summary>
/// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
/// <param name="Entities">The <see cref="PEFH"/> entities to be deleted</param>
/// <returns>A parameterized DELETE command keyed on the entities' TIDs</returns>
public override SqlCommand GetSqlDeleteCommand(SqlConnection SqlConnection, IEnumerable<PEFH> Entities)
{
    SqlCommand command = new SqlCommand();
    int parameterIndex = 0;
    StringBuilder builder = new StringBuilder();

    // Collect the primary keys of the entities to delete.
    List<int> Index_TID = new List<int>();
    foreach (var entity in Entities)
    {
        Index_TID.Add(entity.TID);
    }

    builder.AppendLine("DELETE [dbo].[PEFH] WHERE");
    // Index_TID
    builder.Append("[TID] IN (");
    if (Index_TID.Count == 0)
    {
        // An empty IN () list is a T-SQL syntax error; IN (NULL) matches no
        // rows, so an empty entity set yields a valid no-op DELETE.
        builder.Append("NULL");
    }
    for (int index = 0; index < Index_TID.Count; index++)
    {
        if (index != 0)
            builder.Append(", ");
        // TID values are passed as parameters, never concatenated into SQL.
        var parameterTID = $"@p{parameterIndex++}";
        builder.Append(parameterTID);
        command.Parameters.Add(parameterTID, SqlDbType.Int).Value = Index_TID[index];
    }
    builder.Append(");");

    command.Connection = SqlConnection;
    command.CommandText = builder.ToString();
    return command;
}
/// <summary>
/// Provides a <see cref="IDataReader"/> for the PEFH data set
/// </summary>
/// <returns>A <see cref="IDataReader"/> for the PEFH data set</returns>
public override EduHubDataSetDataReader<PEFH> GetDataSetDataReader() => new PEFHDataReader(Load());

/// <summary>
/// Provides a <see cref="IDataReader"/> for the PEFH data set
/// </summary>
/// <param name="Entities">The already-loaded entities to expose through the reader</param>
/// <returns>A <see cref="IDataReader"/> for the PEFH data set</returns>
public override EduHubDataSetDataReader<PEFH> GetDataSetDataReader(List<PEFH> Entities) => new PEFHDataReader(new EduHubDataSetLoadedReader<PEFH>(this, Entities));
// Modest implementation to primarily support SqlBulkCopy
private class PEFHDataReader : EduHubDataSetDataReader<PEFH>
{
// Wraps an entity reader so PEFH rows can be streamed as an IDataReader.
public PEFHDataReader(IEduHubDataSetReader<PEFH> Reader)
    : base (Reader)
{
}

// 42 columns, matching the switches in GetValue/IsDBNull/GetName.
public override int FieldCount { get { return 42; } }
// Returns the current row's value for the given column ordinal.
// Ordinal order must stay in sync with IsDBNull and GetName.
public override object GetValue(int i)
{
    switch (i)
    {
        case 0: // TID
            return Current.TID;
        case 1: // CODE
            return Current.CODE;
        case 2: // PAYITEM
            return Current.PAYITEM;
        case 3: // TRCENTRE
            return Current.TRCENTRE;
        case 4: // TRTYPE
            return Current.TRTYPE;
        case 5: // TRBATCH
            return Current.TRBATCH;
        case 6: // TRPAYCODE
            return Current.TRPAYCODE;
        case 7: // TRPAYPERD
            return Current.TRPAYPERD;
        case 8: // TRPERD
            return Current.TRPERD;
        case 9: // TRDATE
            return Current.TRDATE;
        case 10: // TRREF
            return Current.TRREF;
        case 11: // TRCOST
            return Current.TRCOST;
        case 12: // TRQTY
            return Current.TRQTY;
        case 13: // TRAMT
            return Current.TRAMT;
        case 14: // TRPITYPE
            return Current.TRPITYPE;
        case 15: // TRUNIT
            return Current.TRUNIT;
        case 16: // TRDET
            return Current.TRDET;
        case 17: // TRNEXTPAYDATE
            return Current.TRNEXTPAYDATE;
        case 18: // TRNEXTPAYPERD
            return Current.TRNEXTPAYPERD;
        case 19: // TRPAYSPAN
            return Current.TRPAYSPAN;
        case 20: // TRTAXSPAN
            return Current.TRTAXSPAN;
        case 21: // PNNEXTPAYDATE
            return Current.PNNEXTPAYDATE;
        case 22: // SUPER_FUND
            return Current.SUPER_FUND;
        case 23: // SUPER_MEMBER
            return Current.SUPER_MEMBER;
        case 24: // WORKED_HOURS
            return Current.WORKED_HOURS;
        case 25: // PAY_STEP
            return Current.PAY_STEP;
        case 26: // TRNETT
            return Current.TRNETT;
        case 27: // GST_AMOUNT
            return Current.GST_AMOUNT;
        case 28: // TRGROSS
            return Current.TRGROSS;
        case 29: // PAYSIGNTYPE
            return Current.PAYSIGNTYPE;
        case 30: // SYSTEM_TAX
            return Current.SYSTEM_TAX;
        case 31: // LINE_NO
            return Current.LINE_NO;
        case 32: // FLAG
            return Current.FLAG;
        case 33: // SUBPROGRAM
            return Current.SUBPROGRAM;
        case 34: // GLPROGRAM
            return Current.GLPROGRAM;
        case 35: // INITIATIVE
            return Current.INITIATIVE;
        case 36: // SPLIT_PERCENT
            return Current.SPLIT_PERCENT;
        case 37: // ALTER_TRQTY
            return Current.ALTER_TRQTY;
        case 38: // ANNUALISED_LOADING
            return Current.ANNUALISED_LOADING;
        case 39: // LW_DATE
            return Current.LW_DATE;
        case 40: // LW_TIME
            return Current.LW_TIME;
        case 41: // LW_USER
            return Current.LW_USER;
        default:
            throw new ArgumentOutOfRangeException(nameof(i));
    }
}
// Reports whether the current row's value at the given ordinal is null.
// Ordinals 0 (TID) and 1 (CODE) are NOT NULL columns and so fall through to
// the default (false); out-of-range ordinals also return false here (unlike
// GetValue, which throws).
public override bool IsDBNull(int i)
{
    switch (i)
    {
        case 2: // PAYITEM
            return Current.PAYITEM == null;
        case 3: // TRCENTRE
            return Current.TRCENTRE == null;
        case 4: // TRTYPE
            return Current.TRTYPE == null;
        case 5: // TRBATCH
            return Current.TRBATCH == null;
        case 6: // TRPAYCODE
            return Current.TRPAYCODE == null;
        case 7: // TRPAYPERD
            return Current.TRPAYPERD == null;
        case 8: // TRPERD
            return Current.TRPERD == null;
        case 9: // TRDATE
            return Current.TRDATE == null;
        case 10: // TRREF
            return Current.TRREF == null;
        case 11: // TRCOST
            return Current.TRCOST == null;
        case 12: // TRQTY
            return Current.TRQTY == null;
        case 13: // TRAMT
            return Current.TRAMT == null;
        case 14: // TRPITYPE
            return Current.TRPITYPE == null;
        case 15: // TRUNIT
            return Current.TRUNIT == null;
        case 16: // TRDET
            return Current.TRDET == null;
        case 17: // TRNEXTPAYDATE
            return Current.TRNEXTPAYDATE == null;
        case 18: // TRNEXTPAYPERD
            return Current.TRNEXTPAYPERD == null;
        case 19: // TRPAYSPAN
            return Current.TRPAYSPAN == null;
        case 20: // TRTAXSPAN
            return Current.TRTAXSPAN == null;
        case 21: // PNNEXTPAYDATE
            return Current.PNNEXTPAYDATE == null;
        case 22: // SUPER_FUND
            return Current.SUPER_FUND == null;
        case 23: // SUPER_MEMBER
            return Current.SUPER_MEMBER == null;
        case 24: // WORKED_HOURS
            return Current.WORKED_HOURS == null;
        case 25: // PAY_STEP
            return Current.PAY_STEP == null;
        case 26: // TRNETT
            return Current.TRNETT == null;
        case 27: // GST_AMOUNT
            return Current.GST_AMOUNT == null;
        case 28: // TRGROSS
            return Current.TRGROSS == null;
        case 29: // PAYSIGNTYPE
            return Current.PAYSIGNTYPE == null;
        case 30: // SYSTEM_TAX
            return Current.SYSTEM_TAX == null;
        case 31: // LINE_NO
            return Current.LINE_NO == null;
        case 32: // FLAG
            return Current.FLAG == null;
        case 33: // SUBPROGRAM
            return Current.SUBPROGRAM == null;
        case 34: // GLPROGRAM
            return Current.GLPROGRAM == null;
        case 35: // INITIATIVE
            return Current.INITIATIVE == null;
        case 36: // SPLIT_PERCENT
            return Current.SPLIT_PERCENT == null;
        case 37: // ALTER_TRQTY
            return Current.ALTER_TRQTY == null;
        case 38: // ANNUALISED_LOADING
            return Current.ANNUALISED_LOADING == null;
        case 39: // LW_DATE
            return Current.LW_DATE == null;
        case 40: // LW_TIME
            return Current.LW_TIME == null;
        case 41: // LW_USER
            return Current.LW_USER == null;
        default:
            return false;
    }
}
/// <summary>
/// Returns the column name for the given zero-based <paramref name="ordinal"/>.
/// </summary>
/// <param name="ordinal">Zero-based column index (0..41).</param>
/// <returns>The column name.</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="ordinal"/> is outside 0..41.</exception>
public override string GetName(int ordinal)
{
    // Column names indexed by ordinal; the order must stay in sync with GetOrdinal.
    string[] columnNames =
    {
        "TID", "CODE", "PAYITEM", "TRCENTRE", "TRTYPE", "TRBATCH",
        "TRPAYCODE", "TRPAYPERD", "TRPERD", "TRDATE", "TRREF", "TRCOST",
        "TRQTY", "TRAMT", "TRPITYPE", "TRUNIT", "TRDET", "TRNEXTPAYDATE",
        "TRNEXTPAYPERD", "TRPAYSPAN", "TRTAXSPAN", "PNNEXTPAYDATE",
        "SUPER_FUND", "SUPER_MEMBER", "WORKED_HOURS", "PAY_STEP", "TRNETT",
        "GST_AMOUNT", "TRGROSS", "PAYSIGNTYPE", "SYSTEM_TAX", "LINE_NO",
        "FLAG", "SUBPROGRAM", "GLPROGRAM", "INITIATIVE", "SPLIT_PERCENT",
        "ALTER_TRQTY", "ANNUALISED_LOADING", "LW_DATE", "LW_TIME", "LW_USER"
    };

    if (ordinal < 0 || ordinal >= columnNames.Length)
    {
        // Same exception type and parameter name as the switch default produced.
        throw new ArgumentOutOfRangeException(nameof(ordinal));
    }

    return columnNames[ordinal];
}
/// <summary>
/// Returns the zero-based ordinal of the column with the given <paramref name="name"/>.
/// The lookup is case-sensitive (ordinal string comparison), matching the original switch.
/// </summary>
/// <param name="name">Exact column name, e.g. "TRDATE".</param>
/// <returns>The zero-based column ordinal (0..41).</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="name"/> is null or not a known column.</exception>
public override int GetOrdinal(string name)
{
    // Ordinal lookup table; the order must stay in sync with GetName.
    string[] columnNames =
    {
        "TID", "CODE", "PAYITEM", "TRCENTRE", "TRTYPE", "TRBATCH",
        "TRPAYCODE", "TRPAYPERD", "TRPERD", "TRDATE", "TRREF", "TRCOST",
        "TRQTY", "TRAMT", "TRPITYPE", "TRUNIT", "TRDET", "TRNEXTPAYDATE",
        "TRNEXTPAYPERD", "TRPAYSPAN", "TRTAXSPAN", "PNNEXTPAYDATE",
        "SUPER_FUND", "SUPER_MEMBER", "WORKED_HOURS", "PAY_STEP", "TRNETT",
        "GST_AMOUNT", "TRGROSS", "PAYSIGNTYPE", "SYSTEM_TAX", "LINE_NO",
        "FLAG", "SUBPROGRAM", "GLPROGRAM", "INITIATIVE", "SPLIT_PERCENT",
        "ALTER_TRQTY", "ANNUALISED_LOADING", "LW_DATE", "LW_TIME", "LW_USER"
    };

    // Array.IndexOf uses default (ordinal, case-sensitive) string equality,
    // the same semantics as a switch over string constants; null yields -1.
    int ordinal = Array.IndexOf(columnNames, name);

    if (ordinal < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(name));
    }

    return ordinal;
}
}
#endregion
}
}
| |
using Spatial4n.Core.Context;
using Spatial4n.Core.Shapes;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
namespace Lucene.Net.Spatial.Prefix.Tree
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// A <see cref="SpatialPrefixTree"/> which uses a
/// <a href="http://en.wikipedia.org/wiki/Quadtree">quad tree</a> in which an
/// indexed term will be generated for each cell, 'A', 'B', 'C', 'D'.
///
/// @lucene.experimental
/// </summary>
public class QuadPrefixTree : SpatialPrefixTree
{
    #region Nested type: Factory
    /// <summary>
    /// Factory for creating <see cref="QuadPrefixTree"/> instances with useful defaults
    /// </summary>
    public class Factory : SpatialPrefixTreeFactory
    {
        protected internal override int GetLevelForDistance(double degrees)
        {
            // Probe with a maximally-deep temporary tree so the computed level is
            // not capped by a user-configured m_maxLevels value.
            var grid = new QuadPrefixTree(m_ctx, MAX_LEVELS_POSSIBLE);
            return grid.GetLevelForDistance(degrees);
        }

        protected internal override SpatialPrefixTree NewSPT()
        {
            // Fall back to the deepest possible tree when no max level was configured.
            return new QuadPrefixTree(m_ctx, m_maxLevels.HasValue ? m_maxLevels.Value : MAX_LEVELS_POSSIBLE);
        }
    }
    #endregion

    public const int MAX_LEVELS_POSSIBLE = 50;//not really sure how big this should be
    public const int DEFAULT_MAX_LEVELS = 12;

    // World bounds of the grid, taken from the rectangle passed to the constructor.
    private readonly double xmin;
    private readonly double xmax;
    private readonly double ymin;
    private readonly double ymax;
    // Midpoint of the world bounds; the root cell splits here.
    private readonly double xmid;
    private readonly double ymid;
    // Total width/height of the world bounds.
    private readonly double gridW;
    public double GridH => gridH;
    private readonly double gridH;

    // Per-level lookup tables filled in by the constructor; index i describes
    // the cells one level below the root split at depth i (see GetLevelForDistance).
    internal readonly double[] levelW;  // cell width at each level
    internal readonly double[] levelH;  // cell height at each level
    internal readonly int[] levelS; // side
    internal readonly int[] levelN; // number

    /// <summary>
    /// Creates a tree covering <paramref name="bounds"/> with <paramref name="maxLevels"/>
    /// levels of detail, precomputing per-level cell dimensions.
    /// </summary>
    public QuadPrefixTree(SpatialContext ctx, IRectangle bounds, int maxLevels)
        : base(ctx, maxLevels)
    {
        xmin = bounds.MinX;
        xmax = bounds.MaxX;
        ymin = bounds.MinY;
        ymax = bounds.MaxY;

        levelW = new double[maxLevels];
        levelH = new double[maxLevels];
        levelS = new int[maxLevels];
        levelN = new int[maxLevels];

        gridW = xmax - xmin;
        gridH = ymax - ymin;
        this.xmid = xmin + gridW / 2.0;
        this.ymid = ymin + gridH / 2.0;

        // Level 0 halves the world along each axis (2x2 = 4 cells); each deeper
        // level halves the cell size again, so side doubles and count quadruples.
        levelW[0] = gridW / 2.0;
        levelH[0] = gridH / 2.0;
        levelS[0] = 2;
        levelN[0] = 4;
        for (int i = 1; i < levelW.Length; i++)
        {
            levelW[i] = levelW[i - 1] / 2.0;
            levelH[i] = levelH[i - 1] / 2.0;
            levelS[i] = levelS[i - 1] * 2;
            levelN[i] = levelN[i - 1] * 4;
        }
    }

    /// <summary>Creates a tree over the context's world bounds with <see cref="DEFAULT_MAX_LEVELS"/> levels.</summary>
    public QuadPrefixTree(SpatialContext ctx)
        : this(ctx, DEFAULT_MAX_LEVELS)
    {
    }

    /// <summary>Creates a tree over the context's world bounds with the given level count.</summary>
    public QuadPrefixTree(SpatialContext ctx, int maxLevels)
        : this(ctx, ctx.WorldBounds, maxLevels)
    {
    }

    /// <summary>
    /// Writes a diagnostic table of per-level cell width, height, side and cell count.
    /// </summary>
    public virtual void PrintInfo(TextWriter @out)
    {
        // Format the number to min 3 integer digits and exactly 5 fraction digits
        const string FORMAT_STR = @"000.00000";
        for (int i = 0; i < m_maxLevels; i++)
        {
            @out.WriteLine(i + "]\t" + levelW[i].ToString(FORMAT_STR) + "\t" + levelH[i].ToString(FORMAT_STR) + "\t" +
                           levelS[i] + "\t" + (levelS[i] * levelS[i]));
        }
    }

    /// <summary>
    /// Returns the smallest level whose cells are smaller than <paramref name="dist"/>
    /// in both dimensions, clamped to the tree's maximum level.
    /// </summary>
    public override int GetLevelForDistance(double dist)
    {
        if (dist == 0)//short circuit
        {
            return m_maxLevels;
        }
        for (int i = 0; i < m_maxLevels - 1; i++)
        {
            //note: level[i] is actually a lookup for level i+1
            if (dist > levelW[i] && dist > levelH[i])
            {
                return i + 1;
            }
        }
        return m_maxLevels;
    }

    protected internal override Cell GetCell(IPoint p, int level)
    {
        // Recursively descend from the world midpoint; a point normally matches
        // exactly one cell, so only the first result is returned.
        IList<Cell> cells = new List<Cell>(1);
        Build(xmid, ymid, 0, cells, new StringBuilder(), m_ctx.MakePoint(p.X, p.Y), level);
        return cells[0];
    }

    //note cells could be longer if p on edge
    public override Cell GetCell(string token)
    {
        return new QuadCell(this, token);
    }

    public override Cell GetCell(byte[] bytes, int offset, int len)
    {
        return new QuadCell(this, bytes, offset, len);
    }

    // Recursively examines the four quadrants around centre (x, y) at the given
    // level, appending matching cells to 'matches'; 'str' holds the token prefix
    // built so far (one letter per level).
    private void Build(
        double x,
        double y,
        int level,
        IList<Cell> matches,
        StringBuilder str,
        IShape shape,
        int maxLevel)
    {
        Debug.Assert(str.Length == level);
        double w = levelW[level] / 2;
        double h = levelH[level] / 2;

        // Z-Order
        // http://en.wikipedia.org/wiki/Z-order_%28curve%29
        CheckBattenberg('A', x - w, y + h, level, matches, str, shape, maxLevel);
        CheckBattenberg('B', x + w, y + h, level, matches, str, shape, maxLevel);
        CheckBattenberg('C', x - w, y - h, level, matches, str, shape, maxLevel);
        CheckBattenberg('D', x + w, y - h, level, matches, str, shape, maxLevel);
    }

    // possibly consider hilbert curve
    // http://en.wikipedia.org/wiki/Hilbert_curve
    // http://blog.notdot.net/2009/11/Damn-Cool-Algorithms-Spatial-indexing-with-Quadtrees-and-Hilbert-Curves
    // if we actually use the range property in the query, this could be useful
    //
    // Relates the quadrant labelled 'c' centred at (cx, cy) to 'shape': fully
    // contained quadrants (and leaf-level intersections) become result cells;
    // partially intersecting quadrants recurse one level deeper; disjoint ones
    // are dropped. The shared StringBuilder is restored before returning.
    private void CheckBattenberg(
        char c,
        double cx,
        double cy,
        int level,
        IList<Cell> matches,
        StringBuilder str,
        IShape shape,
        int maxLevel)
    {
        Debug.Assert(str.Length == level);
        double w = levelW[level] / 2;
        double h = levelH[level] / 2;

        int strlen = str.Length;
        IRectangle rectangle = m_ctx.MakeRectangle(cx - w, cx + w, cy - h, cy + h);
        SpatialRelation v = shape.Relate(rectangle);
        if (SpatialRelation.CONTAINS == v)
        {
            str.Append(c);
            //str.append(SpatialPrefixGrid.COVER);
            matches.Add(new QuadCell(this, str.ToString(), v.Transpose()));
        }
        else if (SpatialRelation.DISJOINT == v)
        {
            // nothing
        }
        else // SpatialRelation.WITHIN, SpatialRelation.INTERSECTS
        {
            str.Append(c);

            int nextLevel = level + 1;
            if (nextLevel >= maxLevel)
            {
                //str.append(SpatialPrefixGrid.INTERSECTS);
                matches.Add(new QuadCell(this, str.ToString(), v.Transpose()));
            }
            else
            {
                Build(cx, cy, nextLevel, matches, str, shape, maxLevel);
            }
        }
        // Undo the appended letter so the builder can be reused by the caller.
        str.Length = strlen;
    }

    #region Nested type: QuadCell
    /// <summary>
    /// A cell of the quad tree whose token is a string of quadrant letters
    /// ('A'..'D'), one letter per level.
    /// </summary>
    internal class QuadCell : Cell
    {
        public QuadCell(QuadPrefixTree outerInstance, string token)
            : base(outerInstance, token)
        {
        }

        public QuadCell(QuadPrefixTree outerInstance, string token, SpatialRelation shapeRel)
            : base(outerInstance, token)
        {
            this.m_shapeRel = shapeRel;
        }

        internal QuadCell(QuadPrefixTree outerInstance, byte[] bytes, int off, int len)
            : base(outerInstance, bytes, off, len)
        {
        }

        public override void Reset(byte[] bytes, int off, int len)
        {
            base.Reset(bytes, off, len);
            // Invalidate the cached rectangle; it is lazily rebuilt by Shape.
            shape = null;
        }

        protected internal override ICollection<Cell> GetSubCells()
        {
            // The four children extend this cell's token by one quadrant letter.
            QuadPrefixTree outerInstance = (QuadPrefixTree)this.m_outerInstance;
            IList<Cell> cells = new List<Cell>(4);
            cells.Add(new QuadCell(outerInstance, TokenString + "A"));
            cells.Add(new QuadCell(outerInstance, TokenString + "B"));
            cells.Add(new QuadCell(outerInstance, TokenString + "C"));
            cells.Add(new QuadCell(outerInstance, TokenString + "D"));
            return cells;
        }

        public override int SubCellsSize => 4;

        public override Cell GetSubCell(IPoint p)
        {
            return m_outerInstance.GetCell(p, Level + 1);//not performant!
        }

        private IShape shape; //cache

        public override IShape Shape
        {
            get
            {
                if (shape == null)
                {
                    shape = MakeShape();
                }
                return shape;
            }
        }

        // Decodes the token back into the rectangle it denotes: each letter
        // shifts the lower-left corner by that level's cell size ('A' up,
        // 'B' up+right, 'C' nothing, 'D' right), then the final cell's
        // width/height come from the level table.
        private IRectangle MakeShape()
        {
            QuadPrefixTree outerInstance = (QuadPrefixTree)this.m_outerInstance;
            string token = TokenString;
            double xmin = outerInstance.xmin;
            double ymin = outerInstance.ymin;
            for (int i = 0; i < token.Length; i++)
            {
                char c = token[i];
                if ('A' == c || 'a' == c)
                {
                    ymin += outerInstance.levelH[i];
                }
                else if ('B' == c || 'b' == c)
                {
                    xmin += outerInstance.levelW[i];
                    ymin += outerInstance.levelH[i];
                }
                else if ('C' == c || 'c' == c)
                {
                    // nothing really
                }
                else if ('D' == c || 'd' == c)
                {
                    xmin += outerInstance.levelW[i];
                }
                else
                {
                    throw new Exception("unexpected char: " + c);
                }
            }
            int len = token.Length;
            double width;
            double height;
            if (len > 0)
            {
                width = outerInstance.levelW[len - 1];
                height = outerInstance.levelH[len - 1];
            }
            else
            {
                // Empty token = the root cell, i.e. the whole world bounds.
                width = outerInstance.gridW;
                height = outerInstance.gridH;
            }
            return outerInstance.m_ctx.MakeRectangle(xmin, xmin + width, ymin, ymin + height);
        }
    }//QuadCell
    #endregion
}
}
| |
/* ****************************************************************************
*
* Copyright (c) Microsoft Corporation.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.html file at the root of this distribution. If
* you cannot locate the Apache License, Version 2.0, please send an email to
* dlr@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
* by the terms of the Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
*
*
* ***************************************************************************/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Dynamic;
using System.Linq.Expressions;
using System.Reflection;
using System.Reflection.Emit;
using System.Runtime.CompilerServices;
using Microsoft.Scripting.Actions;
using Microsoft.Scripting.Generation;
using Microsoft.Scripting.Utils;
namespace Microsoft.Scripting.Runtime {
/// <summary>
/// Used as the value for the ScriptingRuntimeHelpers.GetDelegate method caching system
/// </summary>
public sealed class DelegateInfo {
#if FEATURE_LCG
        // Indices into the closure object[] passed as the stub's first argument.
        private const int TargetIndex = 0;
        private const int CallSiteIndex = 1;
        private const int ConvertSiteIndex = 2;
        // Placeholders stored in a freshly created closure until the real
        // target / call sites are filled in under the _closureMap lock.
        private static readonly object TargetPlaceHolder = new object();
        private static readonly object CallSitePlaceHolder = new object();
        private static readonly object ConvertSitePlaceHolder = new object();

        // to enable:
        //     function x() { }
        //     someClass.someEvent += delegateType(x)
        //     someClass.someEvent -= delegateType(x)
        //
        // We need to avoid re-creating the closure because the delegates won't
        // compare equal when removing the delegate if they have different closure
        // instances.  Therefore we use a weak hashtable to get back the
        // original closure. The closures also need to be held via a weak reference to avoid
        // creating a circular reference from the constants target back to the
        // target. This is fine because as long as the delegate is referenced
        // the object array will stay alive.  Once the delegate is gone it's not
        // wired up anywhere and -= will never be used again.
        //
        // Note that the closure content depends on the signature of the delegate. So a single dynamic object
        // might need multiple closures if it is converted to delegates of different signatures.
        private WeakDictionary<object, WeakReference> _closureMap = new WeakDictionary<object, WeakReference>();

        private readonly Type _returnType;
        private readonly Type[] _parameterTypes;
        private readonly MethodInfo _method;          // the emitted stub; shared by all closures
        private readonly InvokeBinder _invokeBinder;
        private readonly ConvertBinder _convertBinder; // null when the return type is void

        /// <summary>
        /// Prepares the binders and emits the reusable CLR call stub for
        /// delegates with the given return type and parameter types.
        /// </summary>
        public DelegateInfo(LanguageContext context, Type returnType, Type[] parameters) {
            Assert.NotNull(returnType);
            Assert.NotNullItems(parameters);

            _returnType = returnType;
            _parameterTypes = parameters;

            PerfTrack.NoteEvent(PerfTrack.Categories.DelegateCreate, ToString());

            if (_returnType != typeof(void)) {
                _convertBinder = context.CreateConvertBinder(_returnType, true);
            }

            _invokeBinder = context.CreateInvokeBinder(new CallInfo(_parameterTypes.Length));

            // The stub takes the closure object[] as an extra leading parameter.
            Type[] delegateParams = new Type[1 + _parameterTypes.Length];
            delegateParams[0] = typeof(object[]);
            for (int i = 0; i < _parameterTypes.Length; i++) {
                delegateParams[1 + i] = _parameterTypes[i];
            }

            EmitClrCallStub(returnType, delegateParams, out _method);
        }

        /// <summary>
        /// Creates (or reuses the cached closure for) a delegate of
        /// <paramref name="delegateType"/> that invokes <paramref name="dynamicObject"/>.
        /// </summary>
        public Delegate CreateDelegate(Type delegateType, object dynamicObject) {
            Assert.NotNull(delegateType, dynamicObject);

            object[] closure;
            lock (_closureMap) {
                WeakReference weakClosure;

                // Reuse the existing closure (if alive) so += / -= of the same
                // dynamic object produce delegates that compare equal.
                if (!_closureMap.TryGetValue(dynamicObject, out weakClosure) || (closure = (object[])weakClosure.Target) == null) {
                    closure = new[] { TargetPlaceHolder, CallSitePlaceHolder, ConvertSitePlaceHolder };
                    _closureMap[dynamicObject] = new WeakReference(closure);

                    Type[] siteTypes = MakeSiteSignature(_parameterTypes);

                    CallSite callSite = CallSite.Create(DynamicSiteHelpers.MakeCallSiteDelegate(siteTypes), _invokeBinder);

                    CallSite convertSite = null;
                    if (_returnType != typeof(void)) {
                        convertSite = CallSite.Create(DynamicSiteHelpers.MakeCallSiteDelegate(typeof(object), _returnType), _convertBinder);
                    }

                    closure[TargetIndex] = dynamicObject;
                    closure[CallSiteIndex] = callSite;
                    closure[ConvertSiteIndex] = convertSite;
                }
            }
            return _method.CreateDelegate(delegateType, closure);
        }

        private void EmitClrCallStub(Type returnType, Type[] parameterTypes, out MethodInfo method) {
            // Create the method with a special name so the language compiler knows that method's stack frame is not visible
            DynamicILGen cg = Snippets.Shared.CreateDynamicMethod("_Scripting_", returnType, parameterTypes, false);
            EmitClrCallStub(cg);
            method = cg.Finish();
        }

        /// <summary>
        /// Generates stub to receive the CLR call and then call the dynamic language code.
        /// </summary>
        private void EmitClrCallStub(ILGen cg) {
            List<ReturnFixer> fixers = new List<ReturnFixer>(0);

            // Create strongly typed return type from the site.
            // This will, among other things, generate tighter code.
            Type[] siteTypes = MakeSiteSignature(_parameterTypes);

            CallSite callSite = CallSite.Create(DynamicSiteHelpers.MakeCallSiteDelegate(siteTypes), _invokeBinder);
            Type siteType = callSite.GetType();

            Type convertSiteType = null;
            CallSite convertSite = null;

            if (_returnType != typeof(void)) {
                convertSite = CallSite.Create(DynamicSiteHelpers.MakeCallSiteDelegate(typeof(object), _returnType), _convertBinder);
                convertSiteType = convertSite.GetType();
            }

            LocalBuilder convertSiteLocal = null;
            FieldInfo convertTarget = null;
            if (_returnType != typeof(void)) {
                // load up the conversion logic on the stack
                convertSiteLocal = cg.DeclareLocal(convertSiteType);
                EmitConstantGet(cg, ConvertSiteIndex, convertSiteType);

                cg.Emit(OpCodes.Dup);
                cg.Emit(OpCodes.Stloc, convertSiteLocal);

                convertTarget = convertSiteType.GetDeclaredField("Target");
                cg.EmitFieldGet(convertTarget);
                cg.Emit(OpCodes.Ldloc, convertSiteLocal);
            }

            // load up the invoke logic on the stack
            LocalBuilder site = cg.DeclareLocal(siteType);
            EmitConstantGet(cg, CallSiteIndex, siteType);

            cg.Emit(OpCodes.Dup);
            cg.Emit(OpCodes.Stloc, site);

            FieldInfo target = siteType.GetDeclaredField("Target");
            cg.EmitFieldGet(target);
            cg.Emit(OpCodes.Ldloc, site);

            EmitConstantGet(cg, TargetIndex, typeof(object));

            // Forward the stub's CLR arguments (arg 0 is the closure array).
            // ByRef parameters are wrapped so they can be written back below.
            for (int i = 0; i < _parameterTypes.Length; i++) {
                if (_parameterTypes[i].IsByRef) {
                    ReturnFixer rf = ReturnFixer.EmitArgument(cg, i + 1, _parameterTypes[i]);
                    if (rf != null) fixers.Add(rf);
                } else {
                    cg.EmitLoadArg(i + 1);
                }
            }

            // emit the invoke for the call
            cg.EmitCall(target.FieldType, "Invoke");

            // emit the invoke for the convert
            if (_returnType == typeof(void)) {
                cg.Emit(OpCodes.Pop);
            } else {
                cg.EmitCall(convertTarget.FieldType, "Invoke");
            }

            // fixup any references
            foreach (ReturnFixer rf in fixers) {
                rf.FixReturn(cg);
            }

            cg.Emit(OpCodes.Ret);
        }

        // Loads closure[index] (arg 0 is the closure array) and casts it
        // to the requested type when it is not plain object.
        private static void EmitConstantGet(ILGen il, int index, Type type) {
            il.Emit(OpCodes.Ldarg_0);
            il.EmitInt(index);
            il.Emit(OpCodes.Ldelem_Ref);
            if (type != typeof(object)) {
                il.Emit(OpCodes.Castclass, type);
            }
        }

        /// <summary>
        /// Builds the call-site delegate signature: (object target, args..., object result).
        /// ByRef parameters are passed as plain object.
        /// </summary>
        private static Type[] MakeSiteSignature(Type[] parameterTypes) {
            Type[] sig = new Type[parameterTypes.Length + 2];

            // target object
            sig[0] = typeof(object);

            // arguments
            for (int i = 0; i < parameterTypes.Length; i++) {
                if (parameterTypes[i].IsByRef) {
                    sig[i + 1] = typeof(object);
                } else {
                    sig[i + 1] = parameterTypes[i];
                }
            }

            // return type
            sig[sig.Length - 1] = typeof(object);
            return sig;
        }
#else
        /// <summary>
        /// Builds the call-site delegate signature: (object target, args..., object result).
        /// ByRef parameters are passed as plain object.
        /// </summary>
        private static Type[] MakeSiteSignature(ParameterInfo[] parameterInfos) {
            Type[] sig = new Type[parameterInfos.Length + 2];

            // target object
            sig[0] = typeof(object);

            // arguments
            for (int i = 0; i < parameterInfos.Length; i++) {
                if (parameterInfos[i].ParameterType.IsByRef) {
                    sig[i + 1] = typeof(object);
                } else {
                    sig[i + 1] = parameterInfos[i].ParameterType;
                }
            }

            // return type
            sig[sig.Length - 1] = typeof(object);
            return sig;
        }

        /// <summary>
        /// Expression-tree based fallback (no lightweight codegen): compiles a
        /// lambda of <paramref name="delegateType"/> that invokes
        /// <paramref name="dynamicObject"/> through dynamic call sites, converting
        /// the result and writing ByRef arguments back via StrongBox locals.
        /// </summary>
        internal static Delegate CreateDelegateForDynamicObject(LanguageContext context, object dynamicObject, Type delegateType, MethodInfo invoke) {
            PerfTrack.NoteEvent(PerfTrack.Categories.DelegateCreate, delegateType.ToString());

            Type returnType = invoke.ReturnType;
            ParameterInfo[] parameterInfos = invoke.GetParameters();

            var parameters = new List<ParameterExpression>();
            for (int i = 0; i < parameterInfos.Length; i++) {
                parameters.Add(Expression.Parameter(parameterInfos[i].ParameterType, "p" + i));
            }

            InvokeBinder invokeBinder = context.CreateInvokeBinder(new CallInfo(parameterInfos.Length));
            ConvertBinder convertBinder = (returnType != typeof(void)) ? context.CreateConvertBinder(returnType, explicitCast: true) : null;

            CallSite invokeSite = CallSite.Create(DynamicSiteHelpers.MakeCallSiteDelegate(MakeSiteSignature(parameterInfos)), invokeBinder);
            Type invokeSiteType = invokeSite.GetType();

            Type convertSiteType;
            CallSite convertSite;

            if (convertBinder != null) {
                convertSite = CallSite.Create(DynamicSiteHelpers.MakeCallSiteDelegate(typeof(object), returnType), convertBinder);
                convertSiteType = convertSite.GetType();
            } else {
                convertSiteType = null;
                convertSite = null;
            }

            var locals = new List<ParameterExpression>();

            ParameterExpression invokeSiteVar = Expression.Parameter(invokeSiteType, "site");
            ParameterExpression convertSiteVar = null;

            // Arguments to the invoke site: the site itself, the dynamic object,
            // then each lambda parameter (ByRef ones wrapped in a StrongBox).
            var args = new List<Expression>();
            args.Add(invokeSiteVar);
            args.Add(Expression.Constant(dynamicObject));

            int strongBoxVarsStart = locals.Count;

            for (int i = 0; i < parameterInfos.Length; i++) {
                if (parameterInfos[i].ParameterType.IsByRef) {
                    var argType = parameterInfos[i].ParameterType;

                    Type elementType = argType.GetElementType();
                    Type concreteType = typeof(StrongBox<>).MakeGenericType(elementType);
                    var strongBox = Expression.Parameter(concreteType, "box" + i);
                    locals.Add(strongBox);

                    args.Add(
                        Expression.Assign(
                            strongBox,
                            Expression.New(
                                concreteType.GetConstructor(new Type[] { elementType }),
                                parameters[i]
                            )
                        )
                    );
                } else {
                    args.Add(parameters[i]);
                }
            }

            int strongBoxVarsEnd = locals.Count;

            Expression invocation = Expression.Invoke(
                Expression.Field(
                    Expression.Assign(
                        invokeSiteVar,
                        Expression.Convert(Expression.Constant(invokeSite), invokeSiteType)
                    ),
                    invokeSiteType.GetDeclaredField("Target")
                ),
                args
            );

            if (convertBinder != null) {
                // Wrap the invocation in a conversion through the convert site.
                convertSiteVar = Expression.Parameter(convertSiteType, "convertSite");

                invocation = Expression.Invoke(
                    Expression.Field(
                        Expression.Assign(
                            convertSiteVar,
                            Expression.Convert(Expression.Constant(convertSite), convertSiteType)
                        ),
                        convertSiteType.GetDeclaredField("Target")
                    ),
                    convertSiteVar,
                    invocation
                );
            }

            locals.Add(invokeSiteVar);
            if (convertSiteVar != null) {
                locals.Add(convertSiteVar);
            }

            Expression body;

            // copy back from StrongBox.Value
            if (strongBoxVarsEnd > strongBoxVarsStart) {
                var block = new Expression[1 + strongBoxVarsEnd - strongBoxVarsStart + 1];
                var resultVar = Expression.Parameter(invocation.Type, "result");
                locals.Add(resultVar);

                int b = 0;
                int l = strongBoxVarsStart;

                // values of strong boxes are initialized in invocation expression:
                block[b++] = Expression.Assign(resultVar, invocation);

                for (int i = 0; i < parameterInfos.Length; i++) {
                    if (parameterInfos[i].ParameterType.IsByRef) {
                        var local = locals[l++];
                        block[b++] = Expression.Assign(
                            parameters[i],
                            Expression.Field(local, local.Type.GetDeclaredField("Value"))
                        );
                    }
                }

                block[b++] = resultVar;

                Debug.Assert(l == strongBoxVarsEnd);
                Debug.Assert(b == block.Length);

                body = Expression.Block(locals, block);
            } else {
                body = Expression.Block(locals, invocation);
            }

            var lambda = Expression.Lambda(delegateType, body, "_Scripting_", parameters);

            return lambda.Compile();
        }
#endif
    }
}
| |
using System;
using System.Collections.Generic;
using NUnit.Framework;
namespace SequelocityDotNet.Tests.DataRecordMapperTests
{
[TestFixture]
public class MapTests
{
[Test]
public void Warmup()
{
    // Intentionally empty; presumably serves as a warm-up run for the test
    // session (no arrange/act/assert) — nothing to verify here.
}
[TestFixture]
public class SimpleMappingTests
{
    /// <summary>POCO with public fields only, so these tests exercise the mapper's field path.</summary>
    public class SuperHeroWithFields
    {
        public int SuperHeroId;
        public string SuperHeroName;
        public string AlterEgoFirstName;
        public string AlterEgoLastName;
    }

    [Test]
    public void Should_Handle_Mapping_To_Types_With_Fields()
    {
        // Arrange
        var superHeroId = new KeyValuePair<string, object>( "SuperHeroId", 0 );
        var superHeroName = new KeyValuePair<string, object>( "SuperHeroName", "Superman" );
        var alterEgoFirstName = new KeyValuePair<string, object>( "AlterEgoFirstName", "Clark" );
        var alterEgoLastName = new KeyValuePair<string, object>( "AlterEgoLastName", "Kent" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { superHeroId, superHeroName, alterEgoFirstName, alterEgoLastName };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var superHero = dataRecord.Map<SuperHeroWithFields>();

        // Assert
        // Constraint-model assertions report expected vs. actual on failure,
        // unlike boolean asserts which only report "Expected: True".
        Assert.That( superHero, Is.Not.Null );
        Assert.That( superHero.SuperHeroId, Is.EqualTo( (int)superHeroId.Value ) );
        Assert.That( superHero.SuperHeroName, Is.EqualTo( superHeroName.Value.ToString() ) );
        Assert.That( superHero.AlterEgoFirstName, Is.EqualTo( alterEgoFirstName.Value.ToString() ) );
        Assert.That( superHero.AlterEgoLastName, Is.EqualTo( alterEgoLastName.Value.ToString() ) );
    }

    /// <summary>POCO with auto-properties only, so these tests exercise the mapper's property path.</summary>
    public class SuperHeroWithProperties
    {
        public int SuperHeroId { get; set; }
        public string SuperHeroName { get; set; }
        public string AlterEgoFirstName { get; set; }
        public string AlterEgoLastName { get; set; }
    }

    [Test]
    public void Should_Handle_Mapping_To_Types_With_Properties()
    {
        // Arrange
        var superHeroId = new KeyValuePair<string, object>( "SuperHeroId", 0 );
        var superHeroName = new KeyValuePair<string, object>( "SuperHeroName", "Superman" );
        var alterEgoFirstName = new KeyValuePair<string, object>( "AlterEgoFirstName", "Clark" );
        var alterEgoLastName = new KeyValuePair<string, object>( "AlterEgoLastName", "Kent" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { superHeroId, superHeroName, alterEgoFirstName, alterEgoLastName };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var superHero = dataRecord.Map<SuperHeroWithProperties>();

        // Assert
        Assert.That( superHero, Is.Not.Null );
        Assert.That( superHero.SuperHeroId, Is.EqualTo( (int)superHeroId.Value ) );
        Assert.That( superHero.SuperHeroName, Is.EqualTo( superHeroName.Value.ToString() ) );
        Assert.That( superHero.AlterEgoFirstName, Is.EqualTo( alterEgoFirstName.Value.ToString() ) );
        Assert.That( superHero.AlterEgoLastName, Is.EqualTo( alterEgoLastName.Value.ToString() ) );
    }

    /// <summary>POCO mixing public fields and auto-properties to exercise both mapper paths at once.</summary>
    public class SuperHeroWithFieldsAndProperties
    {
        public int SuperHeroId;
        public string SuperHeroName;
        public string AlterEgoFirstName { get; set; }
        public string AlterEgoLastName { get; set; }
    }

    [Test]
    public void Should_Handle_Mapping_To_Types_With_Fields_And_Properties()
    {
        // Arrange
        var superHeroId = new KeyValuePair<string, object>( "SuperHeroId", 0 );
        var superHeroName = new KeyValuePair<string, object>( "SuperHeroName", "Superman" );
        var alterEgoFirstName = new KeyValuePair<string, object>( "AlterEgoFirstName", "Clark" );
        var alterEgoLastName = new KeyValuePair<string, object>( "AlterEgoLastName", "Kent" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { superHeroId, superHeroName, alterEgoFirstName, alterEgoLastName };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var superHero = dataRecord.Map<SuperHeroWithFieldsAndProperties>();

        // Assert
        Assert.That( superHero, Is.Not.Null );
        Assert.That( superHero.SuperHeroId, Is.EqualTo( (int)superHeroId.Value ) );
        Assert.That( superHero.SuperHeroName, Is.EqualTo( superHeroName.Value.ToString() ) );
        Assert.That( superHero.AlterEgoFirstName, Is.EqualTo( alterEgoFirstName.Value.ToString() ) );
        Assert.That( superHero.AlterEgoLastName, Is.EqualTo( alterEgoLastName.Value.ToString() ) );
    }
}
[TestFixture]
public class NullableMappingTests
{
    /// <summary>POCO with public fields, including nullable value types, for the field-mapping path.</summary>
    public class SuperHeroWithFields
    {
        public int? SuperHeroId;
        public string SuperHeroName;
        public string AlterEgoFirstName;
        public string AlterEgoLastName;
        public DateTime? DateOfBirth;
    }

    [Test]
    public void Should_Handle_Mapping_NonNullable_Values_To_Nullable_Type_Fields()
    {
        // Arrange
        var superHeroId = new KeyValuePair<string, object>( "SuperHeroId", 500 ); // Assign to a nullable type
        var superHeroName = new KeyValuePair<string, object>( "SuperHeroName", "Superman" );
        var alterEgoFirstName = new KeyValuePair<string, object>( "AlterEgoFirstName", "Clark" );
        var alterEgoLastName = new KeyValuePair<string, object>( "AlterEgoLastName", "Kent" );
        var dateOfBirth = new KeyValuePair<string, object>( "DateOfBirth", "06/18/1938" ); // Assign to a nullable type
        var keyValuePairs = new List<KeyValuePair<string, object>> { superHeroId, superHeroName, alterEgoFirstName, alterEgoLastName, dateOfBirth };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var superHero = dataRecord.Map<SuperHeroWithFields>();

        // Assert
        // Constraint-model assertions report expected vs. actual on failure,
        // unlike boolean asserts which only report "Expected: True".
        Assert.That( superHero, Is.Not.Null );
        Assert.That( superHero.SuperHeroId, Is.EqualTo( (int?)superHeroId.Value ) ); // Under test
        Assert.That( superHero.SuperHeroName, Is.EqualTo( superHeroName.Value.ToString() ) );
        Assert.That( superHero.AlterEgoFirstName, Is.EqualTo( alterEgoFirstName.Value.ToString() ) );
        Assert.That( superHero.AlterEgoLastName, Is.EqualTo( alterEgoLastName.Value.ToString() ) );
        Assert.That( superHero.DateOfBirth, Is.EqualTo( Convert.ToDateTime( dateOfBirth.Value ) ) ); // Under test
    }

    [Test]
    public void Should_Handle_Mapping_Null_Values_To_Nullable_Type_Fields()
    {
        // Arrange
        var superHeroId = new KeyValuePair<string, object>( "SuperHeroId", null ); // Assign to a nullable type
        var superHeroName = new KeyValuePair<string, object>( "SuperHeroName", "Superman" );
        var alterEgoFirstName = new KeyValuePair<string, object>( "AlterEgoFirstName", "Clark" );
        var alterEgoLastName = new KeyValuePair<string, object>( "AlterEgoLastName", "Kent" );
        var dateOfBirth = new KeyValuePair<string, object>( "DateOfBirth", null ); // Assign to a nullable type
        var keyValuePairs = new List<KeyValuePair<string, object>> { superHeroId, superHeroName, alterEgoFirstName, alterEgoLastName, dateOfBirth };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var superHero = dataRecord.Map<SuperHeroWithFields>();

        // Assert
        Assert.That( superHero, Is.Not.Null );
        Assert.That( superHero.SuperHeroId, Is.Null ); // Under test: null source -> null nullable field
        Assert.That( superHero.SuperHeroName, Is.EqualTo( superHeroName.Value.ToString() ) );
        Assert.That( superHero.AlterEgoFirstName, Is.EqualTo( alterEgoFirstName.Value.ToString() ) );
        Assert.That( superHero.AlterEgoLastName, Is.EqualTo( alterEgoLastName.Value.ToString() ) );
        Assert.That( superHero.DateOfBirth, Is.Null ); // Under test: null source -> null nullable field
    }

    /// <summary>POCO with auto-properties, including nullable value types, for the property-mapping path.</summary>
    public class SuperHeroWithProperties
    {
        public int? SuperHeroId { get; set; }
        public string SuperHeroName { get; set; }
        public string AlterEgoFirstName { get; set; }
        public string AlterEgoLastName { get; set; }
        public DateTime? DateOfBirth { get; set; }
    }

    [Test]
    public void Should_Handle_Mapping_NonNullable_Values_To_Nullable_Type_Properties()
    {
        // Arrange
        var superHeroId = new KeyValuePair<string, object>( "SuperHeroId", 500 ); // Assign to a nullable type
        var superHeroName = new KeyValuePair<string, object>( "SuperHeroName", "Superman" );
        var alterEgoFirstName = new KeyValuePair<string, object>( "AlterEgoFirstName", "Clark" );
        var alterEgoLastName = new KeyValuePair<string, object>( "AlterEgoLastName", "Kent" );
        var dateOfBirth = new KeyValuePair<string, object>( "DateOfBirth", "06/18/1938" ); // Assign to a nullable type
        var keyValuePairs = new List<KeyValuePair<string, object>> { superHeroId, superHeroName, alterEgoFirstName, alterEgoLastName, dateOfBirth };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var superHero = dataRecord.Map<SuperHeroWithProperties>();

        // Assert
        Assert.That( superHero, Is.Not.Null );
        Assert.That( superHero.SuperHeroId, Is.EqualTo( (int?)superHeroId.Value ) ); // Under test
        Assert.That( superHero.SuperHeroName, Is.EqualTo( superHeroName.Value.ToString() ) );
        Assert.That( superHero.AlterEgoFirstName, Is.EqualTo( alterEgoFirstName.Value.ToString() ) );
        Assert.That( superHero.AlterEgoLastName, Is.EqualTo( alterEgoLastName.Value.ToString() ) );
        Assert.That( superHero.DateOfBirth, Is.EqualTo( Convert.ToDateTime( dateOfBirth.Value ) ) ); // Under test
    }

    [Test]
    public void Should_Handle_Mapping_Null_Values_To_Nullable_Type_Properties()
    {
        // Arrange
        var superHeroId = new KeyValuePair<string, object>( "SuperHeroId", null ); // Assign to a nullable type
        var superHeroName = new KeyValuePair<string, object>( "SuperHeroName", "Superman" );
        var alterEgoFirstName = new KeyValuePair<string, object>( "AlterEgoFirstName", "Clark" );
        var alterEgoLastName = new KeyValuePair<string, object>( "AlterEgoLastName", "Kent" );
        var dateOfBirth = new KeyValuePair<string, object>( "DateOfBirth", null ); // Assign to a nullable type
        var keyValuePairs = new List<KeyValuePair<string, object>> { superHeroId, superHeroName, alterEgoFirstName, alterEgoLastName, dateOfBirth };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var superHero = dataRecord.Map<SuperHeroWithProperties>();

        // Assert
        Assert.That( superHero, Is.Not.Null );
        Assert.That( superHero.SuperHeroId, Is.Null ); // Under test: null source -> null nullable property
        Assert.That( superHero.SuperHeroName, Is.EqualTo( superHeroName.Value.ToString() ) );
        Assert.That( superHero.AlterEgoFirstName, Is.EqualTo( alterEgoFirstName.Value.ToString() ) );
        Assert.That( superHero.AlterEgoLastName, Is.EqualTo( alterEgoLastName.Value.ToString() ) );
        Assert.That( superHero.DateOfBirth, Is.Null ); // Under test: null source -> null nullable property
    }
}
[TestFixture]
public class SimpleTypeConversionTests
{
    // Target with public fields only.
    public class CustomerWithFields
    {
        public int CustomerId;
        public string FirstName;
        public string LastName;
        public DateTime DateOfBirth;
    }

    [Test]
    public void Should_Handle_Mapping_Simple_Type_Conversions_On_Fields()
    {
        // Arrange: long -> int and string -> DateTime conversions.
        var id = new KeyValuePair<string, object>( "CustomerId", Convert.ToInt64( 5000 ) );
        var first = new KeyValuePair<string, object>( "FirstName", "Clark" );
        var last = new KeyValuePair<string, object>( "LastName", "Kent" );
        var dob = new KeyValuePair<string, object>( "DateOfBirth", "06/18/1938" );
        var record = new TestHelpers.DataRecord( new List<KeyValuePair<string, object>> { id, first, last, dob } );

        // Act
        var customer = record.Map<CustomerWithFields>();

        // Assert
        Assert.NotNull( customer );
        Assert.That( customer.CustomerId == Convert.ToInt64( id.Value ) ); // Under test: narrowing long -> int
        Assert.That( customer.FirstName == first.Value.ToString() );
        Assert.That( customer.LastName == last.Value.ToString() );
        Assert.That( customer.DateOfBirth == Convert.ToDateTime( dob.Value ) );
    }

    [Test]
    public void Should_Throw_An_Exception_When_Converting_NonStandard_Values_On_Fields()
    {
        // Arrange: a DateTime string that Convert.ToDateTime cannot parse.
        var id = new KeyValuePair<string, object>( "CustomerId", Convert.ToInt64( 5000 ) );
        var first = new KeyValuePair<string, object>( "FirstName", "Clark" );
        var last = new KeyValuePair<string, object>( "LastName", "Kent" );
        var dob = new KeyValuePair<string, object>( "DateOfBirth", "June 18th, 1938" ); // Non-standard DateTime value
        var record = new TestHelpers.DataRecord( new List<KeyValuePair<string, object>> { id, first, last, dob } );

        // Act
        TestDelegate action = () => record.Map<CustomerWithFields>();

        // Assert
        Assert.Throws<TypeConverter.TypeConversionException>( action );
    }

    // Target with auto-properties; mirrors CustomerWithFields.
    public class CustomerWithProperties
    {
        public int CustomerId { get; set; }
        public string FirstName { get; set; }
        public string LastName { get; set; }
        public DateTime DateOfBirth { get; set; }
    }

    [Test]
    public void Should_Handle_Mapping_Simple_Type_Conversions_On_Properties()
    {
        // Arrange: long -> int and string -> DateTime conversions.
        var id = new KeyValuePair<string, object>( "CustomerId", Convert.ToInt64( 5000 ) );
        var first = new KeyValuePair<string, object>( "FirstName", "Clark" );
        var last = new KeyValuePair<string, object>( "LastName", "Kent" );
        var dob = new KeyValuePair<string, object>( "DateOfBirth", "06/18/1938" );
        var record = new TestHelpers.DataRecord( new List<KeyValuePair<string, object>> { id, first, last, dob } );

        // Act
        var customer = record.Map<CustomerWithProperties>();

        // Assert
        Assert.NotNull( customer );
        Assert.That( customer.CustomerId == Convert.ToInt64( id.Value ) ); // Under test: narrowing long -> int
        Assert.That( customer.FirstName == first.Value.ToString() );
        Assert.That( customer.LastName == last.Value.ToString() );
        Assert.That( customer.DateOfBirth == Convert.ToDateTime( dob.Value ) );
    }

    [Test]
    public void Should_Throw_An_Exception_When_Converting_NonStandard_Values_On_Properties()
    {
        // Arrange: a DateTime string that Convert.ToDateTime cannot parse.
        var id = new KeyValuePair<string, object>( "CustomerId", Convert.ToInt64( 5000 ) );
        var first = new KeyValuePair<string, object>( "FirstName", "Clark" );
        var last = new KeyValuePair<string, object>( "LastName", "Kent" );
        var dob = new KeyValuePair<string, object>( "DateOfBirth", "June 18th, 1938" ); // Non-standard DateTime value
        var record = new TestHelpers.DataRecord( new List<KeyValuePair<string, object>> { id, first, last, dob } );

        // Act
        TestDelegate action = () => record.Map<CustomerWithProperties>();

        // Assert
        Assert.Throws<TypeConverter.TypeConversionException>( action );
    }
}
[TestFixture]
public class EnumConversionTests
{
    // Target with public fields; AccountType is a non-nullable enum.
    public class BankAccountWithFields
    {
        public string AccountHolderFullName;
        public BankAccountType AccountType;

        public enum BankAccountType
        {
            Checking,
            Savings,
            MoneyMarket
        }
    }

    [Test]
    public void Should_Handle_Mapping_Strings_To_Enum_Values_On_Fields()
    {
        // Arrange: the enum arrives as its string name.
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", "Savings" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var bankAccount = dataRecord.Map<BankAccountWithFields>();

        // Assert
        Assert.NotNull( bankAccount );
        Assert.That( bankAccount.AccountHolderFullName == accountHolderFullName.Value.ToString() );
        Assert.That( bankAccount.AccountType.ToString() == accountType.Value.ToString() );
    }

    [Test]
    public void Should_Handle_Mapping_Integers_To_Enum_Values_On_Fields()
    {
        // Arrange: the enum arrives as its underlying integer value (1 == Savings).
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", 1 );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var bankAccount = dataRecord.Map<BankAccountWithFields>();

        // Assert
        Assert.NotNull( bankAccount );
        Assert.That( bankAccount.AccountHolderFullName == accountHolderFullName.Value.ToString() );
        Assert.That( bankAccount.AccountType.ToString() == Enum.ToObject( typeof( BankAccountWithFields.BankAccountType ), accountType.Value ).ToString() );
    }

    [Test]
    public void Should_Use_The_Default_Value_When_Mapping_A_Null_To_An_Enum_On_Fields()
    {
        // Arrange: null source value; a non-nullable enum falls back to its default member.
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", null );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var bankAccount = dataRecord.Map<BankAccountWithFields>();

        // Assert
        Assert.NotNull( bankAccount );
        Assert.That( bankAccount.AccountHolderFullName == accountHolderFullName.Value.ToString() );
        Assert.That( bankAccount.AccountType == BankAccountWithFields.BankAccountType.Checking ); // Checking is the default value
    }

    [Test]
    public void Should_Throw_An_Exception_When_Mapping_An_Invalid_Value_To_An_Enum_On_Fields()
    {
        // Arrange: a string that names no enum member.
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", "asdf;lkj" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        TestDelegate action = () => dataRecord.Map<BankAccountWithFields>();

        // Assert
        Assert.Throws<TypeConverter.TypeConversionException>( action );
    }

    // Target with auto-properties; mirrors BankAccountWithFields.
    public class BankAccountWithProperties
    {
        public string AccountHolderFullName { get; set; }
        public BankAccountType AccountType { get; set; }

        public enum BankAccountType
        {
            Checking,
            Savings,
            MoneyMarket
        }
    }

    // BUG FIX: the four property tests below previously mapped BankAccountWithFields
    // (copy/paste error), so BankAccountWithProperties was never exercised.
    [Test]
    public void Should_Handle_Mapping_Strings_To_Enum_Values_On_Properties()
    {
        // Arrange: the enum arrives as its string name.
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", "Savings" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var bankAccount = dataRecord.Map<BankAccountWithProperties>();

        // Assert
        Assert.NotNull( bankAccount );
        Assert.That( bankAccount.AccountHolderFullName == accountHolderFullName.Value.ToString() );
        Assert.That( bankAccount.AccountType.ToString() == accountType.Value.ToString() );
    }

    [Test]
    public void Should_Handle_Mapping_Integers_To_Enum_Values_On_Properties()
    {
        // Arrange: the enum arrives as its underlying integer value (1 == Savings).
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", 1 );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var bankAccount = dataRecord.Map<BankAccountWithProperties>();

        // Assert
        Assert.NotNull( bankAccount );
        Assert.That( bankAccount.AccountHolderFullName == accountHolderFullName.Value.ToString() );
        Assert.That( bankAccount.AccountType.ToString() == Enum.ToObject( typeof( BankAccountWithProperties.BankAccountType ), accountType.Value ).ToString() );
    }

    [Test]
    public void Should_Use_The_Default_Value_When_Mapping_A_Null_To_An_Enum_On_Properties()
    {
        // Arrange: null source value; a non-nullable enum falls back to its default member.
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", null );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var bankAccount = dataRecord.Map<BankAccountWithProperties>();

        // Assert
        Assert.NotNull( bankAccount );
        Assert.That( bankAccount.AccountHolderFullName == accountHolderFullName.Value.ToString() );
        Assert.That( bankAccount.AccountType == BankAccountWithProperties.BankAccountType.Checking ); // Checking is the default value
    }

    [Test]
    public void Should_Throw_An_Exception_When_Mapping_An_Invalid_Value_To_An_Enum_On_Properties()
    {
        // Arrange: a string that names no enum member.
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", "asdf;lkj" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        TestDelegate action = () => dataRecord.Map<BankAccountWithProperties>();

        // Assert
        Assert.Throws<TypeConverter.TypeConversionException>( action );
    }
}
[TestFixture]
public class NullableEnumConversionTests
{
    // Target with public fields; AccountType is a nullable enum.
    public class BankAccountWithFields
    {
        public string AccountHolderFullName;
        public BankAccountType? AccountType;

        public enum BankAccountType
        {
            Checking,
            Savings,
            MoneyMarket
        }
    }

    [Test]
    public void Should_Handle_Mapping_Strings_To_Enum_Values_On_Fields()
    {
        // Arrange: the enum arrives as its string name.
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", "Savings" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var bankAccount = dataRecord.Map<BankAccountWithFields>();

        // Assert
        Assert.NotNull( bankAccount );
        Assert.That( bankAccount.AccountHolderFullName == accountHolderFullName.Value.ToString() );
        Assert.That( bankAccount.AccountType.ToString() == accountType.Value.ToString() );
    }

    [Test]
    public void Should_Handle_Mapping_Integers_To_Enum_Values_On_Fields()
    {
        // Arrange: the enum arrives as its underlying integer value (1 == Savings).
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", 1 );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var bankAccount = dataRecord.Map<BankAccountWithFields>();

        // Assert
        Assert.NotNull( bankAccount );
        Assert.That( bankAccount.AccountHolderFullName == accountHolderFullName.Value.ToString() );
        Assert.That( bankAccount.AccountType.ToString() == Enum.ToObject( typeof( BankAccountWithFields.BankAccountType ), accountType.Value ).ToString() );
    }

    [Test]
    public void Should_Assign_Null_When_Mapping_A_Null_To_An_Enum_On_Fields()
    {
        // Arrange: null source value; a nullable enum should stay null, not default.
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", null );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var bankAccount = dataRecord.Map<BankAccountWithFields>();

        // Assert
        Assert.NotNull( bankAccount );
        Assert.That( bankAccount.AccountHolderFullName == accountHolderFullName.Value.ToString() );
        Assert.That( bankAccount.AccountType == null ); // Checking if null
    }

    [Test]
    public void Should_Throw_An_Exception_When_Mapping_An_Invalid_Value_To_An_Enum_On_Fields()
    {
        // Arrange: a string that names no enum member.
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", "asdf;lkj" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        TestDelegate action = () => dataRecord.Map<BankAccountWithFields>();

        // Assert
        Assert.Throws<TypeConverter.TypeConversionException>( action );
    }

    // Target with auto-properties; mirrors BankAccountWithFields.
    public class BankAccountWithProperties
    {
        public string AccountHolderFullName { get; set; }
        public BankAccountType? AccountType { get; set; }

        public enum BankAccountType
        {
            Checking,
            Savings,
            MoneyMarket
        }
    }

    // BUG FIX: the four property tests below previously mapped BankAccountWithFields
    // (copy/paste error), so BankAccountWithProperties was never exercised.
    [Test]
    public void Should_Handle_Mapping_Strings_To_Enum_Values_On_Properties()
    {
        // Arrange: the enum arrives as its string name.
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", "Savings" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var bankAccount = dataRecord.Map<BankAccountWithProperties>();

        // Assert
        Assert.NotNull( bankAccount );
        Assert.That( bankAccount.AccountHolderFullName == accountHolderFullName.Value.ToString() );
        Assert.That( bankAccount.AccountType.ToString() == accountType.Value.ToString() );
    }

    [Test]
    public void Should_Handle_Mapping_Integers_To_Enum_Values_On_Properties()
    {
        // Arrange: the enum arrives as its underlying integer value (1 == Savings).
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", 1 );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var bankAccount = dataRecord.Map<BankAccountWithProperties>();

        // Assert
        Assert.NotNull( bankAccount );
        Assert.That( bankAccount.AccountHolderFullName == accountHolderFullName.Value.ToString() );
        Assert.That( bankAccount.AccountType.ToString() == Enum.ToObject( typeof( BankAccountWithProperties.BankAccountType ), accountType.Value ).ToString() );
    }

    // Renamed from Should_Use_The_Default_Value_... : for a nullable enum the
    // expected outcome (and the assertion below) is null, not the default member.
    [Test]
    public void Should_Assign_Null_When_Mapping_A_Null_To_An_Enum_On_Properties()
    {
        // Arrange: null source value; a nullable enum should stay null, not default.
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", null );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var bankAccount = dataRecord.Map<BankAccountWithProperties>();

        // Assert
        Assert.NotNull( bankAccount );
        Assert.That( bankAccount.AccountHolderFullName == accountHolderFullName.Value.ToString() );
        Assert.That( bankAccount.AccountType == null ); // Checking if null
    }

    [Test]
    public void Should_Throw_An_Exception_When_Mapping_An_Invalid_Value_To_An_Enum_On_Properties()
    {
        // Arrange: a string that names no enum member.
        var accountHolderFullName = new KeyValuePair<string, object>( "AccountHolderFullName", "Clark Kent" );
        var accountType = new KeyValuePair<string, object>( "AccountType", "asdf;lkj" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { accountHolderFullName, accountType };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        TestDelegate action = () => dataRecord.Map<BankAccountWithProperties>();

        // Assert
        Assert.Throws<TypeConverter.TypeConversionException>( action );
    }
}
[TestFixture]
public class GuidConversionTests
{
    // Target with public fields; MonsterId is a non-nullable Guid.
    public class MonsterWithFields
    {
        public Guid MonsterId;
        public string MonsterName;
    }

    [Test]
    public void Should_Handle_Mapping_Strings_To_Guid_Values_On_Fields()
    {
        // Arrange
        var monsterId = new KeyValuePair<string, object>( "MonsterId", Guid.NewGuid() );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var monster = dataRecord.Map<MonsterWithFields>();

        // Assert
        Assert.NotNull( monster );
        Assert.That( monster.MonsterId.ToString() == monsterId.Value.ToString() );
        Assert.That( monster.MonsterName == monstername.Value.ToString() );
    }

    [Test]
    public void Should_Handle_Mapping_ByteArray_To_Guid_Values_On_Fields()
    {
        // Arrange: the Guid arrives as its 16-byte representation.
        var monsterId = new KeyValuePair<string, object>( "MonsterId", Guid.NewGuid().ToByteArray() );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var monster = dataRecord.Map<MonsterWithFields>();

        // Assert
        Assert.NotNull( monster );
        // BUG FIX: the original compared ToString() of two byte arrays, which is always
        // "System.Byte[]" == "System.Byte[]" and could never fail. Compare the Guids instead.
        Assert.That( monster.MonsterId == new Guid( (byte[])monsterId.Value ) );
        Assert.That( monster.MonsterName == monstername.Value.ToString() );
    }

    [Test]
    public void Should_Convert_A_Null_To_A_Default_Guid_When_To_Guid_Values_On_Fields()
    {
        // Arrange: null source value; a non-nullable Guid falls back to Guid.Empty.
        var monsterId = new KeyValuePair<string, object>( "MonsterId", null );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var monster = dataRecord.Map<MonsterWithFields>();

        // Assert
        Assert.NotNull( monster );
        Assert.That( monster.MonsterId.ToString() == default( Guid ).ToString() );
        Assert.That( monster.MonsterName == monstername.Value.ToString() );
    }

    [Test]
    public void Should_Throw_An_Exception_When_Mapping_An_Invalid_Value_To_A_Guid_On_Fields()
    {
        // Arrange: a string that is not a valid Guid.
        var monsterId = new KeyValuePair<string, object>( "MonsterId", "asdf;lkj" );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        TestDelegate action = () => dataRecord.Map<MonsterWithFields>();

        // Assert
        Assert.Throws<TypeConverter.TypeConversionException>( action );
    }

    // BUG FIX: this type was declared with public fields despite its name; it now
    // uses auto-properties so the property-mapping path is actually exercised.
    public class MonsterWithProperties
    {
        public Guid MonsterId { get; set; }
        public string MonsterName { get; set; }
    }

    [Test]
    public void Should_Handle_Mapping_Strings_To_Guid_Values_On_Properties()
    {
        // Arrange
        var monsterId = new KeyValuePair<string, object>( "MonsterId", Guid.NewGuid() );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var monster = dataRecord.Map<MonsterWithProperties>();

        // Assert
        Assert.NotNull( monster );
        Assert.That( monster.MonsterId.ToString() == monsterId.Value.ToString() );
        Assert.That( monster.MonsterName == monstername.Value.ToString() );
    }

    [Test]
    public void Should_Handle_Mapping_ByteArray_To_Guid_Values_On_Properties()
    {
        // Arrange: the Guid arrives as its 16-byte representation.
        var monsterId = new KeyValuePair<string, object>( "MonsterId", Guid.NewGuid().ToByteArray() );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var monster = dataRecord.Map<MonsterWithProperties>();

        // Assert
        Assert.NotNull( monster );
        // BUG FIX: compare the Guids, not the vacuous ToString() of two byte arrays.
        Assert.That( monster.MonsterId == new Guid( (byte[])monsterId.Value ) );
        Assert.That( monster.MonsterName == monstername.Value.ToString() );
    }

    [Test]
    public void Should_Convert_A_Null_To_A_Default_Guid_When_To_Guid_Values_On_Properties()
    {
        // Arrange: null source value; a non-nullable Guid falls back to Guid.Empty.
        var monsterId = new KeyValuePair<string, object>( "MonsterId", null );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var monster = dataRecord.Map<MonsterWithProperties>();

        // Assert
        Assert.NotNull( monster );
        Assert.That( monster.MonsterId.ToString() == default( Guid ).ToString() );
        Assert.That( monster.MonsterName == monstername.Value.ToString() );
    }

    [Test]
    public void Should_Throw_An_Exception_When_Mapping_An_Invalid_Value_To_A_Guid_On_Properties()
    {
        // Arrange: a string that is not a valid Guid.
        var monsterId = new KeyValuePair<string, object>( "MonsterId", "asdf;lkj" );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        TestDelegate action = () => dataRecord.Map<MonsterWithProperties>();

        // Assert
        Assert.Throws<TypeConverter.TypeConversionException>( action );
    }
}
[TestFixture]
public class NullableGuidConversionTests
{
    // Target with public fields; MonsterId is a nullable Guid.
    public class MonsterWithFields
    {
        public Guid? MonsterId;
        public string MonsterName;
    }

    [Test]
    public void Should_Handle_Mapping_Strings_To_Guid_Values_On_Fields()
    {
        // Arrange
        var monsterId = new KeyValuePair<string, object>( "MonsterId", Guid.NewGuid() );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var monster = dataRecord.Map<MonsterWithFields>();

        // Assert
        Assert.NotNull( monster );
        Assert.That( monster.MonsterId.ToString() == monsterId.Value.ToString() );
        Assert.That( monster.MonsterName == monstername.Value.ToString() );
    }

    [Test]
    public void Should_Handle_Mapping_ByteArray_To_Guid_Values_On_Fields()
    {
        // Arrange: the Guid arrives as its 16-byte representation.
        var monsterId = new KeyValuePair<string, object>( "MonsterId", Guid.NewGuid().ToByteArray() );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var monster = dataRecord.Map<MonsterWithFields>();

        // Assert
        Assert.NotNull( monster );
        // BUG FIX: the original compared ToString() of two byte arrays, which is always
        // "System.Byte[]" == "System.Byte[]" and could never fail. Compare the Guids instead.
        Assert.That( monster.MonsterId == new Guid( (byte[])monsterId.Value ) );
        Assert.That( monster.MonsterName == monstername.Value.ToString() );
    }

    [Test]
    public void Should_Convert_A_Null_To_A_Default_Guid_When_To_Guid_Values_On_Fields()
    {
        // Arrange: null source value; a nullable Guid should stay null.
        var monsterId = new KeyValuePair<string, object>( "MonsterId", null );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var monster = dataRecord.Map<MonsterWithFields>();

        // Assert
        Assert.NotNull( monster );
        Assert.That( monster.MonsterId == null ); // Ensure is null
        Assert.That( monster.MonsterName == monstername.Value.ToString() );
    }

    [Test]
    public void Should_Throw_An_Exception_When_Mapping_An_Invalid_Value_To_A_Guid_On_Fields()
    {
        // Arrange: a string that is not a valid Guid.
        var monsterId = new KeyValuePair<string, object>( "MonsterId", "asdf;lkj" );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        TestDelegate action = () => dataRecord.Map<MonsterWithFields>();

        // Assert
        Assert.Throws<TypeConverter.TypeConversionException>( action );
    }

    // BUG FIX: this type was declared with public fields despite its name; it now
    // uses auto-properties so the property-mapping path is actually exercised.
    public class MonsterWithProperties
    {
        public Guid? MonsterId { get; set; }
        public string MonsterName { get; set; }
    }

    [Test]
    public void Should_Handle_Mapping_Strings_To_Guid_Values_On_Properties()
    {
        // Arrange
        var monsterId = new KeyValuePair<string, object>( "MonsterId", Guid.NewGuid() );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var monster = dataRecord.Map<MonsterWithProperties>();

        // Assert
        Assert.NotNull( monster );
        Assert.That( monster.MonsterId.ToString() == monsterId.Value.ToString() );
        Assert.That( monster.MonsterName == monstername.Value.ToString() );
    }

    [Test]
    public void Should_Handle_Mapping_ByteArray_To_Guid_Values_On_Properties()
    {
        // Arrange: the Guid arrives as its 16-byte representation.
        var monsterId = new KeyValuePair<string, object>( "MonsterId", Guid.NewGuid().ToByteArray() );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var monster = dataRecord.Map<MonsterWithProperties>();

        // Assert
        Assert.NotNull( monster );
        // BUG FIX: compare the Guids, not the vacuous ToString() of two byte arrays.
        Assert.That( monster.MonsterId == new Guid( (byte[])monsterId.Value ) );
        Assert.That( monster.MonsterName == monstername.Value.ToString() );
    }

    [Test]
    public void Should_Convert_A_Null_To_A_Default_Guid_When_To_Guid_Values_On_Properties()
    {
        // Arrange: null source value; a nullable Guid should stay null.
        var monsterId = new KeyValuePair<string, object>( "MonsterId", null );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        var monster = dataRecord.Map<MonsterWithProperties>();

        // Assert
        Assert.NotNull( monster );
        Assert.That( monster.MonsterId == null ); // Ensure is null
        Assert.That( monster.MonsterName == monstername.Value.ToString() );
    }

    [Test]
    public void Should_Throw_An_Exception_When_Mapping_An_Invalid_Value_To_A_Guid_On_Properties()
    {
        // Arrange: a string that is not a valid Guid.
        var monsterId = new KeyValuePair<string, object>( "MonsterId", "asdf;lkj" );
        var monstername = new KeyValuePair<string, object>( "MonsterName", "Frankenstein" );
        var keyValuePairs = new List<KeyValuePair<string, object>> { monsterId, monstername };
        var dataRecord = new TestHelpers.DataRecord( keyValuePairs );

        // Act
        TestDelegate action = () => dataRecord.Map<MonsterWithProperties>();

        // Assert
        Assert.Throws<TypeConverter.TypeConversionException>( action );
    }
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
namespace RoslynPad.Runtime
{
internal class ResultObject
{
// Enumerable wrapper properties whose values duplicate what the element list
// already shows, referenced by name (usage is outside this excerpt).
private static readonly HashSet<string> s_irrelevantEnumerableProperties = new() { "Count", "Length", "Key" };
// Types that implement IEnumerable but are rendered via their members instead
// of their elements (see GetEnumerable).
private static readonly HashSet<string> s_doNotTreatAsEnumerableTypeNames = new() { "JObject", "JProperty" };
// Fixed placeholder strings used instead of ToString() for these type names
// (consumed elsewhere; presumably to avoid dumping huge JSON text — TODO confirm).
private static readonly Dictionary<string, string> s_toStringAlternatives = new()
{
["JArray"] = "[...]",
["JObject"] = "{...}"
};
// Depth/length limits applied while building this node's subtree.
private readonly DumpQuotas _quotas;
// When non-null, this node represents the value of this member on the parent object.
private readonly MemberInfo? _member;
// Factory for a root node (no backing member).
public static ResultObject Create(object? o, in DumpQuotas quotas, string? header = null) =>
new(o, quotas, header);
// Builds the node eagerly: header/value/type/children are all computed here.
internal ResultObject(object? o, in DumpQuotas quotas, string? header = null, MemberInfo? member = null)
{
_quotas = quotas;
_member = member;
// Expanded in the UI only while the expansion-depth quota allows it.
IsExpanded = quotas.MaxExpandedDepth > 0;
Initialize(o, header);
}
// Display name of the node (member name or caller-supplied prefix).
public string? Header { get; private set; }
// Rendered string form of the value (settable by subclasses).
public string? Value { get; protected set; }
// Display name of the value's type (see GetTypeName).
public string? Type { get; private set; }
// Child nodes (members or enumerable elements); null when none were built.
public List<ResultObject>? Children { get; private set; }
public bool HasChildren => Children?.Count > 0;
public bool IsExpanded { get; private set; }
// Entry point from the constructor: renders this node either as a member value
// (when a backing member is tracked) or as a raw object.
private void Initialize(object? o, string? headerPrefix)
{
    // Anything nested under this node gets quotas reduced by one level.
    var childQuotas = _quotas.StepDown();

    if (!TryPopulateMember(o, childQuotas))
    {
        PopulateObject(o, headerPrefix, childQuotas);
    }
}
// Fills Header/Value/Type/Children for a raw object.
// Branching, in order: null -> "<null>"; string -> value only; enumerable ->
// element children (plus member children for "special" enumerables); otherwise
// member children, unless the depth quota is exhausted (value only).
private void PopulateObject(object? o, string? headerPrefix, in DumpQuotas targetQuotas)
{
if (o == null)
{
Header = headerPrefix;
Value = "<null>";
return;
}
// Note: quota check uses this node's quotas, not the stepped-down targetQuotas.
var isMaxDepth = _quotas.MaxDepth <= 0;
SetType(o);
if (o is string s)
{
// Strings are IEnumerable<char>; handled here so they never fall into the
// enumerable branch below.
Header = headerPrefix;
Value = s;
return;
}
var type = o.GetType();
var e = GetEnumerable(o, type);
if (e != null)
{
if (isMaxDepth)
{
// Out of depth: show only an element count, no children.
InitializeEnumerableHeaderOnly(headerPrefix, e);
}
else
{
var members = GetMembers(type);
if (IsSpecialEnumerable(type, members))
{
// "Special" enumerables show their members AND a synthetic child node
// that holds the element list.
PopulateChildren(o, targetQuotas, members, headerPrefix);
var enumerable = new ResultObject(o, targetQuotas, headerPrefix);
enumerable.InitializeEnumerable(headerPrefix, e, targetQuotas);
// PopulateChildren assigned Children above (o is non-null here).
Children = Children.Concat(new[] { enumerable }).ToList();
}
else
{
InitializeEnumerable(headerPrefix, e, targetQuotas);
}
}
return;
}
if (isMaxDepth)
{
// Out of depth: string form only, no member children.
Header = headerPrefix;
Value = GetString(o);
return;
}
PopulateChildren(o, targetQuotas, GetMembers(type), headerPrefix);
}
// Public, non-static instance properties (with a readable getter) and public
// non-static instance fields of the type, sorted by name.
private static MemberInfo[] GetMembers(Type type)
{
    var properties = type.GetRuntimeProperties()
        .Where(p => p.GetMethod is { IsPublic: true, IsStatic: false })
        .Cast<MemberInfo>();

    var fields = type.GetRuntimeFields()
        .Where(f => f.IsPublic && !f.IsStatic)
        .Cast<MemberInfo>();

    return properties.Concat(fields).OrderBy(m => m.Name).ToArray();
}
// Treats the object as a sequence unless its type name is in the opt-out list.
private static IEnumerable? GetEnumerable(object o, Type type)
{
    if (o is IEnumerable enumerable && !s_doNotTreatAsEnumerableTypeNames.Contains(type.Name))
    {
        return enumerable;
    }

    return null;
}
// Renders this node as the value of the tracked member on "o".
// Returns false when no member is tracked (caller falls back to PopulateObject);
// otherwise always returns true, even when reading the member threw.
private bool TryPopulateMember(object? o, DumpQuotas targetQuotas)
{
if (_member == null)
{
return false;
}
object? value;
try
{
if (o is Exception exception)
{
// StackTrace is read directly rather than via reflection.
if (_member.Name == nameof(Exception.StackTrace))
{
value = exception.StackTrace;
}
else
{
value = GetMemberValue(o);
// TargetSite would recurse into MethodBase internals; cap its depth.
if (_member.Name == "TargetSite")
{
targetQuotas = targetQuotas.WithMaxDepth(0);
}
}
}
else
{
value = GetMemberValue(o);
}
}
catch (TargetInvocationException exception)
{
// A throwing getter becomes a "Threw ..." node with the inner exception as child.
// NOTE(review): InnerException is dereferenced without a null check here —
// it can in principle be null on a TargetInvocationException; confirm.
Header = _member.Name;
Value = $"Threw {exception.InnerException.GetType().Name}";
Children = new List<ResultObject> { ExceptionResultObject.Create(exception.InnerException, _quotas) };
return true;
}
if (value == null)
{
// No value to infer the type from; fall back to the member's declared type.
if (_member is PropertyInfo propertyInfo)
{
SetType(propertyInfo.PropertyType);
}
else if (_member is FieldInfo fieldInfo)
{
SetType(fieldInfo.FieldType);
}
}
PopulateObject(value, _member.Name, targetQuotas);
return true;
}
// Reads the tracked member's value from "o". Indexed properties are skipped
// (result stays null); an exception thrown while reading is returned as the
// value itself (unwrapped from TargetInvocationException).
private object? GetMemberValue(object? o)
{
    try
    {
        switch (_member)
        {
            case PropertyInfo property when property.GetIndexParameters().Length == 0:
                return property.GetValue(o);
            case FieldInfo field:
                return field.GetValue(o);
            default:
                return null;
        }
    }
    catch (Exception ex)
    {
        return ex is TargetInvocationException tiex ? tiex.InnerException : ex;
    }
}
// Records the runtime type of "o"; a null argument leaves Type untouched.
private void SetType(object o)
{
    if (o != null)
    {
        SetType(o.GetType());
    }
}

// Records the display name of the given type.
private void SetType(Type type) => Type = GetTypeName(type);
private static string GetTypeName(Type type)
{
var ns = type.Namespace;
string? typeName = null;
do
{
var currentName = GetSimpleTypeName(type);
typeName = typeName != null ? currentName + "+" + typeName : currentName;
type = type.DeclaringType;
} while (type != null);
typeName = $"{typeName} ({ns})";
return typeName;
}
// Produces a C#-style simple name for a type: for constructed generics the CLR
// arity suffix (e.g. "List`1") is replaced with angle-bracketed arguments
// ("List<Int32>"), recursing into each type argument.
private static string GetSimpleTypeName(Type type)
{
    if (!type.IsConstructedGenericType)
    {
        return type.Name;
    }

    var name = type.Name;
    var backtick = name.IndexOf('`');
    if (backtick > 0)
    {
        name = name.Substring(0, backtick);
    }

    var arguments = string.Join(", ", type.GenericTypeArguments.Select(GetSimpleTypeName));
    return name + "<" + arguments + ">";
}
// Renders an object node: header, string value, and one child node per member.
// A null object produces header/value only, with no children.
// (The `in` parameter cannot be captured by a lambda, hence the explicit loop.)
private void PopulateChildren(object o, in DumpQuotas targetQuotas, IEnumerable<MemberInfo> properties, string? headerPrefix)
{
    Header = headerPrefix;
    Value = GetString(o);

    if (o == null)
    {
        return;
    }

    var childResults = new List<ResultObject>();
    foreach (var member in properties)
    {
        childResults.Add(new ResultObject(o, targetQuotas, member: member));
    }

    Children = childResults;
}
// Summarizes an enumerable without materializing children: counts elements up to
// the quota, then probes once more to decide whether to append a "+" marker.
// Any failure during enumeration is rendered as a thrown-exception node.
private void InitializeEnumerableHeaderOnly(string? headerPrefix, IEnumerable e)
{
    Header = headerPrefix;
    try
    {
        var enumerator = e.GetEnumerator();
        using (enumerator as IDisposable)
        {
            var seen = 0;
            while (seen < _quotas.MaxEnumerableLength && enumerator.MoveNext())
            {
                seen++;
            }

            // One extra MoveNext tells us whether the sequence was truncated.
            var suffix = enumerator.MoveNext() ? "+" : "";
            Value = $"<enumerable Count: {seen}{suffix}>";
        }
    }
    catch (Exception enumerationError)
    {
        Header = _member?.Name;
        Value = $"Threw {enumerationError.GetType().Name}";
        Children = new List<ResultObject> { ExceptionResultObject.Create(enumerationError, _quotas) };
    }
}
// Materializes an enumerable into child nodes (up to the enumerable quota),
// labels untyped items with the sequence's element type, and formats the header
// differently for IGrouping sequences. Enumeration errors become a
// thrown-exception node.
private void InitializeEnumerable(string? headerPrefix, IEnumerable e, in DumpQuotas targetQuotas)
{
    try
    {
        Header = headerPrefix;
        var items = new List<ResultObject>();
        var type = e.GetType().GetTypeInfo();
        // Probe for IEnumerable<T> to learn the element type; defaults to object.
        var enumerableInterface = type.ImplementedInterfaces
            .FirstOrDefault(x => x.IsConstructedGenericType && x.GetGenericTypeDefinition() == typeof(IEnumerable<>));
        var enumerableType = enumerableInterface?.GenericTypeArguments[0] ?? typeof(object);
        var enumerableTypeName = GetTypeName(enumerableType);
        var enumerator = e.GetEnumerator();
        using (enumerator as IDisposable)
        {
            var index = 0;
            while (index < _quotas.MaxEnumerableLength && enumerator.MoveNext())
            {
                var item = new ResultObject(enumerator.Current, targetQuotas, $"[{index}]");
                // Items whose own type couldn't be determined (null values) are
                // labeled with the sequence's element type instead.
                if (item.Type == null)
                {
                    item.Type = enumerableTypeName;
                }
                items.Add(item);
                ++index;
            }
            // One extra MoveNext detects truncation; "+" marks a clipped sequence.
            var hasMore = enumerator.MoveNext() ? "+" : "";
            // IGrouping<,> sequences additionally show their Key in the header.
            var groupingInterface = type.ImplementedInterfaces
                .FirstOrDefault(x => x.IsConstructedGenericType &&
                                     x.GetGenericTypeDefinition() == typeof(IGrouping<,>));
            Value = groupingInterface != null
                ? $"<grouping Count: {items.Count}{hasMore} Key: {groupingInterface.GetRuntimeProperty("Key").GetValue(e)}>"
                : $"<enumerable Count: {items.Count}{hasMore}>";
            Children = items;
        }
    }
    catch (Exception exception)
    {
        Header = _member?.Name;
        Value = $"Threw {exception.GetType().Name}";
        Children = new List<ResultObject> { ExceptionResultObject.Create(exception, _quotas) };
    }
}
// Decides whether an enumerable type is "interesting" enough to also dump its own
// members (beyond its elements): it must expose at least one relevant member and
// must not be an enumerator, an array, a BCL collection/LINQ type, a *Collection,
// or a JSON.NET JArray.
private static bool IsSpecialEnumerable(Type t, IEnumerable<MemberInfo> members)
{
    if (!members.Any(m => !s_irrelevantEnumerableProperties.Contains(m.Name)))
    {
        return false;
    }

    if (typeof(IEnumerator).IsAssignableFrom(t) || t.IsArray)
    {
        return false;
    }

    if (t.Namespace?.StartsWith("System.Collections", StringComparison.Ordinal) == true ||
        t.Namespace?.StartsWith("System.Linq", StringComparison.Ordinal) == true)
    {
        return false;
    }

    return t.Name.IndexOf("Collection", StringComparison.Ordinal) < 0 &&
           !t.Name.Equals("JArray", StringComparison.Ordinal);
}
// Renders a value as display text: exceptions show their message, types with a
// registered ToString replacement use it, everything else uses ToString (null
// becomes ""), truncated to the configured maximum string length.
private string GetString(object o)
{
    if (o is Exception error)
    {
        return error.Message;
    }

    var typeName = o?.GetType().Name;
    if (typeName != null && s_toStringAlternatives.TryGetValue(typeName, out var alternative))
    {
        return alternative;
    }

    var text = o + string.Empty;
    return text.Length <= _quotas.MaxStringLength
        ? text
        : text.Substring(0, _quotas.MaxStringLength);
}
}
// Result node representing a thrown exception; captures the message and, when
// available, the line number of the originating stack frame.
internal class ExceptionResultObject : ResultObject
{
    private ExceptionResultObject(Exception exception, in DumpQuotas quotas) : base(exception, quotas)
    {
        Message = exception.Message;
        var stackFrames = new StackTrace(exception, fNeedFileInfo: true).GetFrames() ?? Array.Empty<StackFrame>();
        foreach (var stackFrame in stackFrames)
        {
            // NOTE(review): this deliberately picks the first frame WITHOUT a file
            // name but WITH a line number — presumably because script-submission
            // frames carry line info but no on-disk source file, while framework
            // frames have file names. Confirm this filter matches the host's
            // compilation model before changing it.
            if (string.IsNullOrWhiteSpace(stackFrame.GetFileName()) &&
                stackFrame.GetFileLineNumber() is var lineNumber && lineNumber > 0)
            {
                LineNumber = lineNumber;
                break;
            }
        }
    }
    // Factory entry point; falls back to the default quotas when none are given.
    public static ExceptionResultObject Create(Exception exception, DumpQuotas? quotas = null) => new(exception, quotas ?? DumpQuotas.Default);
    // 1-based line number of the originating frame, or 0 when none was found.
    public int LineNumber { get; private set; }
    // The exception's message text.
    public string Message { get; private set; }
}
// Payload-less marker type; presumably signals a request to read user input —
// TODO(review): confirm against the host/client protocol that consumes it.
internal class InputReadRequest
{
}
// Result message carrying an optional progress value; null means "no progress
// value available" (range/semantics are defined by the producer, not enforced here).
internal class ProgressResultObject
{
    private ProgressResultObject(double? progress) => Progress = progress;
    // Factory entry point.
    public static ProgressResultObject Create(double? progress) => new(progress);
    // The reported progress value, if any.
    public double? Progress { get; }
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace TestAppHarbor.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
    /// <summary>
    /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
    /// </summary>
    public HelpPageSampleGenerator()
    {
        ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
        ActionSamples = new Dictionary<HelpPageSampleKey, object>();
        SampleObjects = new Dictionary<Type, object>();
        // ObjectGenerator-based factory is the only one registered by default.
        SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
        {
            DefaultSampleObjectFactory,
        };
    }
    /// <summary>
    /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
    /// </summary>
    public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
    /// <summary>
    /// Gets the objects that are used directly as samples for certain actions.
    /// </summary>
    public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
    /// <summary>
    /// Gets the objects that are serialized as samples by the supported formatters.
    /// </summary>
    public IDictionary<Type, object> SampleObjects { get; internal set; }
    /// <summary>
    /// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
    /// stopping when the factory successfully returns a non-<see langword="null"/> object.
    /// </summary>
    /// <remarks>
    /// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
    /// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
    /// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
    [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
        Justification = "This is an appropriate nesting of generic types")]
    public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }
    /// <summary>
    /// Gets the request body samples for a given <see cref="ApiDescription"/>.
    /// </summary>
    /// <param name="api">The <see cref="ApiDescription"/>.</param>
    /// <returns>The samples keyed by media type.</returns>
    public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
    {
        return GetSample(api, SampleDirection.Request);
    }
    /// <summary>
    /// Gets the response body samples for a given <see cref="ApiDescription"/>.
    /// </summary>
    /// <param name="api">The <see cref="ApiDescription"/>.</param>
    /// <returns>The samples keyed by media type.</returns>
    public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
    {
        return GetSample(api, SampleDirection.Response);
    }
    /// <summary>
    /// Gets the request or response body samples.
    /// </summary>
    /// <param name="api">The <see cref="ApiDescription"/>.</param>
    /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
    /// <returns>The samples keyed by media type.</returns>
    public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
    {
        if (api == null)
        {
            throw new ArgumentNullException("api");
        }
        string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
        string actionName = api.ActionDescriptor.ActionName;
        IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
        Collection<MediaTypeFormatter> formatters;
        Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
        var samples = new Dictionary<MediaTypeHeaderValue, object>();
        // Use the samples provided directly for actions
        // NOTE(review): Add throws if two action samples share the same media type
        // for this action — confirm sample registration guarantees uniqueness.
        var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
        foreach (var actionSample in actionSamples)
        {
            samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
        }
        // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
        // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
        if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
        {
            object sampleObject = GetSampleObject(type);
            foreach (var formatter in formatters)
            {
                foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
                {
                    // Directly-provided action samples (added above) take precedence.
                    if (!samples.ContainsKey(mediaType))
                    {
                        object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
                        // If no sample found, try generate sample using formatter and sample object
                        if (sample == null && sampleObject != null)
                        {
                            sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                        }
                        samples.Add(mediaType, WrapSampleIfString(sample));
                    }
                }
            }
        }
        return samples;
    }
    /// <summary>
    /// Search for samples that are provided directly through <see cref="ActionSamples"/>.
    /// </summary>
    /// <param name="controllerName">Name of the controller.</param>
    /// <param name="actionName">Name of the action.</param>
    /// <param name="parameterNames">The parameter names.</param>
    /// <param name="type">The CLR type.</param>
    /// <param name="formatter">The formatter.</param>
    /// <param name="mediaType">The media type.</param>
    /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
    /// <returns>The sample that matches the parameters, or <see langword="null"/> when none matches.</returns>
    public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
    {
        object sample;
        // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
        // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
        // If still not found, try to get the sample provided for the specified mediaType and type.
        // Finally, try to get the sample provided for the specified mediaType.
        if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
            ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
            ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
            ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
        {
            return sample;
        }
        return null;
    }
    /// <summary>
    /// Gets the sample object that will be serialized by the formatters.
    /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
    /// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
    /// factories in <see cref="SampleObjectFactories"/>.
    /// </summary>
    /// <param name="type">The type.</param>
    /// <returns>The sample object, or <see langword="null"/> if every factory failed or returned null.</returns>
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
        Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
    public virtual object GetSampleObject(Type type)
    {
        object sampleObject;
        if (!SampleObjects.TryGetValue(type, out sampleObject))
        {
            // No specific object available, try our factories.
            foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
            {
                if (factory == null)
                {
                    continue;
                }
                try
                {
                    sampleObject = factory(this, type);
                    if (sampleObject != null)
                    {
                        break;
                    }
                }
                catch
                {
                    // Ignore any problems encountered in the factory; go on to the next one (if any).
                }
            }
        }
        return sampleObject;
    }
    /// <summary>
    /// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
    /// </summary>
    /// <param name="api">The <see cref="ApiDescription"/>.</param>
    /// <returns>The type.</returns>
    public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
    {
        string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
        string actionName = api.ActionDescriptor.ActionName;
        IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
        Collection<MediaTypeFormatter> formatters;
        return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
    }
    /// <summary>
    /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
    /// </summary>
    /// <param name="api">The <see cref="ApiDescription"/>.</param>
    /// <param name="controllerName">Name of the controller.</param>
    /// <param name="actionName">Name of the action.</param>
    /// <param name="parameterNames">The parameter names.</param>
    /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
    /// <param name="formatters">The formatters.</param>
    [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
    public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
    {
        if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
        {
            throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
        }
        if (api == null)
        {
            throw new ArgumentNullException("api");
        }
        Type type;
        // "*" acts as a wildcard matching the action regardless of parameter names.
        if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
            ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
        {
            // Re-compute the supported formatters based on type
            Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
            foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
            {
                if (IsFormatSupported(sampleDirection, formatter, type))
                {
                    newFormatters.Add(formatter);
                }
            }
            formatters = newFormatters;
        }
        else
        {
            switch (sampleDirection)
            {
                case SampleDirection.Request:
                    ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                    type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                    formatters = api.SupportedRequestBodyFormatters;
                    break;
                case SampleDirection.Response:
                default:
                    type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                    formatters = api.SupportedResponseFormatters;
                    break;
            }
        }
        return type;
    }
    /// <summary>
    /// Writes the sample object using formatter.
    /// </summary>
    /// <param name="formatter">The formatter.</param>
    /// <param name="value">The value.</param>
    /// <param name="type">The type.</param>
    /// <param name="mediaType">Type of the media.</param>
    /// <returns>A <see cref="TextSample"/> on success, or an <see cref="InvalidSample"/> describing the failure.</returns>
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
    public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
    {
        if (formatter == null)
        {
            throw new ArgumentNullException("formatter");
        }
        if (mediaType == null)
        {
            throw new ArgumentNullException("mediaType");
        }
        object sample = String.Empty;
        MemoryStream ms = null;
        HttpContent content = null;
        try
        {
            if (formatter.CanWriteType(type))
            {
                ms = new MemoryStream();
                content = new ObjectContent(type, value, formatter, mediaType);
                // NOTE(review): synchronous Wait() on an async write — acceptable in
                // this offline help-page context, but a deadlock risk elsewhere.
                formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
                ms.Position = 0;
                StreamReader reader = new StreamReader(ms);
                string serializedSampleString = reader.ReadToEnd();
                // Pretty-print known text formats for display.
                if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
                {
                    serializedSampleString = TryFormatXml(serializedSampleString);
                }
                else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
                {
                    serializedSampleString = TryFormatJson(serializedSampleString);
                }
                sample = new TextSample(serializedSampleString);
            }
            else
            {
                sample = new InvalidSample(String.Format(
                    CultureInfo.CurrentCulture,
                    "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                    mediaType,
                    formatter.GetType().Name,
                    type.Name));
            }
        }
        catch (Exception e)
        {
            sample = new InvalidSample(String.Format(
                CultureInfo.CurrentCulture,
                "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
                formatter.GetType().Name,
                mediaType.MediaType,
                UnwrapException(e).Message));
        }
        finally
        {
            if (ms != null)
            {
                ms.Dispose();
            }
            if (content != null)
            {
                content.Dispose();
            }
        }
        return sample;
    }
    // Flattens AggregateExceptions (e.g. from Task.Wait) to their first inner exception.
    internal static Exception UnwrapException(Exception exception)
    {
        AggregateException aggregateException = exception as AggregateException;
        if (aggregateException != null)
        {
            return aggregateException.Flatten().InnerException;
        }
        return exception;
    }
    // Default factory for sample objects
    private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
    {
        // Try to create a default sample object
        ObjectGenerator objectGenerator = new ObjectGenerator();
        return objectGenerator.GenerateObject(type);
    }
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
    private static string TryFormatJson(string str)
    {
        try
        {
            object parsedJson = JsonConvert.DeserializeObject(str);
            return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
        }
        catch
        {
            // can't parse JSON, return the original string
            return str;
        }
    }
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
    private static string TryFormatXml(string str)
    {
        try
        {
            XDocument xml = XDocument.Parse(str);
            return xml.ToString();
        }
        catch
        {
            // can't parse XML, return the original string
            return str;
        }
    }
    // A formatter supports a direction if it can read (request) or write (response) the type.
    private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
    {
        switch (sampleDirection)
        {
            case SampleDirection.Request:
                return formatter.CanReadType(type);
            case SampleDirection.Response:
                return formatter.CanWriteType(type);
        }
        return false;
    }
    // Yields registered action samples whose key matches the controller/action/direction,
    // treating a registered parameter list of { "*" } as a wildcard.
    private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
    {
        HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
        foreach (var sample in ActionSamples)
        {
            HelpPageSampleKey sampleKey = sample.Key;
            if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
                String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
                (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
                sampleDirection == sampleKey.SampleDirection)
            {
                yield return sample;
            }
        }
    }
    // Raw strings are wrapped in TextSample so the view layer treats them uniformly.
    private static object WrapSampleIfString(object sample)
    {
        string stringSample = sample as string;
        if (stringSample != null)
        {
            return new TextSample(stringSample);
        }
        return sample;
    }
}
}
| |
namespace Lucene.Net.Search
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// for javadocs
// for javadocs
// for javadocs
// for javadocs
// for javadocs
using System; // for javadocs
/// <summary>
/// A <seealso cref="Filter"/> that only accepts numeric values within
/// a specified range. To use this, you must first index the
/// numeric values using <seealso cref="IntField"/>, {@link
/// FloatField}, <seealso cref="LongField"/> or <seealso cref="DoubleField"/> (expert: {@link
/// NumericTokenStream}).
///
/// <p>You create a new NumericRangeFilter with the static
/// factory methods, eg:
///
/// <pre class="prettyprint">
/// Filter f = NumericRangeFilter.newFloatRange("weight", 0.03f, 0.10f, true, true);
/// </pre>
///
/// accepts all documents whose float valued "weight" field
/// ranges from 0.03 to 0.10, inclusive.
/// See <seealso cref="NumericRangeQuery"/> for details on how Lucene
/// indexes and searches numeric valued fields.
///
/// @since 2.9
///
/// </summary>
public sealed class NumericRangeFilter<T> : MultiTermQueryWrapperFilter<NumericRangeQuery<T>>
    where T : struct, IComparable<T>
    // real numbers in C# are structs and IComparable with themselves, best constraint we have
{
    internal NumericRangeFilter(NumericRangeQuery<T> query)
        : base(query)
    {
    }

    /// <summary>
    /// Returns <c>true</c> if the lower endpoint is inclusive. </summary>
    public bool IncludesMin() => Query.IncludesMin();

    /// <summary>
    /// Returns <c>true</c> if the upper endpoint is inclusive. </summary>
    public bool IncludesMax() => Query.IncludesMax();

    /// <summary>
    /// Returns the lower value of this range filter. </summary>
    public T? Min => Query.Min;

    /// <summary>
    /// Returns the upper value of this range filter. </summary>
    public T? Max => Query.Max;

    /// <summary>
    /// Returns the precision step. </summary>
    public int PrecisionStep => Query.PrecisionStep;
}
/// <summary>
/// Static factory methods for the strongly-typed <see cref="NumericRangeFilter{T}"/> variants.
/// All factories accept half-open ranges (i.e. &lt; / &lt;= or &gt; / &gt;= queries) via a
/// <c>null</c> min or max; with inclusive set to <c>false</c> the bounds themselves are
/// excluded, with it set to <c>true</c> the bounds are hits, too.
/// </summary>
public static class NumericRangeFilter
{
    /// <summary>
    /// Factory that creates a <c>NumericRangeFilter</c> filtering a <c>long</c> range
    /// using the given <a href="NumericRangeQuery.html#precisionStepDesc"><c>precisionStep</c></a>.
    /// </summary>
    public static NumericRangeFilter<long> NewLongRange(string field, int precisionStep, long? min, long? max, bool minInclusive, bool maxInclusive) =>
        new NumericRangeFilter<long>(NumericRangeQuery.NewLongRange(field, precisionStep, min, max, minInclusive, maxInclusive));

    /// <summary>
    /// Factory that creates a <c>NumericRangeFilter</c> querying a <c>long</c> range
    /// using the default <c>precisionStep</c> <seealso cref="NumericUtils#PRECISION_STEP_DEFAULT"/> (4).
    /// </summary>
    public static NumericRangeFilter<long> NewLongRange(string field, long? min, long? max, bool minInclusive, bool maxInclusive) =>
        new NumericRangeFilter<long>(NumericRangeQuery.NewLongRange(field, min, max, minInclusive, maxInclusive));

    /// <summary>
    /// Factory that creates a <c>NumericRangeFilter</c> filtering an <c>int</c> range
    /// using the given <a href="NumericRangeQuery.html#precisionStepDesc"><c>precisionStep</c></a>.
    /// </summary>
    public static NumericRangeFilter<int> NewIntRange(string field, int precisionStep, int? min, int? max, bool minInclusive, bool maxInclusive) =>
        new NumericRangeFilter<int>(NumericRangeQuery.NewIntRange(field, precisionStep, min, max, minInclusive, maxInclusive));

    /// <summary>
    /// Factory that creates a <c>NumericRangeFilter</c> querying an <c>int</c> range
    /// using the default <c>precisionStep</c> <seealso cref="NumericUtils#PRECISION_STEP_DEFAULT"/> (4).
    /// </summary>
    public static NumericRangeFilter<int> NewIntRange(string field, int? min, int? max, bool minInclusive, bool maxInclusive) =>
        new NumericRangeFilter<int>(NumericRangeQuery.NewIntRange(field, min, max, minInclusive, maxInclusive));

    /// <summary>
    /// Factory that creates a <c>NumericRangeFilter</c> filtering a <c>double</c> range
    /// using the given <a href="NumericRangeQuery.html#precisionStepDesc"><c>precisionStep</c></a>.
    /// <seealso cref="Double#NaN"/> will never match a half-open range; to hit <c>NaN</c>
    /// use a query with <c>min == max == Double.NaN</c>.
    /// </summary>
    public static NumericRangeFilter<double> NewDoubleRange(string field, int precisionStep, double? min, double? max, bool minInclusive, bool maxInclusive) =>
        new NumericRangeFilter<double>(NumericRangeQuery.NewDoubleRange(field, precisionStep, min, max, minInclusive, maxInclusive));

    /// <summary>
    /// Factory that creates a <c>NumericRangeFilter</c> querying a <c>double</c> range
    /// using the default <c>precisionStep</c> <seealso cref="NumericUtils#PRECISION_STEP_DEFAULT"/> (4).
    /// <seealso cref="Double#NaN"/> will never match a half-open range; to hit <c>NaN</c>
    /// use a query with <c>min == max == Double.NaN</c>.
    /// </summary>
    public static NumericRangeFilter<double> NewDoubleRange(string field, double? min, double? max, bool minInclusive, bool maxInclusive) =>
        new NumericRangeFilter<double>(NumericRangeQuery.NewDoubleRange(field, min, max, minInclusive, maxInclusive));

    /// <summary>
    /// Factory that creates a <c>NumericRangeFilter</c> filtering a <c>float</c> range
    /// using the given <a href="NumericRangeQuery.html#precisionStepDesc"><c>precisionStep</c></a>.
    /// <seealso cref="Float#NaN"/> will never match a half-open range; to hit <c>NaN</c>
    /// use a query with <c>min == max == Float.NaN</c>.
    /// </summary>
    public static NumericRangeFilter<float> NewFloatRange(string field, int precisionStep, float? min, float? max, bool minInclusive, bool maxInclusive) =>
        new NumericRangeFilter<float>(NumericRangeQuery.NewFloatRange(field, precisionStep, min, max, minInclusive, maxInclusive));

    /// <summary>
    /// Factory that creates a <c>NumericRangeFilter</c> querying a <c>float</c> range
    /// using the default <c>precisionStep</c> <seealso cref="NumericUtils#PRECISION_STEP_DEFAULT"/> (4).
    /// <seealso cref="Float#NaN"/> will never match a half-open range; to hit <c>NaN</c>
    /// use a query with <c>min == max == Float.NaN</c>.
    /// </summary>
    public static NumericRangeFilter<float> NewFloatRange(string field, float? min, float? max, bool minInclusive, bool maxInclusive) =>
        new NumericRangeFilter<float>(NumericRangeQuery.NewFloatRange(field, min, max, minInclusive, maxInclusive));
}
}
| |
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Orleans.Runtime;
using Orleans.Runtime.Configuration;
using Orleans.Streams;
using Microsoft.Extensions.Logging;
namespace Orleans
{
/// <summary>
/// Client for communicating with clusters of Orleans silos.
/// </summary>
internal class ClusterClient : IInternalClusterClient
{
// Underlying runtime client that performs the actual cluster communication.
private readonly OutsideRuntimeClient runtimeClient;
// Serializes lifecycle transitions (Connect/Stop) across concurrent callers.
private readonly AsyncLock initLock = new AsyncLock();
// Current lifecycle state; mutated only while holding initLock, read without it.
private LifecycleState state = LifecycleState.Created;
// Logger exposed to application code via the Logger property.
private readonly LoggerWrapper appLogger;
// Lifecycle states of this client instance.
private enum LifecycleState
{
    // Terminal error state: disposal did not complete cleanly (see Stop's finally).
    Invalid,
    // Constructed; Connect has not yet been called.
    Created,
    // Connect in progress; a failed attempt leaves the instance in this state.
    Starting,
    // Successfully connected to the cluster.
    Started,
    // Stop/Dispose in progress.
    Disposing,
    // Fully disposed.
    Disposed,
}
/// <summary>
/// Initializes a new instance of the <see cref="ClusterClient"/> class.
/// </summary>
/// <param name="runtimeClient">The runtime client.</param>
/// <param name="configuration">The client configuration.</param>
/// <param name="loggerFactory">Logger factory used to create loggers</param>
public ClusterClient(OutsideRuntimeClient runtimeClient, ClientConfiguration configuration, ILoggerFactory loggerFactory)
{
    this.Configuration = configuration;
    this.runtimeClient = runtimeClient;

    // Set the PropagateActivityId flag from the client configuration.
    RequestContext.PropagateActivityId = configuration.PropagateActivityId;
    this.appLogger = new LoggerWrapper("Application", loggerFactory);
}
/// <inheritdoc />
public bool IsInitialized => this.state == LifecycleState.Started;

/// <inheritdoc />
public IGrainFactory GrainFactory => this.InternalGrainFactory;

/// <inheritdoc />
public Logger Logger
{
    get
    {
        // Fails if the client has been disposed or was never connected.
        this.ThrowIfDisposedOrNotInitialized();
        return this.appLogger;
    }
}

/// <inheritdoc />
public IServiceProvider ServiceProvider => this.runtimeClient.ServiceProvider;

/// <inheritdoc />
public ClientConfiguration Configuration { get; }
/// <inheritdoc />
IStreamProviderRuntime IInternalClusterClient.StreamProviderRuntime => this.runtimeClient.CurrentStreamProviderRuntime;

/// <summary>
/// Gets the internal grain factory, guarding against use before initialization or after disposal.
/// </summary>
private IInternalGrainFactory InternalGrainFactory
{
    get
    {
        this.ThrowIfDisposedOrNotInitialized();
        return this.runtimeClient.InternalGrainFactory;
    }
}

/// <summary>
/// Gets a value indicating whether or not this instance is being disposed.
/// </summary>
private bool IsDisposing => this.state == LifecycleState.Disposed ||
                            this.state == LifecycleState.Disposing;
/// <inheritdoc />
public IEnumerable<IStreamProvider> GetStreamProviders()
{
this.ThrowIfDisposedOrNotInitialized();
return this.runtimeClient.CurrentStreamProviderManager.GetStreamProviders();
}
/// <inheritdoc />
public IStreamProvider GetStreamProvider(string name)
{
this.ThrowIfDisposedOrNotInitialized();
if (string.IsNullOrWhiteSpace(name))
{
throw new ArgumentNullException(nameof(name));
}
return this.runtimeClient.CurrentStreamProviderManager.GetProvider(name) as IStreamProvider;
}
/// <inheritdoc />
public async Task Connect()
{
this.ThrowIfDisposedOrAlreadyInitialized();
using (await this.initLock.LockAsync().ConfigureAwait(false))
{
this.ThrowIfDisposedOrAlreadyInitialized();
if (this.state == LifecycleState.Starting)
{
throw new InvalidOperationException("A prior connection attempt failed. This instance must be disposed.");
}
this.state = LifecycleState.Starting;
await this.runtimeClient.Start().ConfigureAwait(false);
this.state = LifecycleState.Started;
}
}
/// <inheritdoc />
public Task Close() => this.Stop(gracefully: true);
/// <inheritdoc />
public void Abort()
{
this.Stop(gracefully: false).GetAwaiter().GetResult();
}
private async Task Stop(bool gracefully)
{
if (this.IsDisposing) return;
using (await this.initLock.LockAsync().ConfigureAwait(false))
{
if (this.state == LifecycleState.Disposed) return;
try
{
this.state = LifecycleState.Disposing;
if (gracefully)
{
Utils.SafeExecute(() => this.runtimeClient.Disconnect());
}
Utils.SafeExecute(() => this.runtimeClient.Reset(gracefully));
this.Dispose(true);
}
finally
{
// If disposal failed, the system is in an invalid state.
if (this.state == LifecycleState.Disposing) this.state = LifecycleState.Invalid;
}
}
}
/// <inheritdoc />
void IDisposable.Dispose() => this.Abort();
/// <inheritdoc />
public TGrainInterface GetGrain<TGrainInterface>(Guid primaryKey, string grainClassNamePrefix = null)
where TGrainInterface : IGrainWithGuidKey
{
return this.InternalGrainFactory.GetGrain<TGrainInterface>(primaryKey, grainClassNamePrefix);
}
/// <inheritdoc />
public TGrainInterface GetGrain<TGrainInterface>(long primaryKey, string grainClassNamePrefix = null)
where TGrainInterface : IGrainWithIntegerKey
{
return this.InternalGrainFactory.GetGrain<TGrainInterface>(primaryKey, grainClassNamePrefix);
}
/// <inheritdoc />
public TGrainInterface GetGrain<TGrainInterface>(string primaryKey, string grainClassNamePrefix = null)
where TGrainInterface : IGrainWithStringKey
{
return this.InternalGrainFactory.GetGrain<TGrainInterface>(primaryKey, grainClassNamePrefix);
}
/// <inheritdoc />
public TGrainInterface GetGrain<TGrainInterface>(Guid primaryKey, string keyExtension, string grainClassNamePrefix = null)
where TGrainInterface : IGrainWithGuidCompoundKey
{
return this.InternalGrainFactory.GetGrain<TGrainInterface>(primaryKey, keyExtension, grainClassNamePrefix);
}
/// <inheritdoc />
public TGrainInterface GetGrain<TGrainInterface>(long primaryKey, string keyExtension, string grainClassNamePrefix = null)
where TGrainInterface : IGrainWithIntegerCompoundKey
{
return this.InternalGrainFactory.GetGrain<TGrainInterface>(primaryKey, keyExtension, grainClassNamePrefix);
}
/// <inheritdoc />
public Task<TGrainObserverInterface> CreateObjectReference<TGrainObserverInterface>(IGrainObserver obj)
where TGrainObserverInterface : IGrainObserver
{
return ((IGrainFactory) this.runtimeClient.InternalGrainFactory).CreateObjectReference<TGrainObserverInterface>(obj);
}
/// <inheritdoc />
public Task DeleteObjectReference<TGrainObserverInterface>(IGrainObserver obj) where TGrainObserverInterface : IGrainObserver
{
return this.InternalGrainFactory.DeleteObjectReference<TGrainObserverInterface>(obj);
}
/// <inheritdoc />
public void BindGrainReference(IAddressable grain)
{
this.InternalGrainFactory.BindGrainReference(grain);
}
/// <inheritdoc />
public TGrainObserverInterface CreateObjectReference<TGrainObserverInterface>(IAddressable obj)
where TGrainObserverInterface : IAddressable
{
return this.InternalGrainFactory.CreateObjectReference<TGrainObserverInterface>(obj);
}
/// <inheritdoc />
TGrainInterface IInternalGrainFactory.GetSystemTarget<TGrainInterface>(GrainId grainId, SiloAddress destination)
{
return this.InternalGrainFactory.GetSystemTarget<TGrainInterface>(grainId, destination);
}
/// <inheritdoc />
TGrainInterface IInternalGrainFactory.Cast<TGrainInterface>(IAddressable grain)
{
return this.InternalGrainFactory.Cast<TGrainInterface>(grain);
}
/// <inheritdoc />
object IInternalGrainFactory.Cast(IAddressable grain, Type interfaceType)
{
return this.InternalGrainFactory.Cast(grain, interfaceType);
}
/// <inheritdoc />
TGrainInterface IInternalGrainFactory.GetGrain<TGrainInterface>(GrainId grainId)
{
return this.InternalGrainFactory.GetGrain<TGrainInterface>(grainId);
}
/// <inheritdoc />
GrainReference IInternalGrainFactory.GetGrain(GrainId grainId, string genericArguments)
{
return this.InternalGrainFactory.GetGrain(grainId, genericArguments);
}
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Usage", "CA2213:DisposableFieldsShouldBeDisposed")]
private void Dispose(bool disposing)
{
if (disposing)
{
Utils.SafeExecute(() => this.runtimeClient.Dispose());
this.state = LifecycleState.Disposed;
}
GC.SuppressFinalize(this);
}
private void ThrowIfDisposedOrNotInitialized()
{
this.ThrowIfDisposed();
if (!this.IsInitialized) throw new InvalidOperationException("Client is not initialized.");
}
private void ThrowIfDisposedOrAlreadyInitialized()
{
this.ThrowIfDisposed();
if (this.IsInitialized) throw new InvalidOperationException("Client is already initialized.");
}
private void ThrowIfDisposed()
{
if (this.IsDisposing)
throw new ObjectDisposedException(
nameof(ClusterClient),
$"Client has been disposed either by a call to {nameof(Dispose)} or because it has been stopped.");
}
}
}
| |
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
namespace Lucene.Net.Search
{
using Lucene.Net.Support;
using Directory = Lucene.Net.Store.Directory;
using IndexReader = Lucene.Net.Index.IndexReader;
using IndexWriter = Lucene.Net.Index.IndexWriter;
using IndexWriterConfig = Lucene.Net.Index.IndexWriterConfig;
using LineFileDocs = Lucene.Net.Util.LineFileDocs;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer;
using PrintStreamInfoStream = Lucene.Net.Util.PrintStreamInfoStream;
using Term = Lucene.Net.Index.Term;
using TermContext = Lucene.Net.Index.TermContext;
using TestUtil = Lucene.Net.Util.TestUtil;
// TODO
// - doc blocks? so we can test joins/grouping...
// - controlled consistency (NRTMgr)
/// <summary>
/// Base test class for simulating distributed search across multiple shards.
/// </summary>
public abstract class ShardSearchingTestBase : LuceneTestCase
{
    // TODO: maybe SLM should throw this instead of returning null...
    /// <summary>
    /// Thrown when the lease for a searcher has expired.
    /// </summary>
    public class SearcherExpiredException : Exception
    {
        public SearcherExpiredException(string message)
            : base(message)
        {
        }
    }

    /// <summary>
    /// Cache key for per-node collection statistics: identifies one field of one
    /// shard (node) at one searcher version.
    /// </summary>
    internal class FieldAndShardVersion
    {
        internal readonly long Version;
        internal readonly int NodeID;
        internal readonly string Field;

        public FieldAndShardVersion(int nodeID, long version, string field)
        {
            this.NodeID = nodeID;
            this.Version = version;
            this.Field = field;
        }

        public override int GetHashCode()
        {
            // NOTE(review): weak hash — when NodeID == 0 the version does not
            // contribute; Equals still disambiguates, so correctness holds.
            return (int)(Version * NodeID + Field.GetHashCode());
        }

        public override bool Equals(object _other)
        {
            if (!(_other is FieldAndShardVersion))
            {
                return false;
            }

            FieldAndShardVersion other = (FieldAndShardVersion)_other;

            return Field.Equals(other.Field) && Version == other.Version && NodeID == other.NodeID;
        }

        public override string ToString()
        {
            return "FieldAndShardVersion(field=" + Field + " nodeID=" + NodeID + " version=" + Version + ")";
        }
    }

    /// <summary>
    /// Cache key for per-node term statistics: identifies one term of one
    /// shard (node) at one searcher version.
    /// </summary>
    internal class TermAndShardVersion
    {
        internal readonly long Version;
        internal readonly int NodeID;
        internal readonly Term Term;

        public TermAndShardVersion(int nodeID, long version, Term term)
        {
            this.NodeID = nodeID;
            this.Version = version;
            this.Term = term;
        }

        public override int GetHashCode()
        {
            // NOTE(review): same weak-hash caveat as FieldAndShardVersion.
            return (int)(Version * NodeID + Term.GetHashCode());
        }

        public override bool Equals(object _other)
        {
            if (!(_other is TermAndShardVersion))
            {
                return false;
            }

            TermAndShardVersion other = (TermAndShardVersion)_other;

            return Term.Equals(other.Term) && Version == other.Version && NodeID == other.NodeID;
        }
    }

    // We share collection stats for these fields on each node
    // reopen:
    private readonly string[] FieldsToShare = new string[] { "body", "title" };

    // Called by one node once it has reopened, to notify all
    // other nodes. this is just a mock (since it goes and
    // directly updates all other nodes, in RAM)... in a real
    // env this would hit the wire, sending version &
    // collection stats to all other nodes:
    internal virtual void BroadcastNodeReopen(int nodeID, long version, IndexSearcher newSearcher)
    {
        if (VERBOSE)
        {
            Console.WriteLine("REOPEN: nodeID=" + nodeID + " version=" + version + " maxDoc=" + newSearcher.IndexReader.MaxDoc);
        }

        // Broadcast new collection stats for this node to all
        // other nodes:
        foreach (string field in FieldsToShare)
        {
            CollectionStatistics stats = newSearcher.CollectionStatistics(field);
            foreach (NodeState node in Nodes)
            {
                // Don't put my own collection stats into the cache;
                // we pull locally:
                if (node.MyNodeID != nodeID)
                {
                    node.CollectionStatsCache[new FieldAndShardVersion(nodeID, version, field)] = stats;
                }
            }
        }
        foreach (NodeState node in Nodes)
        {
            node.UpdateNodeVersion(nodeID, version);
        }
    }

    // TODO: broadcastNodeExpire?  then we can purge the
    // known-stale cache entries...

    // MOCK: in a real env you have to hit the wire
    // (send this query to all remote nodes
    // concurrently):
    internal virtual TopDocs SearchNode(int nodeID, long[] nodeVersions, Query q, Sort sort, int numHits, ScoreDoc searchAfter)
    {
        NodeState.ShardIndexSearcher s = Nodes[nodeID].Acquire(nodeVersions);
        try
        {
            if (sort == null)
            {
                if (searchAfter != null)
                {
                    return s.LocalSearchAfter(searchAfter, q, numHits);
                }
                else
                {
                    return s.LocalSearch(q, numHits);
                }
            }
            else
            {
                Debug.Assert(searchAfter == null); // not supported yet
                return s.LocalSearch(q, numHits, sort);
            }
        }
        finally
        {
            // Always release, even on SearcherExpiredException, so the reader
            // ref-count stays balanced.
            Nodes[nodeID].Release(s);
        }
    }

    // Mock: in a real env, this would hit the wire and get
    // term stats from remote node
    internal virtual IDictionary<Term, TermStatistics> GetNodeTermStats(ISet<Term> terms, int nodeID, long version)
    {
        NodeState node = Nodes[nodeID];
        IDictionary<Term, TermStatistics> stats = new Dictionary<Term, TermStatistics>();
        IndexSearcher s = node.Searchers.Acquire(version);
        if (s == null)
        {
            // The lease for that searcher version has lapsed on the remote node.
            throw new SearcherExpiredException("node=" + nodeID + " version=" + version);
        }
        try
        {
            foreach (Term term in terms)
            {
                TermContext termContext = TermContext.Build(s.IndexReader.Context, term);
                stats[term] = s.TermStatistics(term, termContext);
            }
        }
        finally
        {
            node.Searchers.Release(s);
        }
        return stats;
    }

    /// <summary>
    /// One simulated shard: its directory, writer, searcher lifetime management,
    /// and the caches of remote nodes' statistics.
    /// </summary>
    protected internal sealed class NodeState : IDisposable
    {
        private readonly ShardSearchingTestBase OuterInstance;

        public readonly Directory Dir;
        public readonly IndexWriter Writer;
        public readonly SearcherLifetimeManager Searchers;
        public readonly SearcherManager Mgr;
        public readonly int MyNodeID;
        public readonly long[] CurrentNodeVersions;

        // TODO: nothing evicts from here!!!  Somehow, on searcher
        // expiration on remote nodes we must evict from our
        // local cache...?  And still LRU otherwise (for the
        // still-live searchers).

        internal readonly IDictionary<FieldAndShardVersion, CollectionStatistics> CollectionStatsCache = new ConcurrentDictionary<FieldAndShardVersion, CollectionStatistics>();
        internal readonly IDictionary<TermAndShardVersion, TermStatistics> TermStatsCache = new ConcurrentDictionary<TermAndShardVersion, TermStatistics>();

        /// <summary>
        /// Matches docs in the local shard but scores based on
        /// aggregated stats ("mock distributed scoring") from all
        /// nodes.
        /// </summary>
        public class ShardIndexSearcher : IndexSearcher
        {
            private readonly ShardSearchingTestBase.NodeState OuterInstance;

            // Version for the node searchers we search:
            public readonly long[] NodeVersions;

            public readonly int MyNodeID;

            public ShardIndexSearcher(ShardSearchingTestBase.NodeState outerInstance, long[] nodeVersions, IndexReader localReader, int nodeID)
                : base(localReader)
            {
                this.OuterInstance = outerInstance;
                this.NodeVersions = nodeVersions;
                MyNodeID = nodeID;
                Debug.Assert(MyNodeID == outerInstance.MyNodeID, "myNodeID=" + nodeID + " NodeState.this.myNodeID=" + outerInstance.MyNodeID);
            }

            public override Query Rewrite(Query original)
            {
                Query rewritten = base.Rewrite(original);
                HashSet<Term> terms = new HashSet<Term>();
                rewritten.ExtractTerms(terms);

                // Make a single request to remote nodes for term
                // stats:
                for (int nodeID = 0; nodeID < NodeVersions.Length; nodeID++)
                {
                    if (nodeID == MyNodeID)
                    {
                        continue;
                    }

                    // Collect only the terms not already cached for this
                    // node/version, then fetch those in one batch:
                    HashSet<Term> missing = new HashSet<Term>();
                    foreach (Term term in terms)
                    {
                        TermAndShardVersion key = new TermAndShardVersion(nodeID, NodeVersions[nodeID], term);
                        if (!OuterInstance.TermStatsCache.ContainsKey(key))
                        {
                            missing.Add(term);
                        }
                    }
                    if (missing.Count != 0)
                    {
                        foreach (KeyValuePair<Term, TermStatistics> ent in OuterInstance.OuterInstance.GetNodeTermStats(missing, nodeID, NodeVersions[nodeID]))
                        {
                            TermAndShardVersion key = new TermAndShardVersion(nodeID, NodeVersions[nodeID], ent.Key);
                            OuterInstance.TermStatsCache[key] = ent.Value;
                        }
                    }
                }

                return rewritten;
            }

            public override TermStatistics TermStatistics(Term term, TermContext context)
            {
                Debug.Assert(term != null);
                long docFreq = 0;
                long totalTermFreq = 0;
                for (int nodeID = 0; nodeID < NodeVersions.Length; nodeID++)
                {
                    TermStatistics subStats;
                    if (nodeID == MyNodeID)
                    {
                        subStats = base.TermStatistics(term, context);
                    }
                    else
                    {
                        TermAndShardVersion key = new TermAndShardVersion(nodeID, NodeVersions[nodeID], term);
                        subStats = OuterInstance.TermStatsCache[key];
                        // We pre-cached during rewrite so all terms
                        // better be here...
                        Debug.Assert(subStats != null);
                    }

                    // -1 means "unknown"; once any node reports unknown the
                    // aggregate becomes unknown too:
                    long nodeDocFreq = subStats.DocFreq();
                    if (docFreq >= 0 && nodeDocFreq >= 0)
                    {
                        docFreq += nodeDocFreq;
                    }
                    else
                    {
                        docFreq = -1;
                    }

                    long nodeTotalTermFreq = subStats.TotalTermFreq();
                    if (totalTermFreq >= 0 && nodeTotalTermFreq >= 0)
                    {
                        totalTermFreq += nodeTotalTermFreq;
                    }
                    else
                    {
                        totalTermFreq = -1;
                    }
                }

                return new TermStatistics(term.Bytes, docFreq, totalTermFreq);
            }

            public override CollectionStatistics CollectionStatistics(string field)
            {
                // TODO: we could compute this on init and cache,
                // since we are re-inited whenever any nodes have a
                // new reader
                long docCount = 0;
                long sumTotalTermFreq = 0;
                long sumDocFreq = 0;
                long maxDoc = 0;

                for (int nodeID = 0; nodeID < NodeVersions.Length; nodeID++)
                {
                    FieldAndShardVersion key = new FieldAndShardVersion(nodeID, NodeVersions[nodeID], field);
                    CollectionStatistics nodeStats;
                    if (nodeID == MyNodeID)
                    {
                        nodeStats = base.CollectionStatistics(field);
                    }
                    else
                    {
                        nodeStats = OuterInstance.CollectionStatsCache[key];
                    }
                    if (nodeStats == null)
                    {
                        Console.WriteLine("coll stats myNodeID=" + MyNodeID + ": " + OuterInstance.CollectionStatsCache.Keys);
                    }
                    // Collection stats are pre-shared on reopen, so,
                    // we better not have a cache miss:
                    Debug.Assert(nodeStats != null, "myNodeID=" + MyNodeID + " nodeID=" + nodeID + " version=" + NodeVersions[nodeID] + " field=" + field);

                    // Same -1-means-unknown aggregation as TermStatistics above:
                    long nodeDocCount = nodeStats.DocCount();
                    if (docCount >= 0 && nodeDocCount >= 0)
                    {
                        docCount += nodeDocCount;
                    }
                    else
                    {
                        docCount = -1;
                    }

                    long nodeSumTotalTermFreq = nodeStats.SumTotalTermFreq();
                    if (sumTotalTermFreq >= 0 && nodeSumTotalTermFreq >= 0)
                    {
                        sumTotalTermFreq += nodeSumTotalTermFreq;
                    }
                    else
                    {
                        sumTotalTermFreq = -1;
                    }

                    long nodeSumDocFreq = nodeStats.SumDocFreq();
                    if (sumDocFreq >= 0 && nodeSumDocFreq >= 0)
                    {
                        sumDocFreq += nodeSumDocFreq;
                    }
                    else
                    {
                        sumDocFreq = -1;
                    }

                    Debug.Assert(nodeStats.MaxDoc >= 0);
                    maxDoc += nodeStats.MaxDoc;
                }

                return new CollectionStatistics(field, maxDoc, docCount, sumTotalTermFreq, sumDocFreq);
            }

            public override TopDocs Search(Query query, int numHits)
            {
                // Fan the query out to every node, then merge the per-shard hits:
                TopDocs[] shardHits = new TopDocs[NodeVersions.Length];
                for (int nodeID = 0; nodeID < NodeVersions.Length; nodeID++)
                {
                    if (nodeID == MyNodeID)
                    {
                        // My node; run using local shard searcher we
                        // already acquired:
                        shardHits[nodeID] = LocalSearch(query, numHits);
                    }
                    else
                    {
                        shardHits[nodeID] = OuterInstance.OuterInstance.SearchNode(nodeID, NodeVersions, query, null, numHits, null);
                    }
                }

                // Merge:
                return TopDocs.Merge(null, numHits, shardHits);
            }

            public virtual TopDocs LocalSearch(Query query, int numHits)
            {
                return base.Search(query, numHits);
            }

            public override TopDocs SearchAfter(ScoreDoc after, Query query, int numHits)
            {
                TopDocs[] shardHits = new TopDocs[NodeVersions.Length];
                // results are merged in that order: score, shardIndex, doc. therefore we set
                // after to after.Score and depending on the nodeID we set doc to either:
                // - not collect any more documents with that score (only with worse score)
                // - collect more documents with that score (and worse) following the last collected document
                // - collect all documents with that score (and worse)
                ScoreDoc shardAfter = new ScoreDoc(after.Doc, after.Score);
                for (int nodeID = 0; nodeID < NodeVersions.Length; nodeID++)
                {
                    if (nodeID < after.ShardIndex)
                    {
                        // all documents with after.Score were already collected, so collect
                        // only documents with worse scores.
                        NodeState.ShardIndexSearcher s = OuterInstance.OuterInstance.Nodes[nodeID].Acquire(NodeVersions);
                        try
                        {
                            // Setting after.Doc to reader.MaxDoc-1 is a way to tell
                            // TopScoreDocCollector that no more docs with that score should
                            // be collected. note that in practice the shard which sends the
                            // request to a remote shard won't have reader.MaxDoc at hand, so
                            // it will send some arbitrary value which will be fixed on the
                            // other end.
                            shardAfter.Doc = s.IndexReader.MaxDoc - 1;
                        }
                        finally
                        {
                            OuterInstance.OuterInstance.Nodes[nodeID].Release(s);
                        }
                    }
                    else if (nodeID == after.ShardIndex)
                    {
                        // collect all documents following the last collected doc with
                        // after.Score + documents with worse scores.
                        shardAfter.Doc = after.Doc;
                    }
                    else
                    {
                        // all documents with after.Score (and worse) should be collected
                        // because they didn't make it to top-N in the previous round.
                        shardAfter.Doc = -1;
                    }
                    if (nodeID == MyNodeID)
                    {
                        // My node; run using local shard searcher we
                        // already acquired:
                        shardHits[nodeID] = LocalSearchAfter(shardAfter, query, numHits);
                    }
                    else
                    {
                        shardHits[nodeID] = OuterInstance.OuterInstance.SearchNode(nodeID, NodeVersions, query, null, numHits, shardAfter);
                    }
                    //System.out.println("  node=" + nodeID + " totHits=" + shardHits[nodeID].TotalHits);
                }

                // Merge:
                return TopDocs.Merge(null, numHits, shardHits);
            }

            public virtual TopDocs LocalSearchAfter(ScoreDoc after, Query query, int numHits)
            {
                return base.SearchAfter(after, query, numHits);
            }

            public override TopFieldDocs Search(Query query, int numHits, Sort sort)
            {
                Debug.Assert(sort != null);
                TopDocs[] shardHits = new TopDocs[NodeVersions.Length];
                for (int nodeID = 0; nodeID < NodeVersions.Length; nodeID++)
                {
                    if (nodeID == MyNodeID)
                    {
                        // My node; run using local shard searcher we
                        // already acquired:
                        shardHits[nodeID] = LocalSearch(query, numHits, sort);
                    }
                    else
                    {
                        shardHits[nodeID] = OuterInstance.OuterInstance.SearchNode(nodeID, NodeVersions, query, sort, numHits, null);
                    }
                }

                // Merge:
                return (TopFieldDocs)TopDocs.Merge(sort, numHits, shardHits);
            }

            public virtual TopFieldDocs LocalSearch(Query query, int numHits, Sort sort)
            {
                return base.Search(query, numHits, sort);
            }
        }

        internal volatile ShardIndexSearcher CurrentShardSearcher;

        public NodeState(ShardSearchingTestBase outerInstance, Random random, int nodeID, int numNodes)
        {
            this.OuterInstance = outerInstance;
            MyNodeID = nodeID;
            Dir = NewFSDirectory(CreateTempDir("ShardSearchingTestBase"));
            // TODO: set warmer
            MockAnalyzer analyzer = new MockAnalyzer(Random());
            analyzer.MaxTokenLength = TestUtil.NextInt(Random(), 1, IndexWriter.MAX_TERM_LENGTH);
            IndexWriterConfig iwc = new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer);
            iwc.SetOpenMode(IndexWriterConfig.OpenMode_e.CREATE);
            if (VERBOSE)
            {
                iwc.InfoStream = new PrintStreamInfoStream(Console.Out);
            }
            Writer = new IndexWriter(Dir, iwc);
            Mgr = new SearcherManager(Writer, true, null);
            Searchers = new SearcherLifetimeManager();

            // Init w/ 0s... caller above will do initial
            // "broadcast" by calling initSearcher:
            CurrentNodeVersions = new long[numNodes];
        }

        public void InitSearcher(long[] nodeVersions)
        {
            Debug.Assert(CurrentShardSearcher == null);
            Array.Copy(nodeVersions, 0, CurrentNodeVersions, 0, CurrentNodeVersions.Length);
            // Clone the versions array so later UpdateNodeVersion calls don't
            // mutate the snapshot held by this searcher:
            CurrentShardSearcher = new ShardIndexSearcher(this, (long[])CurrentNodeVersions.Clone(), Mgr.Acquire().IndexReader, MyNodeID);
        }

        public void UpdateNodeVersion(int nodeID, long version)
        {
            CurrentNodeVersions[nodeID] = version;
            if (CurrentShardSearcher != null)
            {
                // Drop this NodeState's reference on the previous searcher's
                // reader before swapping in a new searcher snapshot:
                CurrentShardSearcher.IndexReader.DecRef();
            }
            CurrentShardSearcher = new ShardIndexSearcher(this, (long[])CurrentNodeVersions.Clone(), Mgr.Acquire().IndexReader, MyNodeID);
        }

        // Get the current (fresh) searcher for this node
        public ShardIndexSearcher Acquire()
        {
            while (true)
            {
                ShardIndexSearcher s = CurrentShardSearcher;
                // In theory the reader could get decRef'd to 0
                // before we have a chance to incRef, ie if a reopen
                // happens right after the above line, this thread
                // gets stalled, and the old IR is closed.  So we
                // must try/retry until incRef succeeds:
                if (s.IndexReader.TryIncRef())
                {
                    return s;
                }
            }
        }

        public void Release(ShardIndexSearcher s)
        {
            s.IndexReader.DecRef();
        }

        // Get an old searcher matching the specified versions:
        public ShardIndexSearcher Acquire(long[] nodeVersions)
        {
            IndexSearcher s = Searchers.Acquire(nodeVersions[MyNodeID]);
            if (s == null)
            {
                throw new SearcherExpiredException("nodeID=" + MyNodeID + " version=" + nodeVersions[MyNodeID]);
            }
            return new ShardIndexSearcher(this, nodeVersions, s.IndexReader, MyNodeID);
        }

        // Reopen local reader
        public void Reopen()
        {
            IndexSearcher before = Mgr.Acquire();
            Mgr.Release(before);

            Mgr.MaybeRefresh();
            IndexSearcher after = Mgr.Acquire();
            try
            {
                // Reference inequality means MaybeRefresh opened a new searcher:
                if (after != before)
                {
                    // New searcher was opened
                    long version = Searchers.Record(after);
                    Searchers.Prune(new SearcherLifetimeManager.PruneByAge(OuterInstance.MaxSearcherAgeSeconds));
                    OuterInstance.BroadcastNodeReopen(MyNodeID, version, after);
                }
            }
            finally
            {
                Mgr.Release(after);
            }
        }

        public void Dispose()
        {
            if (CurrentShardSearcher != null)
            {
                CurrentShardSearcher.IndexReader.DecRef();
            }
            Searchers.Dispose();
            Mgr.Dispose();
            Writer.Dispose();
            Dir.Dispose();
        }
    }

    // TODO: make this more realistic, ie, each node should
    // have its own thread, so we have true node to node
    // concurrency
    private sealed class ChangeIndices : ThreadClass
    {
        private readonly ShardSearchingTestBase OuterInstance;

        public ChangeIndices(ShardSearchingTestBase outerInstance)
        {
            this.OuterInstance = outerInstance;
        }

        public override void Run()
        {
            try
            {
                LineFileDocs docs = new LineFileDocs(Random(), DefaultCodecSupportsDocValues());
                int numDocs = 0;
                while (DateTime.UtcNow < OuterInstance.EndTime)
                {
                    // Randomly add, update, or delete a document on a random node:
                    int what = Random().Next(3);
                    NodeState node = OuterInstance.Nodes[Random().Next(OuterInstance.Nodes.Length)];
                    if (numDocs == 0 || what == 0)
                    {
                        node.Writer.AddDocument(docs.NextDoc());
                        numDocs++;
                    }
                    else if (what == 1)
                    {
                        node.Writer.UpdateDocument(new Term("docid", "" + Random().Next(numDocs)), docs.NextDoc());
                        numDocs++;
                    }
                    else
                    {
                        node.Writer.DeleteDocuments(new Term("docid", "" + Random().Next(numDocs)));
                    }
                    // TODO: doc blocks too

                    // Occasionally commit and occasionally reopen a random node:
                    if (Random().Next(17) == 12)
                    {
                        node.Writer.Commit();
                    }

                    if (Random().Next(17) == 12)
                    {
                        OuterInstance.Nodes[Random().Next(OuterInstance.Nodes.Length)].Reopen();
                    }
                }
            }
            catch (Exception t)
            {
                Console.WriteLine("FAILED:");
                Console.Out.WriteLine(t.StackTrace);
                throw new Exception(t.Message, t);
            }
        }
    }

    protected internal NodeState[] Nodes;
    internal int MaxSearcherAgeSeconds;
    protected DateTime EndTime;
    private ThreadClass ChangeIndicesThread;

    /// <summary>
    /// Creates the simulated nodes, records and broadcasts their initial
    /// searcher versions, then starts the background index-mutation thread.
    /// Call <see cref="Finish"/> to shut everything down.
    /// </summary>
    protected internal virtual void Start(int numNodes, double runTimeSec, int maxSearcherAgeSeconds)
    {
        EndTime = DateTime.UtcNow.AddSeconds(runTimeSec);
        this.MaxSearcherAgeSeconds = maxSearcherAgeSeconds;

        Nodes = new NodeState[numNodes];
        for (int nodeID = 0; nodeID < numNodes; nodeID++)
        {
            Nodes[nodeID] = new NodeState(this, Random(), nodeID, numNodes);
        }

        long[] nodeVersions = new long[Nodes.Length];
        for (int nodeID = 0; nodeID < numNodes; nodeID++)
        {
            IndexSearcher s = Nodes[nodeID].Mgr.Acquire();
            try
            {
                nodeVersions[nodeID] = Nodes[nodeID].Searchers.Record(s);
            }
            finally
            {
                Nodes[nodeID].Mgr.Release(s);
            }
        }

        for (int nodeID = 0; nodeID < numNodes; nodeID++)
        {
            IndexSearcher s = Nodes[nodeID].Mgr.Acquire();
            // Recording the same searcher again must return the same version:
            Debug.Assert(nodeVersions[nodeID] == Nodes[nodeID].Searchers.Record(s));
            Debug.Assert(s != null);
            try
            {
                BroadcastNodeReopen(nodeID, nodeVersions[nodeID], s);
            }
            finally
            {
                Nodes[nodeID].Mgr.Release(s);
            }
        }

        ChangeIndicesThread = new ChangeIndices(this);
        ChangeIndicesThread.Start();
    }

    /// <summary>
    /// Waits for the mutation thread to finish (it exits once EndTime passes)
    /// and disposes all nodes.
    /// </summary>
    protected internal virtual void Finish()
    {
        ChangeIndicesThread.Join();
        foreach (NodeState node in Nodes)
        {
            node.Dispose();
        }
    }

    /// <summary>
    /// An IndexSearcher and associated version (lease)
    /// </summary>
    protected internal class SearcherAndVersion
    {
        public readonly IndexSearcher Searcher;
        public readonly long Version;

        public SearcherAndVersion(IndexSearcher searcher, long version)
        {
            this.Searcher = searcher;
            this.Version = version;
        }
    }
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure;
using Microsoft.Azure.Management.ApiManagement;
using Microsoft.Azure.Management.ApiManagement.SmapiModels;
namespace Microsoft.Azure.Management.ApiManagement
{
/// <summary>
/// .Net client wrapper for the REST API for Azure ApiManagement Service
/// </summary>
public static partial class ProductPolicyOperationsExtensions
{
/// <summary>
/// Deletes specific product policy of the Api Management service
/// instance.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductPolicyOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='pid'>
/// Required. Identifier of the product.
/// </param>
/// <param name='etag'>
/// Required. ETag.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static AzureOperationResponse Delete(this IProductPolicyOperations operations, string resourceGroupName, string serviceName, string pid, string etag)
{
return Task.Factory.StartNew((object s) =>
{
return ((IProductPolicyOperations)s).DeleteAsync(resourceGroupName, serviceName, pid, etag);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes specific product policy of the Api Management service
/// instance.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductPolicyOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='pid'>
/// Required. Identifier of the product.
/// </param>
/// <param name='etag'>
/// Required. ETag.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<AzureOperationResponse> DeleteAsync(this IProductPolicyOperations operations, string resourceGroupName, string serviceName, string pid, string etag)
{
return operations.DeleteAsync(resourceGroupName, serviceName, pid, etag, CancellationToken.None);
}
/// <summary>
/// Gets specific product policy.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductPolicyOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='pid'>
/// Required. Identifier of the product.
/// </param>
/// <param name='format'>
/// Required. Format of the policy. Supported formats:
/// application/vnd.ms-azure-apim.policy+xml
/// </param>
/// <returns>
/// The response model for the get policy output operation.
/// </returns>
public static PolicyGetResponse Get(this IProductPolicyOperations operations, string resourceGroupName, string serviceName, string pid, string format)
{
return Task.Factory.StartNew((object s) =>
{
return ((IProductPolicyOperations)s).GetAsync(resourceGroupName, serviceName, pid, format);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets specific product policy.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductPolicyOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='pid'>
/// Required. Identifier of the product.
/// </param>
/// <param name='format'>
/// Required. Format of the policy. Supported formats:
/// application/vnd.ms-azure-apim.policy+xml
/// </param>
/// <returns>
/// The response model for the get policy output operation.
/// </returns>
public static Task<PolicyGetResponse> GetAsync(this IProductPolicyOperations operations, string resourceGroupName, string serviceName, string pid, string format)
{
return operations.GetAsync(resourceGroupName, serviceName, pid, format, CancellationToken.None);
}
/// <summary>
/// Sets policy for product.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductPolicyOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='pid'>
/// Required. Identifier of the product.
/// </param>
/// <param name='format'>
/// Required. Format of the policy. Supported formats:
/// application/vnd.ms-azure-apim.policy+xml
/// </param>
/// <param name='policyStream'>
/// Required. Policy stream.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static AzureOperationResponse Set(this IProductPolicyOperations operations, string resourceGroupName, string serviceName, string pid, string format, Stream policyStream)
{
// Synchronous wrapper over SetAsync. StartNew on TaskScheduler.Default keeps
// the async work off any captured SynchronizationContext; Unwrap() flattens
// the nested task and GetResult() blocks for the outcome. Generated
// boilerplate — do not hand-simplify to .Result (deadlock risk on UI/ASP.NET
// sync contexts).
return Task.Factory.StartNew((object s) =>
{
return ((IProductPolicyOperations)s).SetAsync(resourceGroupName, serviceName, pid, format, policyStream);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Asynchronously assigns a policy to a specific product.
/// </summary>
/// <param name='operations'>
/// The Microsoft.Azure.Management.ApiManagement.IProductPolicyOperations
/// instance this extension method operates on.
/// </param>
/// <param name='resourceGroupName'>
/// Required. Name of the resource group containing the service.
/// </param>
/// <param name='serviceName'>
/// Required. Name of the Api Management service.
/// </param>
/// <param name='pid'>
/// Required. The product identifier.
/// </param>
/// <param name='format'>
/// Required. Policy format; application/vnd.ms-azure-apim.policy+xml is the
/// supported value.
/// </param>
/// <param name='policyStream'>
/// Required. Stream holding the policy content.
/// </param>
/// <returns>
/// A task that produces a standard service response (HTTP status code and
/// request ID).
/// </returns>
public static Task<AzureOperationResponse> SetAsync(this IProductPolicyOperations operations, string resourceGroupName, string serviceName, string pid, string format, Stream policyStream)
{
// Forward to the cancellable overload with a token that never signals.
CancellationToken cancellationToken = CancellationToken.None;
return operations.SetAsync(resourceGroupName, serviceName, pid, format, policyStream, cancellationToken);
}
}
}
| |
using PoESkillTree.Engine.GameModel;
using PoESkillTree.SkillTreeFiles;
using System;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Text.RegularExpressions;
namespace PoESkillTree.Utils.UrlProcessing
{
/// <summary>
/// Represents an object that extracts build information from the http://poeplanner.com urls.
/// </summary>
public class PoeplannerUrlDeserializer : BuildUrlDeserializer
{
    // Matches poeplanner build urls; the build segment uses the url-safe
    // base64 alphabet ('-' and '_'), optionally with '=' padding.
    private static readonly Regex UrlRegex = new Regex(@"(http(|s):\/\/|)(\w*\.|)poeplanner\.com\/(?<build>[\w-=]+)");

    // Lazily decoded bytes of the url's build segment (see GetRawData()).
    private byte[]? _rawData;

    /// <summary>
    /// Initializes a new instance of the <see cref="PoeplannerUrlDeserializer"/> class.
    /// </summary>
    /// <param name="buildUrl">The poeplanner build url.</param>
    /// <param name="ascendancyClasses">The instance of the <see cref="IAscendancyClasses"/>
    /// to access general information about skill tree.</param>
    private PoeplannerUrlDeserializer(string buildUrl, IAscendancyClasses ascendancyClasses) : base(buildUrl, ascendancyClasses)
    {
    }

    /// <summary>
    /// Creates the <see cref="PoeplannerUrlDeserializer"/> class instance if specified url is valid.
    /// </summary>
    /// <param name="buildUrl">A string containing a build url.</param>
    /// <param name="ascendancyClasses">Skill tree metadata forwarded to the created deserializer.</param>
    /// <param name="deserializer">When this method returns, contains the deserializer instance or null, if url conversion is impossible.</param>
    /// <returns>true if deserializer was created successfully; otherwise, false.</returns>
    public static bool TryCreate(
        string buildUrl, IAscendancyClasses ascendancyClasses,
        [NotNullWhen(true)] out BuildUrlDeserializer? deserializer)
    {
        if (!UrlRegex.IsMatch(buildUrl))
        {
            deserializer = null;
            return false;
        }

        deserializer = new PoeplannerUrlDeserializer(buildUrl, ascendancyClasses);
        return true;
    }

    /// <summary>
    /// Validates the url by attempting to decode its build segment; any
    /// decoding exception is returned via <paramref name="exception"/> instead
    /// of being thrown.
    /// </summary>
    public override bool ValidateBuildUrl([NotNullWhen(false)] out Exception? exception)
    {
        try
        {
            GetRawData();
            exception = null;
            return true;
        }
        catch (Exception e)
        {
            exception = e;
            return false;
        }
    }

    public override BuildUrlData GetBuildData()
    {
        PoeplannerData data = DecodePoeplannerUrl();
        return ParsePoeplannerData(data);
    }

    protected override int GetCharacterClassId()
    {
        var rawData = GetRawData();
        // Byte 5 packs the character class (low nibble) and ascendancy (high nibble).
        return rawData.Length < 6 ? 0 : rawData[5] & 15;
    }

    public override int GetAscendancyClassId()
    {
        var rawData = GetRawData();
        return rawData.Length < 6 ? 0 : rawData[5] >> 4 & 15;
    }

    #region Helpers

    /// <summary>
    /// Decodes the url's last path segment from url-safe base64 into raw
    /// bytes. The result is cached, so subsequent calls are free.
    /// </summary>
    private byte[] GetRawData()
    {
        if (_rawData != null)
        {
            return _rawData;
        }

        var buildSegment = BuildUrl.Split('/').LastOrDefault();
        if (buildSegment == null)
        {
            return Array.Empty<byte>();
        }

        // Map the url-safe base64 alphabet back to the standard one.
        buildSegment = buildSegment
            .Replace("-", "+")
            .Replace("_", "/");

        // Url-safe base64 commonly drops the trailing '=' padding; restore it,
        // since Convert.FromBase64String requires a length divisible by 4 and
        // would otherwise throw a FormatException on valid unpadded urls.
        var remainder = buildSegment.Length % 4;
        if (remainder > 0)
        {
            buildSegment += new string('=', 4 - remainder);
        }

        _rawData = Convert.FromBase64String(buildSegment);
        return _rawData;
    }

    /// <summary>
    /// Splits the raw bytes into the version/tab header and the three
    /// length-prefixed buffers (skill nodes, auras, equipment). Each buffer is
    /// preceded by a big-endian ushort holding its length.
    /// </summary>
    private PoeplannerData DecodePoeplannerUrl()
    {
        byte[] rawBytes = GetRawData();

        var skillsBuffSize = rawBytes[3] << 8 | rawBytes[4];
        var aurasBuffSize = rawBytes[5 + skillsBuffSize] << 8 | rawBytes[6 + skillsBuffSize];
        var equipBuffSize = rawBytes[7 + skillsBuffSize + aurasBuffSize] << 8 | rawBytes[8 + skillsBuffSize + aurasBuffSize];

        var data = new PoeplannerData
        {
            Version = rawBytes[0] << 8 | rawBytes[1],
            ActiveTab = rawBytes[2],
            NodesData = new byte[skillsBuffSize],
            AurasData = new byte[aurasBuffSize],
            EquipmentData = new byte[equipBuffSize]
        };

        Array.Copy(rawBytes, 5, data.NodesData, 0, skillsBuffSize);
        Array.Copy(rawBytes, 7 + skillsBuffSize, data.AurasData, 0, aurasBuffSize);
        Array.Copy(rawBytes, 9 + skillsBuffSize + aurasBuffSize, data.EquipmentData, 0, equipBuffSize);

        return data;
    }

    /// <summary>
    /// Converts the decoded buffers into a <see cref="BuildUrlData"/>,
    /// tolerating truncated node data at each section boundary.
    /// </summary>
    private static BuildUrlData ParsePoeplannerData(PoeplannerData data)
    {
        var result = new BuildUrlData { Version = data.Version };

        // There is a small bug in poeplanner, where class and ascendancy bytes are missing, when no one node was selected.
        // Need to check length
        if (data.NodesData.Length == 0)
        {
            return result;
        }

        result.CharacterClass = (CharacterClass)(data.NodesData[2] & 15);
        result.AscendancyClassId = data.NodesData[2] >> 4 & 15;

        if (data.NodesData.Length < 4)
        {
            return result;
        }

        result.Bandit = ConvertBanditId(data.NodesData[3] & 3);

        if (data.NodesData.Length < 6)
        {
            return result;
        }

        // Big-endian ushort count followed by that many big-endian node ids.
        var skilledNodesCount = data.NodesData[4] << 8 | data.NodesData[5];
        int i = 6;
        while (i < 2 * skilledNodesCount + 6)
        {
            // C# evaluates operands left-to-right, so the high byte is read first.
            result.SkilledNodesIds.Add((ushort)(data.NodesData[i++] << 8 | data.NodesData[i++]));
        }

        // Jewel section: a count byte, then per jewel a big-endian node id,
        // a payload-length byte and the payload itself.
        var jeweledNodesCount = data.NodesData[i++];
        for (var j = 0; j < jeweledNodesCount; j++)
        {
            var nodeId = data.NodesData[i++] << 8 | data.NodesData[i++];
            var jewelsDataBuffSize = data.NodesData[i++];
            var rawJewelData = data.NodesData.Skip(i).Take(jewelsDataBuffSize).ToList();
            i += jewelsDataBuffSize;
            result.Jewels[nodeId] = rawJewelData;
        }

        return result;
    }

    // Maps poeplanner's 2-bit bandit id onto the Bandit enum.
    private static Bandit ConvertBanditId(int id)
    {
        switch (id)
        {
            case 1:
                return Bandit.Alira;
            case 2:
                return Bandit.Kraityn;
            case 3:
                return Bandit.Oak;
            default:
                return Bandit.None;
        }
    }

    /// <summary>
    /// Represents preprocessed raw data.
    /// </summary>
    private class PoeplannerData
    {
        // Format version taken from the first two bytes of the payload.
        public int Version { get; set; }
        public byte ActiveTab { get; set; }
        public byte[] NodesData { get; set; } = default!;
        internal byte[] AurasData { get; set; } = default!;
        internal byte[] EquipmentData { get; set; } = default!;
    }

    #endregion
}
}
| |
// Copyright (C) 2014 dot42
//
// Original filename: Android.Net.Http.cs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma warning disable 1717
namespace Android.Net.Http
{
/// <summary>
/// <para>This class represents a set of one or more SSL errors and the associated SSL certificate. </para>
/// </summary>
/// <java-name>
/// android/net/http/SslError
/// </java-name>
[Dot42.DexImport("android/net/http/SslError", AccessFlags = 33)]
public partial class SslError
/* scope: __dot42__ */
{
// NOTE(review): generated dot42 binding stub. The [Dot42.DexImport]
// attributes map each member onto the Android runtime class; the C# bodies
// below (returning default values, marked "MethodBuilder.Create") are
// placeholders and are not the executed implementation. Do not hand-edit
// the attribute metadata.
/// <summary>
/// <para>Individual SSL errors (in the order from the least to the most severe): The certificate is not yet valid </para>
/// </summary>
/// <java-name>
/// SSL_NOTYETVALID
/// </java-name>
[Dot42.DexImport("SSL_NOTYETVALID", "I", AccessFlags = 25)]
public const int SSL_NOTYETVALID = 0;
/// <summary>
/// <para>The certificate has expired </para>
/// </summary>
/// <java-name>
/// SSL_EXPIRED
/// </java-name>
[Dot42.DexImport("SSL_EXPIRED", "I", AccessFlags = 25)]
public const int SSL_EXPIRED = 1;
/// <summary>
/// <para>Hostname mismatch </para>
/// </summary>
/// <java-name>
/// SSL_IDMISMATCH
/// </java-name>
[Dot42.DexImport("SSL_IDMISMATCH", "I", AccessFlags = 25)]
public const int SSL_IDMISMATCH = 2;
/// <summary>
/// <para>The certificate authority is not trusted </para>
/// </summary>
/// <java-name>
/// SSL_UNTRUSTED
/// </java-name>
[Dot42.DexImport("SSL_UNTRUSTED", "I", AccessFlags = 25)]
public const int SSL_UNTRUSTED = 3;
/// <summary>
/// <para>The number of different SSL errors. <xrefsect><xreftitle>Deprecated</xreftitle><xrefdescription><para>This constant is not necessary for using the SslError API and can change from release to release. </para></xrefdescription></xrefsect></para>
/// </summary>
/// <java-name>
/// SSL_MAX_ERROR
/// </java-name>
[Dot42.DexImport("SSL_MAX_ERROR", "I", AccessFlags = 25)]
public const int SSL_MAX_ERROR = 4;
/// <summary>
/// <para>Creates a new SslError object using the supplied error and certificate. The URL will be set to the empty string. <xrefsect><xreftitle>Deprecated</xreftitle><xrefdescription><para>Use SslError(int, SslCertificate, String) </para></xrefdescription></xrefsect></para>
/// </summary>
[Dot42.DexImport("<init>", "(ILandroid/net/http/SslCertificate;)V", AccessFlags = 1)]
public SslError(int error, global::Android.Net.Http.SslCertificate certificate) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Creates a new SslError object using the supplied error and certificate. The URL will be set to the empty string. <xrefsect><xreftitle>Deprecated</xreftitle><xrefdescription><para>Use SslError(int, SslCertificate, String) </para></xrefdescription></xrefsect></para>
/// </summary>
[Dot42.DexImport("<init>", "(ILjava/security/cert/X509Certificate;)V", AccessFlags = 1)]
public SslError(int error, global::Java.Security.Cert.X509Certificate certificate) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Gets the SSL certificate associated with this object. </para>
/// </summary>
/// <returns>
/// <para>The SSL certificate, non-null. </para>
/// </returns>
/// <java-name>
/// getCertificate
/// </java-name>
[Dot42.DexImport("getCertificate", "()Landroid/net/http/SslCertificate;", AccessFlags = 1)]
public virtual global::Android.Net.Http.SslCertificate GetCertificate() /* MethodBuilder.Create */
{
return default(global::Android.Net.Http.SslCertificate);
}
/// <summary>
/// <para>Adds the supplied SSL error to the set. </para>
/// </summary>
/// <returns>
/// <para>True if the error being added is a known SSL error, otherwise false. </para>
/// </returns>
/// <java-name>
/// addError
/// </java-name>
[Dot42.DexImport("addError", "(I)Z", AccessFlags = 1)]
public virtual bool AddError(int error) /* MethodBuilder.Create */
{
return default(bool);
}
/// <summary>
/// <para>Determines whether this object includes the supplied error. </para>
/// </summary>
/// <returns>
/// <para>True if this object includes the error, otherwise false. </para>
/// </returns>
/// <java-name>
/// hasError
/// </java-name>
[Dot42.DexImport("hasError", "(I)Z", AccessFlags = 1)]
public virtual bool HasError(int error) /* MethodBuilder.Create */
{
return default(bool);
}
/// <summary>
/// <para>Gets the most severe SSL error in this object's set of errors. Returns -1 if the set is empty. </para>
/// </summary>
/// <returns>
/// <para>The most severe SSL error, or -1 if the set is empty. </para>
/// </returns>
/// <java-name>
/// getPrimaryError
/// </java-name>
[Dot42.DexImport("getPrimaryError", "()I", AccessFlags = 1)]
public virtual int GetPrimaryError() /* MethodBuilder.Create */
{
return default(int);
}
/// <summary>
/// <para>Returns a string representation of this object. </para>
/// </summary>
/// <returns>
/// <para>A String representation of this object. </para>
/// </returns>
/// <java-name>
/// toString
/// </java-name>
[Dot42.DexImport("toString", "()Ljava/lang/String;", AccessFlags = 1)]
public override string ToString() /* MethodBuilder.Create */
{
return default(string);
}
// Hidden parameterless constructor added by the binding generator; not part
// of the public Android API surface (hence EditorBrowsable(Never)).
[global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
internal SslError() /* TypeBuilder.AddDefaultConstructor */
{
}
/// <summary>
/// <para>Gets the SSL certificate associated with this object. </para>
/// </summary>
/// <returns>
/// <para>The SSL certificate, non-null. </para>
/// </returns>
/// <java-name>
/// getCertificate
/// </java-name>
public global::Android.Net.Http.SslCertificate Certificate
{
[Dot42.DexImport("getCertificate", "()Landroid/net/http/SslCertificate;", AccessFlags = 1)]
get{ return GetCertificate(); }
}
/// <summary>
/// <para>Gets the most severe SSL error in this object's set of errors. Returns -1 if the set is empty. </para>
/// </summary>
/// <returns>
/// <para>The most severe SSL error, or -1 if the set is empty. </para>
/// </returns>
/// <java-name>
/// getPrimaryError
/// </java-name>
public int PrimaryError
{
[Dot42.DexImport("getPrimaryError", "()I", AccessFlags = 1)]
get{ return GetPrimaryError(); }
}
}
/// <summary>
/// <para>Implementation of the Apache DefaultHttpClient that is configured with reasonable default settings and registered schemes for Android. Don't create this directly, use the newInstance factory method.</para><para>This client processes cookies but does not retain them by default. To retain cookies, simply add a cookie store to the HttpContext:</para><para><pre>context.setAttribute(ClientContext.COOKIE_STORE, cookieStore);</pre> </para>
/// </summary>
/// <java-name>
/// android/net/http/AndroidHttpClient
/// </java-name>
[Dot42.DexImport("android/net/http/AndroidHttpClient", AccessFlags = 49)]
public sealed partial class AndroidHttpClient : global::Org.Apache.Http.Client.IHttpClient
/* scope: __dot42__ */
{
// NOTE(review): generated dot42 binding stub. The [Dot42.DexImport]
// attributes map each member onto the Android runtime class; the C# bodies
// below (returning default values, marked "MethodBuilder.Create") are
// placeholders and are not the executed implementation. Do not hand-edit
// the attribute metadata.
/// <java-name>
/// DEFAULT_SYNC_MIN_GZIP_BYTES
/// </java-name>
// Value is supplied by the imported Android runtime field; presumably the
// default minimum payload size for gzip compression — TODO confirm against
// the Android platform source.
[Dot42.DexImport("DEFAULT_SYNC_MIN_GZIP_BYTES", "J", AccessFlags = 9)]
public static long DEFAULT_SYNC_MIN_GZIP_BYTES;
// Hidden constructor: instances must be obtained via NewInstance(...).
[Dot42.DexImport("<init>", "()V", AccessFlags = 0)]
internal AndroidHttpClient() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Create a new HttpClient with reasonable defaults (which you can update).</para><para></para>
/// </summary>
/// <returns>
/// <para>AndroidHttpClient for you to use for all your requests. </para>
/// </returns>
/// <java-name>
/// newInstance
/// </java-name>
[Dot42.DexImport("newInstance", "(Ljava/lang/String;Landroid/content/Context;)Landroid/net/http/AndroidHttpClient;" +
"", AccessFlags = 9)]
public static global::Android.Net.Http.AndroidHttpClient NewInstance(string userAgent, global::Android.Content.Context context) /* MethodBuilder.Create */
{
return default(global::Android.Net.Http.AndroidHttpClient);
}
/// <summary>
/// <para>Create a new HttpClient with reasonable defaults (which you can update). </para>
/// </summary>
/// <returns>
/// <para>AndroidHttpClient for you to use for all your requests. </para>
/// </returns>
/// <java-name>
/// newInstance
/// </java-name>
[Dot42.DexImport("newInstance", "(Ljava/lang/String;)Landroid/net/http/AndroidHttpClient;", AccessFlags = 9)]
public static global::Android.Net.Http.AndroidHttpClient NewInstance(string userAgent) /* MethodBuilder.Create */
{
return default(global::Android.Net.Http.AndroidHttpClient);
}
/// <java-name>
/// finalize
/// </java-name>
// Finalizer is bound to the Java finalize() method; declared extern because
// the body lives in the imported Dex code.
[Dot42.DexImport("finalize", "()V", AccessFlags = 4)]
extern ~AndroidHttpClient() /* MethodBuilder.Create */ ;
/// <summary>
/// <para>Modifies a request to indicate to the server that we would like a gzipped response. (Uses the "Accept-Encoding" HTTP header.) <para>getUngzippedContent </para></para>
/// </summary>
/// <java-name>
/// modifyRequestToAcceptGzipResponse
/// </java-name>
[Dot42.DexImport("modifyRequestToAcceptGzipResponse", "(Lorg/apache/http/HttpRequest;)V", AccessFlags = 9)]
public static void ModifyRequestToAcceptGzipResponse(global::Org.Apache.Http.IHttpRequest request) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Gets the input stream from a response entity. If the entity is gzipped then this will get a stream over the uncompressed data.</para><para></para>
/// </summary>
/// <returns>
/// <para>the input stream to read from </para>
/// </returns>
/// <java-name>
/// getUngzippedContent
/// </java-name>
[Dot42.DexImport("getUngzippedContent", "(Lorg/apache/http/HttpEntity;)Ljava/io/InputStream;", AccessFlags = 9)]
public static global::Java.Io.InputStream GetUngzippedContent(global::Org.Apache.Http.IHttpEntity entity) /* MethodBuilder.Create */
{
return default(global::Java.Io.InputStream);
}
/// <summary>
/// <para>Release resources associated with this client. You must call this, or significant resources (sockets and memory) may be leaked. </para>
/// </summary>
/// <java-name>
/// close
/// </java-name>
[Dot42.DexImport("close", "()V", AccessFlags = 1)]
public void Close() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Obtains the parameters for this client. These parameters will become defaults for all requests being executed with this client, and for the parameters of dependent objects in this client.</para><para></para>
/// </summary>
/// <returns>
/// <para>the default parameters </para>
/// </returns>
/// <java-name>
/// getParams
/// </java-name>
[Dot42.DexImport("getParams", "()Lorg/apache/http/params/HttpParams;", AccessFlags = 1)]
public global::Org.Apache.Http.Params.IHttpParams GetParams() /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.Params.IHttpParams);
}
/// <summary>
/// <para>Obtains the connection manager used by this client.</para><para></para>
/// </summary>
/// <returns>
/// <para>the connection manager </para>
/// </returns>
/// <java-name>
/// getConnectionManager
/// </java-name>
[Dot42.DexImport("getConnectionManager", "()Lorg/apache/http/conn/ClientConnectionManager;", AccessFlags = 1)]
public global::Org.Apache.Http.Conn.IClientConnectionManager GetConnectionManager() /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.Conn.IClientConnectionManager);
}
/// <summary>
/// <para>Executes a request using the default context.</para><para></para>
/// </summary>
/// <returns>
/// <para>the response to the request. This is always a final response, never an intermediate response with an 1xx status code. Whether redirects or authentication challenges will be returned or handled automatically depends on the implementation and configuration of this client. </para>
/// </returns>
/// <java-name>
/// execute
/// </java-name>
[Dot42.DexImport("execute", "(Lorg/apache/http/client/methods/HttpUriRequest;)Lorg/apache/http/HttpResponse;", AccessFlags = 1)]
public global::Org.Apache.Http.IHttpResponse Execute(global::Org.Apache.Http.Client.Methods.IHttpUriRequest request) /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.IHttpResponse);
}
/// <summary>
/// <para>Executes a request using the given context. The route to the target will be determined by the HTTP client.</para><para></para>
/// </summary>
/// <returns>
/// <para>the response to the request. This is always a final response, never an intermediate response with an 1xx status code. Whether redirects or authentication challenges will be returned or handled automatically depends on the implementation and configuration of this client. </para>
/// </returns>
/// <java-name>
/// execute
/// </java-name>
[Dot42.DexImport("execute", "(Lorg/apache/http/client/methods/HttpUriRequest;Lorg/apache/http/protocol/HttpCon" +
"text;)Lorg/apache/http/HttpResponse;", AccessFlags = 1)]
public global::Org.Apache.Http.IHttpResponse Execute(global::Org.Apache.Http.Client.Methods.IHttpUriRequest request, global::Org.Apache.Http.Protocol.IHttpContext context) /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.IHttpResponse);
}
/// <summary>
/// <para>Executes a request using the given context. The route to the target will be determined by the HTTP client.</para><para></para>
/// </summary>
/// <returns>
/// <para>the response to the request. This is always a final response, never an intermediate response with an 1xx status code. Whether redirects or authentication challenges will be returned or handled automatically depends on the implementation and configuration of this client. </para>
/// </returns>
/// <java-name>
/// execute
/// </java-name>
// NOTE(review): parameter names here ("request", "context") do not match
// their roles (host, request) — kept as-is since they are generated and
// renaming could break named-argument callers.
[Dot42.DexImport("execute", "(Lorg/apache/http/HttpHost;Lorg/apache/http/HttpRequest;)Lorg/apache/http/HttpRes" +
"ponse;", AccessFlags = 1)]
public global::Org.Apache.Http.IHttpResponse Execute(global::Org.Apache.Http.HttpHost request, global::Org.Apache.Http.IHttpRequest context) /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.IHttpResponse);
}
/// <summary>
/// <para>Executes a request to the target using the given context.</para><para></para>
/// </summary>
/// <returns>
/// <para>the response to the request. This is always a final response, never an intermediate response with an 1xx status code. Whether redirects or authentication challenges will be returned or handled automatically depends on the implementation and configuration of this client. </para>
/// </returns>
/// <java-name>
/// execute
/// </java-name>
[Dot42.DexImport("execute", "(Lorg/apache/http/HttpHost;Lorg/apache/http/HttpRequest;Lorg/apache/http/protocol" +
"/HttpContext;)Lorg/apache/http/HttpResponse;", AccessFlags = 1)]
public global::Org.Apache.Http.IHttpResponse Execute(global::Org.Apache.Http.HttpHost target, global::Org.Apache.Http.IHttpRequest request, global::Org.Apache.Http.Protocol.IHttpContext context) /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.IHttpResponse);
}
/// <summary>
/// <para>Executes a request using the given context. The route to the target will be determined by the HTTP client.</para><para></para>
/// </summary>
/// <returns>
/// <para>the response to the request. This is always a final response, never an intermediate response with an 1xx status code. Whether redirects or authentication challenges will be returned or handled automatically depends on the implementation and configuration of this client. </para>
/// </returns>
/// <java-name>
/// execute
/// </java-name>
[Dot42.DexImport("execute", "(Lorg/apache/http/client/methods/HttpUriRequest;Lorg/apache/http/client/ResponseH" +
"andler;)Ljava/lang/Object;", AccessFlags = 1, Signature = "<T:Ljava/lang/Object;>(Lorg/apache/http/client/methods/HttpUriRequest;Lorg/apache" +
"/http/client/ResponseHandler<+TT;>;)TT;")]
public T Execute<T>(global::Org.Apache.Http.Client.Methods.IHttpUriRequest request, global::Org.Apache.Http.Client.IResponseHandler<T> context) /* MethodBuilder.Create */
{
return default(T);
}
/// <summary>
/// <para>Executes a request to the target using the given context.</para><para></para>
/// </summary>
/// <returns>
/// <para>the response to the request. This is always a final response, never an intermediate response with an 1xx status code. Whether redirects or authentication challenges will be returned or handled automatically depends on the implementation and configuration of this client. </para>
/// </returns>
/// <java-name>
/// execute
/// </java-name>
[Dot42.DexImport("execute", "(Lorg/apache/http/client/methods/HttpUriRequest;Lorg/apache/http/client/ResponseH" +
"andler;Lorg/apache/http/protocol/HttpContext;)Ljava/lang/Object;", AccessFlags = 1, Signature = "<T:Ljava/lang/Object;>(Lorg/apache/http/client/methods/HttpUriRequest;Lorg/apache" +
"/http/client/ResponseHandler<+TT;>;Lorg/apache/http/protocol/HttpContext;)TT;")]
public T Execute<T>(global::Org.Apache.Http.Client.Methods.IHttpUriRequest target, global::Org.Apache.Http.Client.IResponseHandler<T> request, global::Org.Apache.Http.Protocol.IHttpContext context) /* MethodBuilder.Create */
{
return default(T);
}
/// <summary>
/// <para>Executes a request to the target using the given context.</para><para></para>
/// </summary>
/// <returns>
/// <para>the response to the request. This is always a final response, never an intermediate response with an 1xx status code. Whether redirects or authentication challenges will be returned or handled automatically depends on the implementation and configuration of this client. </para>
/// </returns>
/// <java-name>
/// execute
/// </java-name>
[Dot42.DexImport("execute", "(Lorg/apache/http/HttpHost;Lorg/apache/http/HttpRequest;Lorg/apache/http/client/R" +
"esponseHandler;)Ljava/lang/Object;", AccessFlags = 1, Signature = "<T:Ljava/lang/Object;>(Lorg/apache/http/HttpHost;Lorg/apache/http/HttpRequest;Lor" +
"g/apache/http/client/ResponseHandler<+TT;>;)TT;")]
public T Execute<T>(global::Org.Apache.Http.HttpHost target, global::Org.Apache.Http.IHttpRequest request, global::Org.Apache.Http.Client.IResponseHandler<T> context) /* MethodBuilder.Create */
{
return default(T);
}
/// <summary>
/// <para>Executes a request to the target using the given context and processes the response using the given response handler.</para><para></para>
/// </summary>
/// <returns>
/// <para>the response object as generated by the response handler. </para>
/// </returns>
/// <java-name>
/// execute
/// </java-name>
[Dot42.DexImport("execute", "(Lorg/apache/http/HttpHost;Lorg/apache/http/HttpRequest;Lorg/apache/http/client/R" +
"esponseHandler;Lorg/apache/http/protocol/HttpContext;)Ljava/lang/Object;", AccessFlags = 1, Signature = "<T:Ljava/lang/Object;>(Lorg/apache/http/HttpHost;Lorg/apache/http/HttpRequest;Lor" +
"g/apache/http/client/ResponseHandler<+TT;>;Lorg/apache/http/protocol/HttpContext" +
";)TT;")]
public T Execute<T>(global::Org.Apache.Http.HttpHost target, global::Org.Apache.Http.IHttpRequest request, global::Org.Apache.Http.Client.IResponseHandler<T> responseHandler, global::Org.Apache.Http.Protocol.IHttpContext context) /* MethodBuilder.Create */
{
return default(T);
}
/// <summary>
/// <para>Compress data to send to server. Creates a Http Entity holding the gzipped data. The data will not be compressed if it is too short. </para>
/// </summary>
/// <returns>
/// <para>Entity holding the data </para>
/// </returns>
/// <java-name>
/// getCompressedEntity
/// </java-name>
[Dot42.DexImport("getCompressedEntity", "([BLandroid/content/ContentResolver;)Lorg/apache/http/entity/AbstractHttpEntity;", AccessFlags = 9)]
public static global::Org.Apache.Http.Entity.AbstractHttpEntity GetCompressedEntity(sbyte[] data, global::Android.Content.ContentResolver resolver) /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.Entity.AbstractHttpEntity);
}
/// <summary>
/// <para>Compress data to send to server. Creates a Http Entity holding the gzipped data. The data will not be compressed if it is too short. </para>
/// </summary>
/// <returns>
/// <para>Entity holding the data </para>
/// </returns>
/// <java-name>
/// getCompressedEntity
/// </java-name>
// C#-friendly byte[] overload of the sbyte[] binding above (Java's byte is
// signed, hence both variants; IgnoreFromJava prevents a duplicate export).
[Dot42.DexImport("getCompressedEntity", "([BLandroid/content/ContentResolver;)Lorg/apache/http/entity/AbstractHttpEntity;", AccessFlags = 9, IgnoreFromJava = true)]
public static global::Org.Apache.Http.Entity.AbstractHttpEntity GetCompressedEntity(byte[] data, global::Android.Content.ContentResolver resolver) /* MethodBuilder.Create */
{
return default(global::Org.Apache.Http.Entity.AbstractHttpEntity);
}
/// <summary>
/// <para>Retrieves the minimum size for compressing data. Shorter data will not be compressed. </para>
/// </summary>
/// <java-name>
/// getMinGzipSize
/// </java-name>
[Dot42.DexImport("getMinGzipSize", "(Landroid/content/ContentResolver;)J", AccessFlags = 9)]
public static long GetMinGzipSize(global::Android.Content.ContentResolver resolver) /* MethodBuilder.Create */
{
return default(long);
}
/// <summary>
/// <para>Enables cURL request logging for this client.</para><para></para>
/// </summary>
/// <java-name>
/// enableCurlLogging
/// </java-name>
[Dot42.DexImport("enableCurlLogging", "(Ljava/lang/String;I)V", AccessFlags = 1)]
public void EnableCurlLogging(string name, int level) /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Disables cURL logging for this client. </para>
/// </summary>
/// <java-name>
/// disableCurlLogging
/// </java-name>
[Dot42.DexImport("disableCurlLogging", "()V", AccessFlags = 1)]
public void DisableCurlLogging() /* MethodBuilder.Create */
{
}
/// <summary>
/// <para>Returns the date of the given HTTP date string. This method can identify and parse the date formats emitted by common HTTP servers, such as , , , and .</para><para></para>
/// </summary>
/// <returns>
/// <para>the number of milliseconds since Jan. 1, 1970, midnight GMT. </para>
/// </returns>
/// <java-name>
/// parseDate
/// </java-name>
[Dot42.DexImport("parseDate", "(Ljava/lang/String;)J", AccessFlags = 9)]
public static long ParseDate(string dateString) /* MethodBuilder.Create */
{
return default(long);
}
/// <summary>
/// <para>Obtains the parameters for this client. These parameters will become defaults for all requests being executed with this client, and for the parameters of dependent objects in this client.</para><para></para>
/// </summary>
/// <returns>
/// <para>the default parameters </para>
/// </returns>
/// <java-name>
/// getParams
/// </java-name>
public global::Org.Apache.Http.Params.IHttpParams Params
{
[Dot42.DexImport("getParams", "()Lorg/apache/http/params/HttpParams;", AccessFlags = 1)]
get{ return GetParams(); }
}
/// <summary>
/// <para>Obtains the connection manager used by this client.</para><para></para>
/// </summary>
/// <returns>
/// <para>the connection manager </para>
/// </returns>
/// <java-name>
/// getConnectionManager
/// </java-name>
public global::Org.Apache.Http.Conn.IClientConnectionManager ConnectionManager
{
[Dot42.DexImport("getConnectionManager", "()Lorg/apache/http/conn/ClientConnectionManager;", AccessFlags = 1)]
get{ return GetConnectionManager(); }
}
}
/// <summary>
/// <para>SSL certificate info (certificate details) class </para>
/// </summary>
/// <java-name>
/// android/net/http/SslCertificate
/// </java-name>
// NOTE: Dot42-generated binding stub. Every method body below is a placeholder
// (returns default(...)); the [DexImport] attribute maps the member onto the
// real android.net.http.SslCertificate implementation at compile time.
// Do not hand-edit logic in this class.
[Dot42.DexImport("android/net/http/SslCertificate", AccessFlags = 33)]
public partial class SslCertificate
/* scope: __dot42__ */
{
        // Constructor taking issued-to, issued-by and the two validity strings
        // (see the Java signature in the DexImport descriptor).
        [Dot42.DexImport("<init>", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V", AccessFlags = 1)]
        public SslCertificate(string @string, string string1, string string2, string string3) /* MethodBuilder.Create */
        {
        }

        // Constructor taking issued-to, issued-by and the validity period as Dates.
        [Dot42.DexImport("<init>", "(Ljava/lang/String;Ljava/lang/String;Ljava/util/Date;Ljava/util/Date;)V", AccessFlags = 1)]
        public SslCertificate(string @string, string string1, global::Java.Util.Date date, global::Java.Util.Date date1) /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>Creates a new SSL certificate object from an X509 certificate </para>
        /// </summary>
        [Dot42.DexImport("<init>", "(Ljava/security/cert/X509Certificate;)V", AccessFlags = 1)]
        public SslCertificate(global::Java.Security.Cert.X509Certificate certificate) /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>Saves the certificate state to a bundle </para>
        /// </summary>
        /// <returns>
        /// <para>A bundle with the certificate stored in it or null if fails </para>
        /// </returns>
        /// <java-name>
        /// saveState
        /// </java-name>
        [Dot42.DexImport("saveState", "(Landroid/net/http/SslCertificate;)Landroid/os/Bundle;", AccessFlags = 9)]
        public static global::Android.Os.Bundle SaveState(global::Android.Net.Http.SslCertificate certificate) /* MethodBuilder.Create */
        {
            return default(global::Android.Os.Bundle);
        }

        /// <summary>
        /// <para>Restores the certificate stored in the bundle </para>
        /// </summary>
        /// <returns>
        /// <para>The SSL certificate stored in the bundle or null if fails </para>
        /// </returns>
        /// <java-name>
        /// restoreState
        /// </java-name>
        [Dot42.DexImport("restoreState", "(Landroid/os/Bundle;)Landroid/net/http/SslCertificate;", AccessFlags = 9)]
        public static global::Android.Net.Http.SslCertificate RestoreState(global::Android.Os.Bundle bundle) /* MethodBuilder.Create */
        {
            return default(global::Android.Net.Http.SslCertificate);
        }

        /// <summary>
        /// <para></para>
        /// </summary>
        /// <returns>
        /// <para>Not-before date from the certificate validity period or "" if none has been set </para>
        /// </returns>
        /// <java-name>
        /// getValidNotBeforeDate
        /// </java-name>
        [Dot42.DexImport("getValidNotBeforeDate", "()Ljava/util/Date;", AccessFlags = 1)]
        public virtual global::Java.Util.Date GetValidNotBeforeDate() /* MethodBuilder.Create */
        {
            return default(global::Java.Util.Date);
        }

        /// <summary>
        /// <para><xrefsect><xreftitle>Deprecated</xreftitle><xrefdescription><para>Use getValidNotBeforeDate() </para></xrefdescription></xrefsect></para>
        /// </summary>
        /// <returns>
        /// <para>Not-before date from the certificate validity period in ISO 8601 format or "" if none has been set</para>
        /// </returns>
        /// <java-name>
        /// getValidNotBefore
        /// </java-name>
        [Dot42.DexImport("getValidNotBefore", "()Ljava/lang/String;", AccessFlags = 1)]
        public virtual string GetValidNotBefore() /* MethodBuilder.Create */
        {
            return default(string);
        }

        /// <summary>
        /// <para></para>
        /// </summary>
        /// <returns>
        /// <para>Not-after date from the certificate validity period or "" if none has been set </para>
        /// </returns>
        /// <java-name>
        /// getValidNotAfterDate
        /// </java-name>
        [Dot42.DexImport("getValidNotAfterDate", "()Ljava/util/Date;", AccessFlags = 1)]
        public virtual global::Java.Util.Date GetValidNotAfterDate() /* MethodBuilder.Create */
        {
            return default(global::Java.Util.Date);
        }

        /// <summary>
        /// <para><xrefsect><xreftitle>Deprecated</xreftitle><xrefdescription><para>Use getValidNotAfterDate() </para></xrefdescription></xrefsect></para>
        /// </summary>
        /// <returns>
        /// <para>Not-after date from the certificate validity period in ISO 8601 format or "" if none has been set</para>
        /// </returns>
        /// <java-name>
        /// getValidNotAfter
        /// </java-name>
        [Dot42.DexImport("getValidNotAfter", "()Ljava/lang/String;", AccessFlags = 1)]
        public virtual string GetValidNotAfter() /* MethodBuilder.Create */
        {
            return default(string);
        }

        /// <summary>
        /// <para></para>
        /// </summary>
        /// <returns>
        /// <para>Issued-to distinguished name or null if none has been set </para>
        /// </returns>
        /// <java-name>
        /// getIssuedTo
        /// </java-name>
        [Dot42.DexImport("getIssuedTo", "()Landroid/net/http/SslCertificate$DName;", AccessFlags = 1)]
        public virtual global::Android.Net.Http.SslCertificate.DName GetIssuedTo() /* MethodBuilder.Create */
        {
            return default(global::Android.Net.Http.SslCertificate.DName);
        }

        /// <summary>
        /// <para></para>
        /// </summary>
        /// <returns>
        /// <para>Issued-by distinguished name or null if none has been set </para>
        /// </returns>
        /// <java-name>
        /// getIssuedBy
        /// </java-name>
        [Dot42.DexImport("getIssuedBy", "()Landroid/net/http/SslCertificate$DName;", AccessFlags = 1)]
        public virtual global::Android.Net.Http.SslCertificate.DName GetIssuedBy() /* MethodBuilder.Create */
        {
            return default(global::Android.Net.Http.SslCertificate.DName);
        }

        /// <summary>
        /// <para></para>
        /// </summary>
        /// <returns>
        /// <para>A string representation of this certificate for debugging </para>
        /// </returns>
        /// <java-name>
        /// toString
        /// </java-name>
        [Dot42.DexImport("toString", "()Ljava/lang/String;", AccessFlags = 1)]
        public override string ToString() /* MethodBuilder.Create */
        {
            return default(string);
        }

        // Hidden default constructor added by the binding generator; not part of
        // the public Android API surface.
        [global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
        internal SslCertificate() /* TypeBuilder.AddDefaultConstructor */
        {
        }

        /// <summary>
        /// <para></para>
        /// </summary>
        /// <returns>
        /// <para>Not-before date from the certificate validity period or "" if none has been set </para>
        /// </returns>
        /// <java-name>
        /// getValidNotBeforeDate
        /// </java-name>
        // C#-style property wrapper generated over the Java getter.
        public global::Java.Util.Date ValidNotBeforeDate
        {
            [Dot42.DexImport("getValidNotBeforeDate", "()Ljava/util/Date;", AccessFlags = 1)]
            get{ return GetValidNotBeforeDate(); }
        }

        /// <summary>
        /// <para><xrefsect><xreftitle>Deprecated</xreftitle><xrefdescription><para>Use getValidNotBeforeDate() </para></xrefdescription></xrefsect></para>
        /// </summary>
        /// <returns>
        /// <para>Not-before date from the certificate validity period in ISO 8601 format or "" if none has been set</para>
        /// </returns>
        /// <java-name>
        /// getValidNotBefore
        /// </java-name>
        public string ValidNotBefore
        {
            [Dot42.DexImport("getValidNotBefore", "()Ljava/lang/String;", AccessFlags = 1)]
            get{ return GetValidNotBefore(); }
        }

        /// <summary>
        /// <para></para>
        /// </summary>
        /// <returns>
        /// <para>Not-after date from the certificate validity period or "" if none has been set </para>
        /// </returns>
        /// <java-name>
        /// getValidNotAfterDate
        /// </java-name>
        public global::Java.Util.Date ValidNotAfterDate
        {
            [Dot42.DexImport("getValidNotAfterDate", "()Ljava/util/Date;", AccessFlags = 1)]
            get{ return GetValidNotAfterDate(); }
        }

        /// <summary>
        /// <para><xrefsect><xreftitle>Deprecated</xreftitle><xrefdescription><para>Use getValidNotAfterDate() </para></xrefdescription></xrefsect></para>
        /// </summary>
        /// <returns>
        /// <para>Not-after date from the certificate validity period in ISO 8601 format or "" if none has been set</para>
        /// </returns>
        /// <java-name>
        /// getValidNotAfter
        /// </java-name>
        public string ValidNotAfter
        {
            [Dot42.DexImport("getValidNotAfter", "()Ljava/lang/String;", AccessFlags = 1)]
            get{ return GetValidNotAfter(); }
        }

        /// <summary>
        /// <para></para>
        /// </summary>
        /// <returns>
        /// <para>Issued-to distinguished name or null if none has been set </para>
        /// </returns>
        /// <java-name>
        /// getIssuedTo
        /// </java-name>
        public global::Android.Net.Http.SslCertificate.DName IssuedTo
        {
            [Dot42.DexImport("getIssuedTo", "()Landroid/net/http/SslCertificate$DName;", AccessFlags = 1)]
            get{ return GetIssuedTo(); }
        }

        /// <summary>
        /// <para></para>
        /// </summary>
        /// <returns>
        /// <para>Issued-by distinguished name or null if none has been set </para>
        /// </returns>
        /// <java-name>
        /// getIssuedBy
        /// </java-name>
        public global::Android.Net.Http.SslCertificate.DName IssuedBy
        {
            [Dot42.DexImport("getIssuedBy", "()Landroid/net/http/SslCertificate$DName;", AccessFlags = 1)]
            get{ return GetIssuedBy(); }
        }

        /// <summary>
        /// <para>A distinguished name helper class: a 3-tuple of: <ul><li><para>the most specific common name (CN) </para></li><li><para>the most specific organization (O) </para></li><li><para>the most specific organizational unit (OU) <ul><li></li></ul></para></li></ul></para>
        /// </summary>
        /// <java-name>
        /// android/net/http/SslCertificate$DName
        /// </java-name>
        [Dot42.DexImport("android/net/http/SslCertificate$DName", AccessFlags = 1)]
        public partial class DName
        /* scope: __dot42__ */
        {
            /// <java-name>
            /// this$0
            /// </java-name>
            // Synthetic reference to the enclosing SslCertificate (Java inner-class field).
            [Dot42.DexImport("this$0", "Landroid/net/http/SslCertificate;", AccessFlags = 4112)]
            internal readonly global::Android.Net.Http.SslCertificate This_0;

            [Dot42.DexImport("<init>", "(Landroid/net/http/SslCertificate;Ljava/lang/String;)V", AccessFlags = 1)]
            public DName(global::Android.Net.Http.SslCertificate sslCertificate, string @string) /* MethodBuilder.Create */
            {
            }

            /// <summary>
            /// <para></para>
            /// </summary>
            /// <returns>
            /// <para>The distinguished name (normally includes CN, O, and OU names) </para>
            /// </returns>
            /// <java-name>
            /// getDName
            /// </java-name>
            [Dot42.DexImport("getDName", "()Ljava/lang/String;", AccessFlags = 1)]
            public virtual string GetDName() /* MethodBuilder.Create */
            {
                return default(string);
            }

            /// <summary>
            /// <para></para>
            /// </summary>
            /// <returns>
            /// <para>The most specific Common-name (CN) component of this name </para>
            /// </returns>
            /// <java-name>
            /// getCName
            /// </java-name>
            [Dot42.DexImport("getCName", "()Ljava/lang/String;", AccessFlags = 1)]
            public virtual string GetCName() /* MethodBuilder.Create */
            {
                return default(string);
            }

            /// <summary>
            /// <para></para>
            /// </summary>
            /// <returns>
            /// <para>The most specific Organization (O) component of this name </para>
            /// </returns>
            /// <java-name>
            /// getOName
            /// </java-name>
            [Dot42.DexImport("getOName", "()Ljava/lang/String;", AccessFlags = 1)]
            public virtual string GetOName() /* MethodBuilder.Create */
            {
                return default(string);
            }

            /// <summary>
            /// <para></para>
            /// </summary>
            /// <returns>
            /// <para>The most specific Organizational Unit (OU) component of this name </para>
            /// </returns>
            /// <java-name>
            /// getUName
            /// </java-name>
            [Dot42.DexImport("getUName", "()Ljava/lang/String;", AccessFlags = 1)]
            public virtual string GetUName() /* MethodBuilder.Create */
            {
                return default(string);
            }

            // Hidden default constructor added by the binding generator.
            [global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
            internal DName() /* TypeBuilder.AddDefaultConstructor */
            {
            }

            /// <summary>
            /// <para></para>
            /// </summary>
            /// <returns>
            /// <para>The most specific Common-name (CN) component of this name </para>
            /// </returns>
            /// <java-name>
            /// getCName
            /// </java-name>
            public string CName
            {
                [Dot42.DexImport("getCName", "()Ljava/lang/String;", AccessFlags = 1)]
                get{ return GetCName(); }
            }

            /// <summary>
            /// <para></para>
            /// </summary>
            /// <returns>
            /// <para>The most specific Organization (O) component of this name </para>
            /// </returns>
            /// <java-name>
            /// getOName
            /// </java-name>
            public string OName
            {
                [Dot42.DexImport("getOName", "()Ljava/lang/String;", AccessFlags = 1)]
                get{ return GetOName(); }
            }

            /// <summary>
            /// <para></para>
            /// </summary>
            /// <returns>
            /// <para>The most specific Organizational Unit (OU) component of this name </para>
            /// </returns>
            /// <java-name>
            /// getUName
            /// </java-name>
            public string UName
            {
                [Dot42.DexImport("getUName", "()Ljava/lang/String;", AccessFlags = 1)]
                get{ return GetUName(); }
            }
        }
}
}
| |
//
// SearchEntry.cs
//
// Author:
// Aaron Bockover <abockover@novell.com>
// Gabriel Burt <gburt@novell.com>
//
// Copyright (C) 2007 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using Gtk;
namespace Banshee.Widgets
{
/// <summary>
/// A GTK search entry widget: a frameless text entry flanked by a filter
/// (drop-down menu) button on the left and a clear button on the right,
/// with debounced <see cref="Changed"/> notification and optional
/// radio-style filter options.
/// </summary>
public class SearchEntry : EventBox
{
    private HBox box;
    private Entry entry;
    private HoverImageButton filter_button;
    private HoverImageButton clear_button;
    private Menu menu;
    private int active_filter_id = -1;
    private uint changed_timeout_id = 0; // pending debounce source id; 0 = none
    private string empty_message;
    private bool ready = false;

    private event EventHandler filter_changed;
    private event EventHandler entry_changed;

    /// <summary>Raised (debounced, and only once <see cref="Ready"/> is true) when the query text changes.</summary>
    public event EventHandler Changed {
        add { entry_changed += value; }
        remove { entry_changed -= value; }
    }

    /// <summary>Raised when the inner entry is activated (e.g. Enter is pressed).</summary>
    public event EventHandler Activated {
        add { entry.Activated += value; }
        remove { entry.Activated -= value; }
    }

    /// <summary>Raised when <see cref="ActiveFilterID"/> changes.</summary>
    public event EventHandler FilterChanged {
        add { filter_changed += value; }
        remove { filter_changed -= value; }
    }

    /// <summary>The filter drop-down menu shown by the filter button.</summary>
    public Menu Menu {
        get { return menu; }
    }

    public SearchEntry()
    {
        AppPaintable = true;
        BuildWidget();
        BuildMenu();
        NoShowAll = true;
    }

    // Assembles the inner HBox ([filter button][entry][clear button]) and
    // wires up the entry/button event handlers.
    private void BuildWidget()
    {
        box = new HBox();
        entry = new FramelessEntry(this);
        filter_button = new HoverImageButton(IconSize.Menu, new string [] { "edit-find", Stock.Find });
        clear_button = new HoverImageButton(IconSize.Menu, new string [] { "edit-clear", Stock.Clear });

        box.PackStart(filter_button, false, false, 0);
        box.PackStart(entry, true, true, 0);
        box.PackStart(clear_button, false, false, 0);
        Add(box);
        box.ShowAll();

        entry.StyleSet += OnInnerEntryStyleSet;
        entry.StateChanged += OnInnerEntryStateChanged;
        entry.FocusInEvent += OnInnerEntryFocusEvent;
        entry.FocusOutEvent += OnInnerEntryFocusEvent;
        entry.Changed += OnInnerEntryChanged;

        filter_button.Image.Xpad = 2;
        clear_button.Image.Xpad = 2;
        filter_button.CanFocus = false;
        clear_button.CanFocus = false;

        filter_button.ButtonReleaseEvent += OnButtonReleaseEvent;
        clear_button.ButtonReleaseEvent += OnButtonReleaseEvent;
        clear_button.Clicked += OnClearButtonClicked;

        // Both buttons start hidden; ShowHideButtons reveals them on demand.
        filter_button.Visible = false;
        clear_button.Visible = false;
    }

    private void BuildMenu()
    {
        menu = new Menu();
        menu.Deactivated += OnMenuDeactivated;
    }

    // Pops up the filter menu, but only if it has at least one item.
    private void ShowMenu(uint time)
    {
        if(menu.Children.Length > 0) {
            menu.Popup(null, null, OnPositionMenu, 0, time);
            menu.ShowAll();
        }
    }

    // Clear button is visible only while there is text; filter button only
    // while there are filter options.
    private void ShowHideButtons()
    {
        clear_button.Visible = entry.Text.Length > 0;
        filter_button.Visible = menu != null && menu.Children.Length > 0;
    }

    // Positions the filter menu directly below the filter button.
    private void OnPositionMenu(Menu menu, out int x, out int y, out bool push_in)
    {
        int origin_x, origin_y, tmp;

        filter_button.GdkWindow.GetOrigin(out origin_x, out tmp);
        GdkWindow.GetOrigin(out tmp, out origin_y);

        x = origin_x + filter_button.Allocation.X;
        y = origin_y + SizeRequest().Height;
        push_in = true;
    }

    private void OnMenuDeactivated(object o, EventArgs args)
    {
        filter_button.QueueDraw();
    }

    // Guards against reentrancy while radio-style untoggling the other items.
    private bool toggling = false;
    private void OnMenuItemToggled(object o, EventArgs args)
    {
        if(toggling || !(o is FilterMenuItem)) {
            return;
        }

        toggling = true;

        FilterMenuItem item = (FilterMenuItem)o;

        // Radio behavior: deactivate every other filter item.
        foreach(MenuItem child_item in menu) {
            if(!(child_item is FilterMenuItem)) {
                continue;
            }

            FilterMenuItem filter_child = (FilterMenuItem)child_item;
            if(filter_child != item) {
                filter_child.Active = false;
            }
        }

        item.Active = true;
        ActiveFilterID = item.ID;
        toggling = false;
    }

    private void OnInnerEntryChanged(object o, EventArgs args)
    {
        ShowHideButtons();

        // Debounce: cancel any pending notification and restart the timeout.
        if(changed_timeout_id > 0) {
            GLib.Source.Remove(changed_timeout_id);
            changed_timeout_id = 0; // fix: clear the id so it is never removed twice
        }

        if (Ready)
            changed_timeout_id = GLib.Timeout.Add(25, OnChangedTimeout);
    }

    private bool OnChangedTimeout()
    {
        // Fix: clear the stale id first — returning false destroys this source,
        // and a later GLib.Source.Remove on a destroyed id triggers a GLib warning.
        changed_timeout_id = 0;
        OnChanged();
        return false;
    }

    // Matches the buttons' background and the box border to the entry's style.
    private void UpdateStyle ()
    {
        Gdk.Color color = entry.Style.Base (entry.State);
        filter_button.ModifyBg (entry.State, color);
        clear_button.ModifyBg (entry.State, color);

        box.BorderWidth = (uint)entry.Style.XThickness;
    }

    private void OnInnerEntryStyleSet (object o, StyleSetArgs args)
    {
        UpdateStyle ();
    }

    private void OnInnerEntryStateChanged (object o, EventArgs args)
    {
        UpdateStyle ();
    }

    private void OnInnerEntryFocusEvent(object o, EventArgs args)
    {
        QueueDraw();
    }

    private void OnButtonReleaseEvent(object o, ButtonReleaseEventArgs args)
    {
        // Only react to the primary (left) button.
        if(args.Event.Button != 1) {
            return;
        }

        entry.HasFocus = true;

        if(o == filter_button) {
            ShowMenu(args.Event.Time);
        }
    }

    private void OnClearButtonClicked(object o, EventArgs args)
    {
        // Resets the field directly, bypassing the ActiveFilterID setter so no
        // FilterChanged event fires — presumably intentional (the Text change
        // below already notifies); TODO confirm.
        active_filter_id = 0;
        entry.Text = String.Empty;
    }

    protected override bool OnKeyPressEvent (Gdk.EventKey evnt)
    {
        // Escape clears the search, like the clear button.
        if (evnt.Key == Gdk.Key.Escape) {
            active_filter_id = 0;
            entry.Text = String.Empty;
            return true;
        }
        return base.OnKeyPressEvent (evnt);
    }

    protected override bool OnExposeEvent(Gdk.EventExpose evnt)
    {
        // Draw the children, then an "entry"-style inset frame around the
        // whole composite so it looks like a single entry widget.
        PropagateExpose(Child, evnt);
        Style.PaintShadow(entry.Style, GdkWindow, StateType.Normal,
            ShadowType.In, evnt.Area, entry, "entry",
            0, 0, Allocation.Width, Allocation.Height);
        return true;
    }

    protected override void OnShown()
    {
        base.OnShown();
        ShowHideButtons();
    }

    protected virtual void OnChanged()
    {
        if(!Ready) {
            return;
        }

        EventHandler handler = entry_changed;
        if(handler != null) {
            handler(this, EventArgs.Empty);
        }
    }

    protected virtual void OnFilterChanged()
    {
        EventHandler handler = filter_changed;
        if(handler != null) {
            handler(this, EventArgs.Empty);
        }

        // Re-run the query under the new filter if there is one.
        if(IsQueryAvailable) {
            OnInnerEntryChanged(this, EventArgs.Empty);
        }
    }

    /// <summary>
    /// Appends a filter option to the menu. The first option added becomes
    /// the active filter.
    /// </summary>
    /// <param name="id">Non-negative identifier for the option.</param>
    /// <param name="label">Menu label shown to the user.</param>
    /// <exception cref="ArgumentException">Thrown when <paramref name="id"/> is negative.</exception>
    public void AddFilterOption(int id, string label)
    {
        if(id < 0) {
            // Fix: ArgumentException is (message, paramName); the original call
            // had the two arguments swapped.
            throw new ArgumentException("must be >= 0", "id");
        }

        FilterMenuItem item = new FilterMenuItem(id, label);
        item.Toggled += OnMenuItemToggled;
        menu.Append(item);

        if(ActiveFilterID < 0) {
            item.Toggle();
        }

        filter_button.Visible = true;
    }

    public void AddFilterSeparator()
    {
        menu.Append(new SeparatorMenuItem());
    }

    public void RemoveFilterOption(int id)
    {
        FilterMenuItem item = FindFilterMenuItem(id);
        if(item != null) {
            menu.Remove(item);
        }
    }

    public void ActivateFilter(int id)
    {
        FilterMenuItem item = FindFilterMenuItem(id);
        if(item != null) {
            item.Toggle();
        }
    }

    // Linear scan of the menu for a FilterMenuItem with the given id; null if absent.
    private FilterMenuItem FindFilterMenuItem(int id)
    {
        foreach(MenuItem item in menu) {
            if(item is FilterMenuItem && ((FilterMenuItem)item).ID == id) {
                return (FilterMenuItem)item;
            }
        }

        return null;
    }

    /// <summary>Returns the label of the filter option with the given id, or null.</summary>
    public string GetLabelForFilterID(int id)
    {
        FilterMenuItem item = FindFilterMenuItem(id);
        if(item == null) {
            return null;
        }

        return item.Label;
    }

    /// <summary>Clears the query text and reverts to filter 0.</summary>
    public void CancelSearch()
    {
        entry.Text = String.Empty;
        ActivateFilter(0);
    }

    /// <summary>The id of the currently active filter option (-1 before any is set).</summary>
    public int ActiveFilterID {
        get { return active_filter_id; }
        private set {
            if(value == active_filter_id) {
                return;
            }

            active_filter_id = value;
            OnFilterChanged();
        }
    }

    /// <summary>Placeholder markup drawn while the entry is empty and unfocused.</summary>
    public string EmptyMessage {
        get {
            return entry.Sensitive ? empty_message : String.Empty;
        }
        set {
            empty_message = value;
            entry.QueueDraw();
        }
    }

    /// <summary>The trimmed query text.</summary>
    public string Query {
        get { return entry.Text.Trim(); }
        set { entry.Text = value.Trim(); }
    }

    /// <summary>True when the trimmed query is non-empty.</summary>
    public bool IsQueryAvailable {
        get { return Query != null && Query != String.Empty; }
    }

    /// <summary>Gate for Changed notifications; nothing fires until this is true.</summary>
    public bool Ready {
        get { return ready; }
        set { ready = value; }
    }

    public new bool HasFocus {
        get { return entry.HasFocus; }
        set { entry.HasFocus = true; }
    }

    /// <summary>The wrapped Gtk.Entry, for callers needing direct access.</summary>
    public Entry InnerEntry {
        get { return entry; }
    }

    protected override void OnStateChanged (Gtk.StateType previous_state)
    {
        base.OnStateChanged (previous_state);

        // Propagate (in)sensitivity to all children.
        entry.Sensitive = State != StateType.Insensitive;
        filter_button.Sensitive = State != StateType.Insensitive;
        clear_button.Sensitive = State != StateType.Insensitive;
    }

    // Menu item carrying a filter id; emulates CheckMenuItem toggling.
    private class FilterMenuItem : MenuItem /*CheckMenuItem*/
    {
        private int id;
        private string label;

        public FilterMenuItem(int id, string label) : base(label)
        {
            this.id = id;
            this.label = label;
            //DrawAsRadio = true;
        }

        public int ID {
            get { return id; }
        }

        public string Label {
            get { return label; }
        }

        // FIXME: Remove when restored to CheckMenuItem
        private bool active;
        public bool Active {
            get { return active; }
            set { active = value; }
        }

        public new event EventHandler Toggled;
        protected override void OnActivated ()
        {
            base.OnActivated ();
            if (Toggled != null) {
                Toggled (this, EventArgs.Empty);
            }
        }
    }

    // Entry without its own frame; also paints the EmptyMessage placeholder.
    private class FramelessEntry : Entry
    {
        private Gdk.Window text_window;
        private SearchEntry parent;
        private Pango.Layout layout;
        private Gdk.GC text_gc;

        public FramelessEntry(SearchEntry parent) : base()
        {
            this.parent = parent;
            HasFrame = false;

            parent.StyleSet += OnParentStyleSet;
            WidthChars = 1;
        }

        private void OnParentStyleSet(object o, EventArgs args)
        {
            RefreshGC();
            QueueDraw();
        }

        // Rebuilds the GC used for the placeholder text: a blend of the
        // parent's base and text colors, i.e. a "grayed out" shade.
        private void RefreshGC()
        {
            if(text_window == null) {
                return;
            }

            text_gc = new Gdk.GC(text_window);
            text_gc.Copy(Style.TextGC(StateType.Normal));
            Gdk.Color color_a = parent.Style.Base(StateType.Normal);
            Gdk.Color color_b = parent.Style.Text(StateType.Normal);
            text_gc.RgbFgColor = Hyena.Gui.GtkUtilities.ColorBlend(color_a, color_b);
        }

        protected override bool OnExposeEvent(Gdk.EventExpose evnt)
        {
            // The Entry's GdkWindow is the top level window onto which
            // the frame is drawn; the actual text entry is drawn into a
            // separate window, so we can ensure that for themes that don't
            // respect HasFrame, we never ever allow the base frame drawing
            // to happen
            if(evnt.Window == GdkWindow) {
                return true;
            }

            bool ret = base.OnExposeEvent(evnt);

            if(text_gc == null || evnt.Window != text_window) {
                text_window = evnt.Window;
                RefreshGC();
            }

            // Draw the placeholder only when empty, unfocused, and a message is set.
            if(Text.Length > 0 || HasFocus || parent.EmptyMessage == null) {
                return ret;
            }

            if (layout == null) {
                layout = new Pango.Layout(PangoContext);
                layout.FontDescription = PangoContext.FontDescription.Copy();
            }

            int width, height;
            layout.SetMarkup(parent.EmptyMessage);
            layout.GetPixelSize(out width, out height);
            evnt.Window.DrawLayout(text_gc, 2, (SizeRequest().Height - height) / 2, layout);

            return ret;
        }
    }
}
}
| |
//
// System.Configuration.ConfigXmlDocument
//
// Authors:
// Gonzalo Paniagua Javier (gonzalo@ximian.com)
//
// (C) 2002 Ximian, Inc (http://www.ximian.com)
// Copyright (C) 2005 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
#if CONFIGURATION_DEP
using System.Configuration.Internal;
#endif
using System.IO;
using System.Security;
using System.Security.Permissions;
#if (XML_DEP)
using System.Xml;
namespace System.Configuration
{
// XmlDocument specialization used by System.Configuration: every node it
// creates is a wrapper type that records the source file name and line number,
// so configuration errors can be reported with their exact location.
[PermissionSet (SecurityAction.LinkDemand, Unrestricted = true)]
public sealed class ConfigXmlDocument : XmlDocument, IConfigXmlNode
#if CONFIGURATION_DEP
	, IConfigErrorInfo
#endif
{
	XmlTextReader reader;      // secondary reader over the extracted element XML
	string fileName;           // path of the config file currently being loaded
	int lineNumber;            // line of the element within that file

	// Factory overrides: hand back the file/line-annotated wrapper node types
	// defined below instead of the plain Xml* nodes.
	public override XmlAttribute CreateAttribute (string prefix,
						      string localName,
						      string namespaceUri)
	{
		return new ConfigXmlAttribute (this, prefix, localName, namespaceUri);
	}

	public override XmlCDataSection CreateCDataSection (string data)
	{
		return new ConfigXmlCDataSection (this, data);
	}

	public override XmlComment CreateComment (string comment)
	{
		return new ConfigXmlComment (this, comment);
	}

	public override XmlElement CreateElement (string prefix, string localName, string namespaceUri)
	{
		return new ConfigXmlElement (this, prefix, localName, namespaceUri);
	}

	// Whitespace nodes carry no error-reporting value, so the base types suffice.
	public override XmlSignificantWhitespace CreateSignificantWhitespace (string data)
	{
		return base.CreateSignificantWhitespace (data);
	}

	public override XmlText CreateTextNode (string text)
	{
		return new ConfigXmlText (this, text);
	}

	public override XmlWhitespace CreateWhitespace (string data)
	{
		return base.CreateWhitespace (data);
	}

	// Loads the document from a file, recording the file name for diagnostics.
	public override void Load (string filename)
	{
		XmlTextReader rd = new XmlTextReader (filename);
		try {
			rd.MoveToContent ();
			LoadSingleElement (filename, rd);
		} finally {
			rd.Close ();
		}
	}

	// Loads exactly the element the reader is positioned on: its outer XML is
	// extracted and re-parsed (sharing the name table), and the element's
	// starting line number is captured for error reporting.
	public void LoadSingleElement (string filename, XmlTextReader sourceReader)
	{
		fileName = filename;
		lineNumber = sourceReader.LineNumber;
		string xml = sourceReader.ReadOuterXml();
		reader = new XmlTextReader (new StringReader (xml), sourceReader.NameTable);
		Load (reader);
		reader.Close ();
	}

	// Demands path-discovery permission before revealing the path to
	// partially trusted callers (CAS era).
	public string Filename
	{
		get {
			if ((fileName != null) && (fileName.Length > 0) && SecurityManager.SecurityEnabled) {
				new FileIOPermission (FileIOPermissionAccess.PathDiscovery, fileName).Demand ();
			}
			return fileName;
		}
	}

	public int LineNumber
	{
		get {
			return lineNumber;
		}
	}

#if CONFIGURATION_DEP
	string System.Configuration.Internal.IConfigErrorInfo.Filename {
		get { return Filename; }
	}

	int System.Configuration.Internal.IConfigErrorInfo.LineNumber {
		get { return LineNumber; }
	}
#endif

	string IConfigXmlNode.Filename {
		get { return Filename; }
	}

	int IConfigXmlNode.LineNumber {
		get { return LineNumber; }
	}

	//
	// Wrappers for Xml* that just provide file name and line number addition
	//
	class ConfigXmlAttribute : XmlAttribute, IConfigXmlNode
#if CONFIGURATION_DEP
		, IConfigErrorInfo
#endif
	{
		string fileName;
		int lineNumber;

		// Snapshots the document's current file/line at creation time.
		public ConfigXmlAttribute (ConfigXmlDocument document,
					   string prefix,
					   string localName,
					   string namespaceUri)
			: base (prefix, localName, namespaceUri, document)
		{
			fileName = document.fileName;
			lineNumber = document.LineNumber;
		}

		public string Filename
		{
			get {
				if ((fileName != null) && (fileName.Length > 0) && SecurityManager.SecurityEnabled) {
					new FileIOPermission (FileIOPermissionAccess.PathDiscovery, fileName).Demand ();
				}
				return fileName;
			}
		}

		public int LineNumber
		{
			get {
				return lineNumber;
			}
		}
	}

	class ConfigXmlCDataSection : XmlCDataSection, IConfigXmlNode
#if CONFIGURATION_DEP
		, IConfigErrorInfo
#endif
	{
		string fileName;
		int lineNumber;

		public ConfigXmlCDataSection (ConfigXmlDocument document, string data)
			: base (data, document)
		{
			fileName = document.fileName;
			lineNumber = document.LineNumber;
		}

		public string Filename
		{
			get {
				if ((fileName != null) && (fileName.Length > 0) && SecurityManager.SecurityEnabled) {
					new FileIOPermission (FileIOPermissionAccess.PathDiscovery, fileName).Demand ();
				}
				return fileName;
			}
		}

		public int LineNumber
		{
			get {
				return lineNumber;
			}
		}
	}

	// NOTE(review): unlike the other wrapper types, this one does not add
	// IConfigErrorInfo under CONFIGURATION_DEP — possibly an oversight;
	// confirm against the reference implementation before changing.
	class ConfigXmlComment : XmlComment, IConfigXmlNode
	{
		string fileName;
		int lineNumber;

		public ConfigXmlComment (ConfigXmlDocument document, string comment)
			: base (comment, document)
		{
			fileName = document.fileName;
			lineNumber = document.LineNumber;
		}

		public string Filename
		{
			get {
				if ((fileName != null) && (fileName.Length > 0) && SecurityManager.SecurityEnabled) {
					new FileIOPermission (FileIOPermissionAccess.PathDiscovery, fileName).Demand ();
				}
				return fileName;
			}
		}

		public int LineNumber
		{
			get {
				return lineNumber;
			}
		}
	}

	class ConfigXmlElement : XmlElement, IConfigXmlNode
#if CONFIGURATION_DEP
		, IConfigErrorInfo
#endif
	{
		string fileName;
		int lineNumber;

		public ConfigXmlElement (ConfigXmlDocument document,
					 string prefix,
					 string localName,
					 string namespaceUri)
			: base (prefix, localName, namespaceUri, document)
		{
			fileName = document.fileName;
			lineNumber = document.LineNumber;
		}

		public string Filename
		{
			get {
				if ((fileName != null) && (fileName.Length > 0) && SecurityManager.SecurityEnabled) {
					new FileIOPermission (FileIOPermissionAccess.PathDiscovery, fileName).Demand ();
				}
				return fileName;
			}
		}

		public int LineNumber
		{
			get {
				return lineNumber;
			}
		}
	}

	class ConfigXmlText : XmlText, IConfigXmlNode
#if CONFIGURATION_DEP
		, IConfigErrorInfo
#endif
	{
		string fileName;
		int lineNumber;

		public ConfigXmlText (ConfigXmlDocument document, string data)
			: base (data, document)
		{
			fileName = document.fileName;
			lineNumber = document.LineNumber;
		}

		public string Filename
		{
			get {
				if ((fileName != null) && (fileName.Length > 0) && SecurityManager.SecurityEnabled) {
					new FileIOPermission (FileIOPermissionAccess.PathDiscovery, fileName).Demand ();
				}
				return fileName;
			}
		}

		public int LineNumber
		{
			get {
				return lineNumber;
			}
		}
	}
}
}
#endif
| |
//#define VERBOSE_LOGGING
#if UNITY_ANDROID && !UNITY_EDITOR
using System;
using UnityEngine;
namespace java.io
{
// Managed JNI wrapper around java.io.ByteArrayOutputStream for Unity on
// Android. The Java class handle and method ids are cached in static fields;
// each managed instance owns one underlying Java object in _instance.
public class ByteArrayOutputStream : IDisposable
{
    // Cached JNI handles: the class (pinned with a global ref in the static
    // constructor) and its method ids (resolved lazily in JNIFind).
    static IntPtr _jcByteArrayOutputStream = IntPtr.Zero;
    static IntPtr _jmClose = IntPtr.Zero;
    static IntPtr _jmConstructor = IntPtr.Zero;
    static IntPtr _jmSize = IntPtr.Zero;
    static IntPtr _jmToByteArray = IntPtr.Zero;

    // Reference to the wrapped Java object; IntPtr.Zero once disposed.
    private IntPtr _instance = IntPtr.Zero;

    // Resolves the Java class once and promotes it to a global reference so
    // the cached handle stays valid after the local JNI frame is released.
    static ByteArrayOutputStream()
    {
        try
        {
            {
                string strName = "java/io/ByteArrayOutputStream";
                IntPtr localRef = AndroidJNI.FindClass(strName);
                if (localRef != IntPtr.Zero)
                {
#if VERBOSE_LOGGING
                    Debug.Log(string.Format("Found {0} class", strName));
#endif
                    _jcByteArrayOutputStream = AndroidJNI.NewGlobalRef(localRef);
                    AndroidJNI.DeleteLocalRef(localRef);
                }
                else
                {
                    Debug.LogError(string.Format("Failed to find {0} class", strName));
                    return;
                }
            }
        }
        catch (System.Exception ex)
        {
            Debug.LogError(string.Format("Exception loading JNI - {0}", ex));
        }
    }

    // Looks up all method ids on the cached class. Called at the top of every
    // public member; the lookups are idempotent (ids are stable for the life
    // of the class) so repeating them is safe.
    private static void JNIFind()
    {
        try
        {
            {
                string strMethod = "<init>";
                _jmConstructor = AndroidJNI.GetMethodID(_jcByteArrayOutputStream, strMethod, "()V");
                if (_jmConstructor != IntPtr.Zero)
                {
#if VERBOSE_LOGGING
                    Debug.Log(string.Format("Found {0} method", strMethod));
#endif
                }
                else
                {
                    Debug.LogError(string.Format("Failed to find {0} method", strMethod));
                    return;
                }
            }

            {
                string strMethod = "close";
                _jmClose = AndroidJNI.GetMethodID(_jcByteArrayOutputStream, strMethod, "()V");
                if (_jmClose != IntPtr.Zero)
                {
#if VERBOSE_LOGGING
                    Debug.Log(string.Format("Found {0} method", strMethod));
#endif
                }
                else
                {
                    Debug.LogError(string.Format("Failed to find {0} method", strMethod));
                    return;
                }
            }

            {
                string strMethod = "size";
                _jmSize = AndroidJNI.GetMethodID(_jcByteArrayOutputStream, strMethod, "()I");
                if (_jmSize != IntPtr.Zero)
                {
#if VERBOSE_LOGGING
                    Debug.Log(string.Format("Found {0} method", strMethod));
#endif
                }
                else
                {
                    Debug.LogError(string.Format("Failed to find {0} method", strMethod));
                    return;
                }
            }

            {
                string strMethod = "toByteArray";
                _jmToByteArray = AndroidJNI.GetMethodID(_jcByteArrayOutputStream, strMethod, "()[B");
                if (_jmToByteArray != IntPtr.Zero)
                {
#if VERBOSE_LOGGING
                    Debug.Log(string.Format("Found {0} method", strMethod));
#endif
                }
                else
                {
                    Debug.LogError(string.Format("Failed to find {0} method", strMethod));
                    return;
                }
            }
        }
        catch (System.Exception ex)
        {
            Debug.LogError(string.Format("Exception loading JNI - {0}", ex));
        }
    }

    // Allocates the Java object and invokes its no-arg constructor on it.
    public ByteArrayOutputStream()
    {
        JNIFind();

        _instance = AndroidJNI.AllocObject(_jcByteArrayOutputStream);
        if (_instance == IntPtr.Zero)
        {
            Debug.LogError("Failed to allocate ByteArrayOutputStream");
            return;
        }
        AndroidJNI.CallVoidMethod(_instance, _jmConstructor, new jvalue[0]);
#if VERBOSE_LOGGING
        Debug.Log("Allocated ByteArrayOutputStream");
#endif
    }

    // Raw JNI handle to the wrapped object, for interop with other wrappers.
    public IntPtr GetInstance()
    {
        return _instance;
    }

    // Releases the JNI reference to the wrapped object (idempotent).
    // NOTE(review): _instance comes from AllocObject (a local reference), so
    // DeleteLocalRef matches — confirm it is never promoted to a global ref
    // by callers of GetInstance().
    public void Dispose()
    {
        if (_instance != IntPtr.Zero)
        {
            AndroidJNI.DeleteLocalRef(_instance);
            _instance = IntPtr.Zero;
        }
    }

    // java.io.ByteArrayOutputStream.close(); logs and no-ops if not initialized.
    public void close()
    {
        JNIFind();

        if (_instance == IntPtr.Zero)
        {
            Debug.LogError("_instance is not initialized");
            return;
        }

        AndroidJNI.CallVoidMethod(_instance, _jmClose, new jvalue[0]);
    }

    // java.io.ByteArrayOutputStream.size(); returns 0 if not initialized.
    public int size()
    {
        JNIFind();

        if (_instance == IntPtr.Zero)
        {
            Debug.LogError("_instance is not initialized");
            return 0;
        }

        int result = AndroidJNI.CallIntMethod(_instance, _jmSize, new jvalue[0]);
#if VERBOSE_LOGGING
        Debug.Log(string.Format("ByteArrayOutputStream.size() == {0}", result));
#endif
        return result;
    }

    // java.io.ByteArrayOutputStream.toByteArray(); returns null on any failure.
    public byte[] toByteArray()
    {
        JNIFind();

        if (_instance == IntPtr.Zero)
        {
            Debug.LogError("_instance is not initialized");
            return null;
        }

        if (_jmToByteArray == IntPtr.Zero)
        {
            Debug.LogError("_jmToByteArray is not initialized");
            return null;
        }

        IntPtr result = AndroidJNI.CallObjectMethod(_instance, _jmToByteArray, new jvalue[0]);
        if (result == IntPtr.Zero)
        {
            Debug.LogError("Failed to get byte array");
            return null;
        }

        int count = AndroidJNI.GetArrayLength(result);
        byte[] retVal = new byte[count];
        // NOTE(review): copies one element per JNI call; AndroidJNI.FromByteArray
        // would do this in a single call — confirm availability in the targeted
        // Unity version before changing.
        for (int index = 0; index < count; ++index)
        {
            retVal[index] = AndroidJNI.GetByteArrayElement(result, index);
        }
        AndroidJNI.DeleteLocalRef(result);
        return retVal;
    }
}
}
#endif
| |
using AxoCover.Common.Extensions;
using AxoCover.Models.Editor;
using AxoCover.Models.Telemetry;
using AxoCover.Properties;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Net;
using System.Reflection;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
namespace AxoCover.Models.Updater
{
/// <summary>
/// Manages discovery, download and installation of AxoCover releases hosted on GitHub.
/// Release metadata is cached in user settings; installation goes through the Visual
/// Studio extension manager (via reflection) with VSIXInstaller.exe as a fallback.
/// </summary>
public class ReleaseManager : IReleaseManager
{
    private const string _defaultBranch = "release";
    private const string _userName = "axodox";
    private const string _repositoryName = "AxoCover";
    private const string _assetName = "AxoCover.vsix";

    // Minimum time between GitHub release-list refreshes when caching is enabled.
    private readonly TimeSpan _updateInterval = TimeSpan.FromDays(1);

    // Release names look like "<branch>-<version>", e.g. "release-1.2.3".
    private readonly Regex _nameRegex = new Regex(@"^(?<branch>.*?)-(?<version>\d+(?:\.\d+)*)$");

    // Extracts #name:"value" style properties embedded in the release description body.
    private readonly Regex _propertyRegex = new Regex(@"#(?<name>\w+):""(?<value>(?:[^""]|(?<=\\)"")*)""");

    private readonly IEditorContext _editorContext;
    private readonly ITelemetryManager _telemetryManager;

    // Named (cross-process) mutex so two VS instances cannot install concurrently.
    private readonly Mutex _installerMutex = new Mutex(false, "AxoCover.ReleaseManager");

    public string DefaultBranch
    {
        get { return _defaultBranch; }
    }

    public bool IsUpdatingAutomatically
    {
        get { return Settings.Default.IsUpdatingAutomatically; }
        set { Settings.Default.IsUpdatingAutomatically = value; }
    }

    public DateTime LastUpdateCheckTime
    {
        get { return Settings.Default.ReleaseListUpdateTime; }
        private set { Settings.Default.ReleaseListUpdateTime = value; }
    }

    /// <summary>
    /// Release list cached as JSON in user settings.
    /// </summary>
    public Release[] Releases
    {
        // BUGFIX: an empty/unset settings cache deserializes to null, which previously
        // propagated to GetReleases()/GetTargetRelease() and caused a NullReferenceException.
        get { return JsonConvert.DeserializeObject<Release[]>(Settings.Default.ReleaseListCache) ?? new Release[0]; }
        private set { Settings.Default.ReleaseListCache = JsonConvert.SerializeObject(value); }
    }

    public string TargetBranch
    {
        get { return Settings.Default.ReleaseBranch; }
        set { Settings.Default.ReleaseBranch = value; }
    }

    public Version CurrentVersion
    {
        get { return Settings.Default.ReleaseInstalled; }
        private set { Settings.Default.ReleaseInstalled = value; }
    }

    /// <summary>
    /// Version rollback history, most recent first.
    /// </summary>
    public Version[] PreviousVersions
    {
        // BUGFIX: guard against a null deserialization result (empty settings value),
        // which previously made Initialize() throw on PreviousVersions.ToList().
        get { return JsonConvert.DeserializeObject<Version[]>(Settings.Default.ReleaseRollbackList) ?? new Version[0]; }
        private set { Settings.Default.ReleaseRollbackList = JsonConvert.SerializeObject(value); }
    }

    /// <summary>
    /// Fetches the release list from GitHub, refreshing the cache when it is stale
    /// (older than <see cref="_updateInterval"/>) or when caching is disabled.
    /// On failure the cached list (or an empty array) is returned; errors are swallowed
    /// deliberately because update checking must never break the host.
    /// </summary>
    public async Task<Release[]> GetReleases(bool isCaching = true)
    {
        var releases = isCaching ? Releases : new Release[0];
        if (DateTime.Now - LastUpdateCheckTime > _updateInterval || !isCaching)
        {
            try
            {
                using (var webClient = new WebClient())
                {
                    // GitHub's API rejects requests without a User-Agent header.
                    webClient.Headers.Add(HttpRequestHeader.UserAgent, _repositoryName + "ReleaseManager");
                    var result = await webClient.DownloadStringTaskAsync(new Uri($"https://api.github.com/repos/{_userName}/{_repositoryName}/releases"));

                    var releaseList = new List<Release>();
                    var jsonReleases = JsonConvert.DeserializeObject(result);
                    foreach (JObject jsonRelease in jsonReleases as JArray)
                    {
                        var jsonName = jsonRelease["name"] as JValue;
                        var nameMatch = _nameRegex.Match(jsonName.Value<string>());
                        if (!nameMatch.Success) continue;

                        var branch = nameMatch.Groups["branch"].Value;
                        var versionString = nameMatch.Groups["version"].Value;
                        // Pad to a four-part version so Version comparisons behave uniformly.
                        while (versionString.Count(p => p == '.') < 3)
                        {
                            versionString += ".0";
                        }
                        var version = Version.Parse(versionString);

                        var jsonCreatedAt = jsonRelease["created_at"] as JValue;
                        var createdAt = DateTime.Parse(jsonCreatedAt.Value<string>());

                        var jsonAssets = jsonRelease["assets"] as JArray;
                        var jsonAsset = jsonAssets
                            .OfType<JObject>()
                            .FirstOrDefault(p => p["name"].Value<string>() == _assetName);
                        // BUGFIX: a release without the VSIX asset previously caused a
                        // NullReferenceException that aborted parsing of the entire list.
                        if (jsonAsset == null) continue;
                        var uri = jsonAsset["browser_download_url"].Value<string>();

                        var jsonDescription = jsonRelease["body"] as JValue;
                        var description = jsonDescription.Value<string>();
                        var properties = _propertyRegex
                            .Matches(description)
                            .OfType<Match>()
                            .ToDictionary(p => p.Groups["name"].Value, p => p.Groups["value"].Value);

                        var release = new Release(branch, version, uri, createdAt, description, properties);
                        releaseList.Add(release);
                    }
                    Releases = releases = releaseList.ToArray();
                    LastUpdateCheckTime = DateTime.Now;
                }
            }
            catch
            {
                // Network/parse failures are non-fatal; fall back to the cache if allowed.
                if (!isCaching)
                {
                    return new Release[0];
                }
            }
        }
        return releases;
    }

    /// <summary>
    /// Resolves the release to install: the latest on the target branch, following
    /// branch merges, falling back to the default branch when nothing is found.
    /// </summary>
    public async Task<Release> GetTargetRelease(bool isCaching = true)
    {
        var releases = await GetReleases(isCaching);

        //Get latest release on target branch
        var targetRelease = releases.GetLatest(TargetBranch);

        //If target branch has been merged switch to that branch instead
        while (targetRelease != null && targetRelease.MergedTo != null)
        {
            TargetBranch = targetRelease.MergedTo;
            targetRelease = releases.GetLatest(targetRelease.MergedTo);
        }

        //If we found no release switch to default branch
        if (targetRelease == null)
        {
            targetRelease = releases.GetLatest(_defaultBranch);
            if (targetRelease != null)
            {
                TargetBranch = _defaultBranch;
            }
        }
        return targetRelease;
    }

    /// <summary>
    /// Downloads the release VSIX to the temp folder and installs it.
    /// Serialized across processes via <see cref="_installerMutex"/>.
    /// Returns false on any failure; exceptions never escape.
    /// </summary>
    public async Task<bool> TryInstallRelease(Release release)
    {
        try
        {
            _installerMutex.WaitOne();
            var downloadPath = Path.Combine(Path.GetTempPath(), "AxoCover." + release.Version + ".vsix");

            try
            {
                if (File.Exists(downloadPath))
                {
                    File.Delete(downloadPath);
                }
            }
            catch
            {
                // A locked file usually means another install is already running.
                _editorContext.WriteToLog($"Cannot write to {downloadPath}. Maybe another installation is in progress?");
                return false;
            }

            using (var webClient = new WebClient())
            {
                await webClient.DownloadFileTaskAsync(release.AlternativeUri ?? release.Uri, downloadPath);
            }

            // Run the (potentially slow, reflection-heavy) install off the caller's thread.
            return await Task.Run(async () =>
            {
                try
                {
                    InstallVsix(downloadPath);
                    return true;
                }
                catch (Exception e)
                {
                    await _telemetryManager.UploadExceptionAsync(e);
                    return false;
                }
            });
        }
        catch
        {
            return false;
        }
        finally
        {
            _installerMutex.ReleaseMutex();
        }
    }

    /// <summary>
    /// Replaces the installed extension with the downloaded VSIX, using the in-process
    /// VS extension manager when available, otherwise VSIXInstaller.exe.
    /// </summary>
    private void InstallVsix(string downloadPath)
    {
        //Reflect on extension manager, so we can install beta builds without meddling with VS2017's crippled extension installer
        var extensionManagerType = Type.GetType("Microsoft.VisualStudio.ExtensionManager.SVsExtensionManager, Microsoft.VisualStudio.ExtensionManager");
        var extensionManagerInterface = Type.GetType("Microsoft.VisualStudio.ExtensionManager.IVsExtensionManager, Microsoft.VisualStudio.ExtensionManager");
        var getInstalledExtensionsMethod = extensionManagerInterface.GetMethod("GetInstalledExtensions");
        var createInstallableExtensionMethod = extensionManagerInterface.GetMethod("CreateInstallableExtension");
        var getInstalledExtensionMethod = extensionManagerInterface.GetMethod("GetInstalledExtension");
        var disableMethod = extensionManagerInterface.GetMethod("Disable");
        var enableMethod = extensionManagerInterface.GetMethod("Enable");
        var uninstallMethod = extensionManagerInterface.GetMethod("Uninstall");
        var installMethod = extensionManagerInterface.GetMethod("Install");

        var extensionInterface = Type.GetType("Microsoft.VisualStudio.ExtensionManager.IExtension, Microsoft.VisualStudio.ExtensionManager");
        var headerProperty = extensionInterface.GetProperty("Header");
        var nameProperty = headerProperty.PropertyType.GetProperty("Name");
        var identifierProperty = headerProperty.PropertyType.GetProperty("Identifier");

        //Try to resolve extension manager
        var extensionManager = Microsoft.VisualStudio.Shell.Package.GetGlobalService(extensionManagerType);
        if (extensionManager != null)
        {
            var oldExtension = (getInstalledExtensionsMethod.Invoke(extensionManager, new object[0]) as IEnumerable<object>)
                .Where(p => nameProperty.GetValue(headerProperty.GetValue(p)) as string == "AxoCover").SingleOrDefault();
            var newExtension = createInstallableExtensionMethod.Invoke(extensionManager, new object[] { downloadPath });

            //Disable current extension
            disableMethod.Invoke(extensionManager, new[] { oldExtension });

            //Uninstall current extension
            try
            {
                uninstallMethod.Invoke(extensionManager, new[] { oldExtension });
            }
            catch
            {
                // Uninstall failed — re-enable the old extension so the user is not left broken.
                enableMethod.Invoke(extensionManager, new[] { oldExtension });
                throw;
            }

            //Install new extension
            installMethod.Invoke(extensionManager, new[] { newExtension, false });

            //Enable new extension
            var newlyInstalledVersion = getInstalledExtensionMethod.Invoke(extensionManager, new[] { identifierProperty.GetValue(headerProperty.GetValue(newExtension)) });
            if (newlyInstalledVersion != null)
            {
                enableMethod.Invoke(extensionManager, new[] { newlyInstalledVersion });
            }
        }
        //If extension manager is missing try traditional VSIX installer
        else
        {
            var vsixInstallerPath = Path.Combine(_editorContext.RootPath, "VSIXInstaller.exe");
            if (Process.Start(vsixInstallerPath, $"/quiet /uninstall:{AxoCoverPackage.Id}").WaitForExitAndGetResult() != 0)
            {
                throw new Exception("VSIX uninstaller failed.");
            }

            if (Process.Start(vsixInstallerPath, $"/quiet \"{downloadPath}\"").WaitForExitAndGetResult() != 0)
            {
                throw new Exception("VSIX installer failed.");
            }
        }
    }

    public ReleaseManager(IEditorContext editorContext, ITelemetryManager telemetryManager)
    {
        _editorContext = editorContext;
        _telemetryManager = telemetryManager;
        // Fire-and-forget: initialization touches the network and must not block construction.
        Task.Run(Initialize);
    }

    /// <summary>
    /// Records a newly installed version in the rollback history and picks an
    /// initial target branch the first time the extension runs.
    /// </summary>
    private async Task Initialize()
    {
        var version = Assembly.GetExecutingAssembly().GetName().Version;

        //After installing a new version update release history
        if (version != CurrentVersion)
        {
            CurrentVersion = version;
            var previousReleases = PreviousVersions.ToList();
            // Move the current version to the front without duplicating it.
            previousReleases.Remove(version);
            previousReleases.Insert(0, version);
            PreviousVersions = previousReleases.ToArray();
        }

        //Initialize target branch for updates
        if (string.IsNullOrEmpty(TargetBranch))
        {
            //Find current release
            var releases = await GetReleases();
            var currentRelease = releases.FirstOrDefault(p => p.Version == version);

            //If we found the current release, then do auto updating
            if (currentRelease != null)
            {
                TargetBranch = currentRelease.Branch;
                IsUpdatingAutomatically = true;
            }
            //Otherwise set target branch to default, and do not enable auto updating
            else
            {
                TargetBranch = _defaultBranch;
            }
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Data.Common;
using System.Data.Entity;
using System.Linq;
using System.Reflection;
using NUnit.Framework;
using PeanutButter.FluentMigrator;
using PeanutButter.RandomGenerators;
using PeanutButter.TempDb;
using PeanutButter.TempDb.LocalDb;
using PeanutButter.TestUtils.Generic;
using PeanutButter.Utils;
using PeanutButter.Utils.Entity;
// ReSharper disable StaticMemberInGenericType
namespace PeanutButter.TestUtils.Entity
{
/// <summary>
/// Non-generic base holding shared-database bookkeeping. Kept non-generic so the
/// static members are shared across all closed generic subclasses (statics declared
/// on a generic type are per closed type).
/// </summary>
public class EntityPersistenceFluentStateBase
{
    // Shared temp databases which have already had migrations run against them.
    protected static List<ITempDB> SharedDatabasesWhichHaveBeenMigrated { get; } = new List<ITempDB>();
    // Guards the list above when tests run in parallel.
    protected static readonly object SharedMigrationsLock = new object();
}
/// <summary>
/// Fluent persistence-test helper: builds a TEntity (randomly, by default), persists it
/// through TContext into a temporary database, reads it back and asserts the recalled
/// values match — with tolerant comparison for decimal and DateTime properties.
/// </summary>
public class EntityPersistenceFluentState<TContext, TEntity> : EntityPersistenceFluentStateBase
    where TContext : DbContext
    where TEntity : class
{
    private readonly Func<DbConnection, TContext> _contextFactory;
    private Func<TContext, IDbSet<TEntity>> _collectionNabberFromContext;
    private ITempDB _tempDb;
    private readonly List<Action<TContext, TEntity>> _runBeforePersisting = new List<Action<TContext, TEntity>>();
    private readonly List<Action<TEntity, TEntity>> _runAfterPersisting = new List<Action<TEntity, TEntity>>();

    private const string CREATED = "Created";
    private const string LAST_MODIFIED = "LastModified";
    private const string ENABLED = "Enabled";
    // EntityBase tracking fields are written by the context, not the test, so they
    // are excluded from the deep-equality comparison.
    private readonly string[] _ignoreEntityBaseFields = {CREATED, LAST_MODIFIED, ENABLED};

    private string[] _ignoredProperties;
    private Func<TEntity> _entityFactory;
    private Action<string> _contextLogAction;

    // Decimal and DateTime properties are compared with tolerance (database rounding /
    // precision loss), so they are cached once per closed generic type.
    // ReSharper disable once StaticMemberInGenericType
    private static readonly IEnumerable<PropertyInfo> DecimalProps;
    private static readonly IEnumerable<PropertyInfo> DateTimeProps;

    private Func<string, IDBMigrationsRunner> _migrationsRunnerFactory;
    private Func<ITempDB> _tempDbFactoryFunction;
    private ITempDB _sharedDatabase;
    private Action<string> _logAction;
    private bool _suppressMigrationsWarning;
    private TimeSpan _allowedDateTimeDelta;

    public EntityPersistenceFluentState(Func<TEntity> entityFactory, Func<DbConnection, TContext> contextFactory = null)
    {
        // BUGFIX: the caller-supplied entityFactory was previously assigned and then
        // unconditionally overwritten with BuildWithBuilder, so it was never used.
        // It is now honored, with BuildWithBuilder as the fallback.
        _entityFactory = entityFactory ?? BuildWithBuilder;
        _contextFactory = contextFactory ?? CreateContext;
        _logAction = Console.WriteLine;
        _allowedDateTimeDelta = EntityPersistenceFluentStateConstants.FiftyMilliseconds;
    }

    static EntityPersistenceFluentState()
    {
        var allProperties = typeof(TEntity)
            .GetProperties();
        DecimalProps = allProperties
            .Where(pi => pi.PropertyType == typeof(decimal) || pi.PropertyType == typeof(decimal?));
        DateTimeProps = allProperties
            .Where(pi => pi.PropertyType == typeof(DateTime) || pi.PropertyType == typeof(DateTime?));
    }

    /// <summary>Sets the factory which runs database migrations against the temp db.</summary>
    public EntityPersistenceFluentState<TContext, TEntity> WithDbMigrator(Func<string, IDBMigrationsRunner> migrationsRunnerFactory)
    {
        _migrationsRunnerFactory = migrationsRunnerFactory;
        return this;
    }

    /// <summary>Sets how to obtain the entity collection from the context (defaults to Set&lt;TEntity&gt;()).</summary>
    public EntityPersistenceFluentState<TContext, TEntity> WithCollection(Func<TContext, IDbSet<TEntity>> nabber)
    {
        _collectionNabberFromContext = nabber;
        return this;
    }

    /// <summary>Registers a callback to run against context and entity before saving.</summary>
    public EntityPersistenceFluentState<TContext, TEntity> BeforePersisting(Action<TContext, TEntity> toRun)
    {
        _runBeforePersisting.Add(toRun);
        return this;
    }

    /// <summary>Registers a callback invoked with (original, persisted) after recall.</summary>
    public EntityPersistenceFluentState<TContext, TEntity> AfterPersisting(Action<TEntity, TEntity> runAfterPersisting)
    {
        _runAfterPersisting.Add(runAfterPersisting);
        return this;
    }

    /// <summary>Property names to exclude from the persisted-vs-original comparison.</summary>
    public EntityPersistenceFluentState<TContext, TEntity> WithIgnoredProperties(params string[] propertyNames)
    {
        _ignoredProperties = propertyNames;
        return this;
    }

    /// <summary>Overrides the entity factory; null restores the random-builder default.</summary>
    public EntityPersistenceFluentState<TContext, TEntity> WithEntityFactory(Func<TEntity> factoryFunc)
    {
        _entityFactory = factoryFunc ?? BuildWithBuilder;
        return this;
    }

    /// <summary>Hooks Entity Framework's Database.Log output.</summary>
    public EntityPersistenceFluentState<TContext, TEntity> WithEntityFrameworkLogger(Action<string> logAction)
    {
        _contextLogAction = logAction;
        return this;
    }

    /// <summary>Overrides where this helper writes its own diagnostics (default: Console).</summary>
    public EntityPersistenceFluentState<TContext, TEntity> WithLogAction(Action<string> logAction)
    {
        _logAction = logAction;
        return this;
    }

    /// <summary>Sets the tolerance used when comparing persisted DateTime properties.</summary>
    public EntityPersistenceFluentState<TContext, TEntity> WithAllowedDateTimePropertyDelta(TimeSpan timeSpan)
    {
        _allowedDateTimeDelta = timeSpan;
        return this;
    }

    // Resolves the entity collection, preferring a caller-supplied nabber.
    private IDbSet<TEntity> GetCollection(TContext context)
    {
        if (_collectionNabberFromContext != null)
            return _collectionNabberFromContext(context);
        return context.Set<TEntity>();
    }

    // Default context factory: assumes TContext has a (DbConnection) constructor.
    private TContext CreateContext(DbConnection dbConnection)
    {
        var context = (TContext) Activator.CreateInstance(typeof(TContext), dbConnection);
        if (_contextLogAction != null)
            context.Database.Log = _contextLogAction;
        return context;
    }

    // Default entity factory: locate (or generate) a GenericBuilder for TEntity and
    // build an instance with random property values.
    private TEntity BuildWithBuilder()
    {
        var entityType = typeof(TEntity);
        var builderType = GenericBuilderLocator.TryFindExistingBuilderFor(entityType)
                          ?? GenericBuilderLocator.FindOrGenerateDynamicBuilderFor(entityType);
        Assert.IsNotNull(builderType, $"Can't find or create a builder for {entityType.Name}");
        var builder = Activator.CreateInstance(builderType) as IGenericBuilder;
        Assert.IsNotNull(builder, $"Located builder {builderType.Name} does not implement IGenericBuilder");
        var entity = builder.GenericWithRandomProps().GenericBuild() as TEntity;
        Assert.IsNotNull(entity, $"located builder {builderType.Name} for {entityType.Name} builds NULL or invalid entity");
        return entity;
    }

    /// <summary>Selects building via a GenericBuilder (the type parameter documents intent).</summary>
    public EntityPersistenceFluentState<TContext, TEntity> WithBuilder<TEntityBuilder>()
        where TEntityBuilder : GenericBuilder<TEntityBuilder, TEntity>
    {
        _entityFactory = BuildWithBuilder;
        return this;
    }

    /// <summary>
    /// Terminal operation: builds an entity, persists it, reads it back and asserts
    /// equality. Disposes the temp database unless it is a shared one.
    /// </summary>
    public void ShouldPersistAndRecall()
    {
        //---------------Set up test pack-------------------
        var sut = _entityFactory();
        // Virtual (navigation) properties are excluded automatically.
        var toIgnore = new List<string>(_ignoredProperties
            .EmptyIfNull()
            .Union(typeof(TEntity).VirtualProperties()));
        AttemptToPersistWith(sut, toIgnore);
        ValidatePersistenceWith(toIgnore, sut);
        if (_tempDb == _sharedDatabase)
            return;
        _tempDb.Dispose();
    }

    // Reads the persisted entity back in a fresh context and asserts it matches sut.
    private void ValidatePersistenceWith(List<string> toIgnore, TEntity sut)
    {
        using (var ctx = GetContext())
        {
            var persisted = GetPersistedEntityFrom(ctx);
            Assert.IsNotNull(persisted, "No entity of type '" + typeof(TEntity).FullName + "' found in context after saving!");
            var entityType = typeof(TEntity);
            // Conventional primary key: "<EntityName>Id" (case-insensitive) must be set by the db.
            var idProp = entityType.GetProperties().FirstOrDefault(pi => pi.Name.ToLower() == entityType.Name.ToLower() + "id");
            if (idProp != null && !toIgnore.EmptyIfNull().Contains(idProp.Name))
                Assert.AreNotEqual(0, idProp.GetValue(persisted));
            // Decimal/DateTime ("cranky") properties are excluded from deep equality and
            // checked with tolerance below.
            var ignoreAndCrankyProperties = toIgnore.Union(
                DecimalProps.Union(DateTimeProps).Select(pi => pi.Name)
            ).ToArray();
            PropertyAssert.AreDeepEqual(persisted, sut, ignoreAndCrankyProperties);
            TestDecimalPropertiesOn(sut, persisted);
            TestDateTimePropertiesOn(sut, persisted);
            _runAfterPersisting.ForEach(a => a.Invoke(sut, persisted));
        }
    }

    // DateTime properties must round-trip within _allowedDateTimeDelta; null-ness must match.
    private void TestDateTimePropertiesOn(TEntity sut, TEntity persisted)
    {
        var allowed = Math.Abs(_allowedDateTimeDelta.TotalMilliseconds);
        DateTimeProps.ForEach(pi =>
        {
            var beforeValue = (DateTime?) pi.GetValue(sut);
            var afterValue = (DateTime?) pi.GetValue(persisted);
            if (beforeValue.HasValue && afterValue.HasValue)
            {
                var delta = Math.Abs((beforeValue.Value - afterValue.Value).TotalMilliseconds);
                Assert.That(delta, Is.LessThanOrEqualTo(allowed),
                    $"Property mismatch: expected {pi.Name} to persist and recall with an accuracy within {allowed} ms");
                return;
            }
            if (beforeValue.HasValue != afterValue.HasValue)
            {
                Assert.Fail($"Property mismatch for {pi.Name}: expected {beforeValue} but got {afterValue}");
            }
        });
    }

    // Decimal properties are compared with ShouldMatch (tolerant of db precision); null-ness must match.
    private static void TestDecimalPropertiesOn(TEntity sut, TEntity persisted)
    {
        foreach (var pi in DecimalProps)
        {
            var beforeValue = (decimal?) pi.GetValue(sut);
            var afterValue = (decimal?) pi.GetValue(persisted);
            if (beforeValue.HasValue && afterValue.HasValue)
            {
                afterValue.Value.ShouldMatch(beforeValue.Value);
                continue;
            }
            if (beforeValue.HasValue != afterValue.HasValue)
                Assert.Fail($"Property mismatch for {pi.Name}: expected {beforeValue} but got {afterValue}");
        }
    }

    // The context is expected to contain exactly one entity (precondition enforced at save time).
    private TEntity GetPersistedEntityFrom(TContext ctx)
    {
        return GetCollection(ctx).FirstOrDefault();
    }

    // Saves sut, also validating automatic Created/LastModified/Enabled tracking when supported.
    private void AttemptToPersistWith(TEntity sut, List<string> toIgnore)
    {
        using (var ctx = GetContext())
        {
            if (ctx is DbContextWithAutomaticTrackingFields)
            {
                var entity = sut as EntityBase;
                if (entity != null)
                {
                    // Clear Created so the context must populate it; exclude tracking
                    // fields from the later deep comparison.
                    entity.Created = default(DateTime);
                    toIgnore.AddRange(_ignoreEntityBaseFields);
                }
            }
            var beforeTest = DateTime.Now;
            //---------------Assert Precondition----------------
            _runBeforePersisting.ForEach(a => a.Invoke(ctx, sut));
            Assert.IsFalse(SomeEntitiesAlreadyExistIn(ctx), "Some entities already exist. Please clear out your context before running this test");
            //---------------Execute Test ----------------------
            GetCollection(ctx).Add(sut);
            ctx.SaveChangesWithErrorReporting();
            var afterTest = DateTime.Now;
            //---------------Test Result -----------------------
            if (ctx is DbContextWithAutomaticTrackingFields)
            {
                var entity = sut as EntityBase;
                if (entity != null)
                {
                    // Created must have been stamped within the save window.
                    Assert.That((object) entity.Created, Is.GreaterThanOrEqualTo(beforeTest));
                    Assert.That((object) entity.Created, Is.LessThanOrEqualTo(afterTest));
                    Assert.IsNull(entity.LastModified);
                    Assert.IsTrue(entity.Enabled);
                    // modify to test LastModified
                    beforeTest = DateTime.Now;
                    entity.LastModified = DateTime.MinValue;
                    ctx.SaveChangesWithErrorReporting();
                    afterTest = DateTime.Now;
                    Assert.That((object) entity.LastModified, Is.GreaterThanOrEqualTo(beforeTest));
                    Assert.That((object) entity.LastModified, Is.LessThanOrEqualTo(afterTest));
                }
            }
        }
    }

    private bool SomeEntitiesAlreadyExistIn(TContext ctx)
    {
        return GetCollection(ctx).Any();
    }

    private TContext GetContext()
    {
        return _contextFactory(GetConnection());
    }

    private DbConnection GetConnection()
    {
        var tempDb = GetTempDb();
        return tempDb.OpenConnection();
    }

    // Lazily resolves the database: shared db if configured, else a fresh temp db.
    private ITempDB GetTempDb()
    {
        return _tempDb ?? (_tempDb = GetSharedDatabase() ?? CreateTempDb());
    }

    // Returns the shared database, running migrations on it exactly once process-wide.
    private ITempDB GetSharedDatabase()
    {
        if (_sharedDatabase == null)
            return null;
        lock (SharedMigrationsLock)
        {
            if (SharedDatabasesWhichHaveBeenMigrated.Contains(_sharedDatabase))
                return _sharedDatabase;
            SharedDatabasesWhichHaveBeenMigrated.Add(_sharedDatabase);
            return MigrateUpOn(_sharedDatabase);
        }
    }

    private ITempDB CreateTempDb()
    {
        var db = _tempDbFactoryFunction?.Invoke() ?? new TempDBLocalDb();
        return MigrateUpOn(db);
    }

    // Runs configured migrations; without a migrator, warns (once per call) and lets
    // Entity Framework create the schema itself.
    private ITempDB MigrateUpOn(ITempDB db)
    {
        if (_migrationsRunnerFactory == null)
        {
            if (!_suppressMigrationsWarning)
                _logAction(
@"WARNING: running tests without specified DBMigrationsRunner.
EntityFramework will perform migrations, which probably won't test what you want to.
To suppress this message, include .SuppressMissingMigratorMessage() in your fluent call chain.");
            return db;
        }
        var runner = _migrationsRunnerFactory(db.ConnectionString);
        runner.MigrateToLatest();
        return db;
    }

    /// <summary>Overrides how fresh temp databases are created (default: TempDBLocalDb).</summary>
    public EntityPersistenceFluentState<TContext, TEntity> WithTempDbFactory(Func<ITempDB> factoryFunction)
    {
        _tempDbFactoryFunction = factoryFunction;
        return this;
    }

    /// <summary>Uses a shared (caller-owned, not disposed here) temp database.</summary>
    public EntityPersistenceFluentState<TContext, TEntity> WithSharedDatabase(ITempDB tempDb)
    {
        _sharedDatabase = tempDb;
        return this;
    }

    /// <summary>Silences the missing-migrator warning emitted by MigrateUpOn.</summary>
    public EntityPersistenceFluentState<TContext, TEntity> SuppressMissingMigratorMessage()
    {
        _suppressMigrationsWarning = true;
        return this;
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using Xunit;
namespace System.Linq.Tests
{
public class AverageTests : EnumerableTests
{
[Fact]
public void SameResultsRepeatCallsIntQuery()
{
    // Average over an unchanged int query must be repeatable (deterministic).
    var q = from x in new[] { 9999, 0, 888, -1, 66, -777, 1, 2, -12345 }
            where x > int.MinValue
            select x;

    Assert.Equal(q.Average(), q.Average());
}

[Fact]
public void SameResultsRepeatCallsNullableLongQuery()
{
    // Same repeatability check for a nullable-long source (nulls are skipped by Average).
    var q = from x in new long?[] { int.MaxValue, 0, 255, 127, 128, 1, 33, 99, null, int.MinValue }
            select x;

    Assert.Equal(q.Average(), q.Average());
}
// Data: (source, expected average) — empty and all-null sources average to null.
public static IEnumerable<object[]> NullableFloat_TestData()
{
    yield return new object[] { new float?[0], null };
    yield return new object[] { new float?[] { float.MinValue }, float.MinValue };
    yield return new object[] { new float?[] { 0f, 0f, 0f, 0f, 0f }, 0f };
    yield return new object[] { new float?[] { 5.5f, 0, null, null, null, 15.5f, 40.5f, null, null, -23.5f }, 7.6f };
    yield return new object[] { new float?[] { null, null, null, null, 45f }, 45f };
    yield return new object[] { new float?[] { null, null, null, null, null }, null };
}

// BUGFIX: renamed from "NullableFoat" (typo). Test methods are discovered via
// attributes by the runner, so the rename has no callers to break.
[Theory]
[MemberData(nameof(NullableFloat_TestData))]
public void NullableFloat(float?[] source, float? expected)
{
    Assert.Equal(expected, source.Average());
    Assert.Equal(expected, source.Average(x => x));
}

[Fact]
public void NullableFloat_NullSource_ThrowsArgumentNullException()
{
    Assert.Throws<ArgumentNullException>("source", () => ((IEnumerable<float?>)null).Average());
    Assert.Throws<ArgumentNullException>("source", () => ((IEnumerable<float?>)null).Average(i => i));
}

[Fact]
public void NullableFloat_NullSelector_ThrowsArgumentNullException()
{
    Func<float?, float?> selector = null;
    Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<float?>().Average(selector));
}

[Fact]
public void NullableFloat_WithSelector()
{
    // Null selector results are skipped: (5.5 + 15.5) / 2 == 10.5.
    var source = new []
    {
        new { name = "Tim", num = (float?)5.5f },
        new { name = "John", num = (float?)15.5f },
        new { name = "Bob", num = default(float?) }
    };
    float? expected = 10.5f;
    Assert.Equal(expected, source.Average(e => e.num));
}
[Fact]
public void Int_EmptySource_ThrowsInvalidOperationException()
{
    // Non-nullable Average has no "no elements" representation, so it must throw.
    int[] source = new int[0];
    Assert.Throws<InvalidOperationException>(() => source.Average());
    Assert.Throws<InvalidOperationException>(() => source.Average(i => i));
}

[Fact]
public void Int_NullSource_ThrowsArgumentNullException()
{
    Assert.Throws<ArgumentNullException>("source", () => ((IEnumerable<int>)null).Average());
    Assert.Throws<ArgumentNullException>("source", () => ((IEnumerable<int>)null).Average(i => i));
}

[Fact]
public void Int_NullSelector_ThrowsArgumentNullException()
{
    Func<int, int> selector = null;
    Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<int>().Average(selector));
}

// Data: (source, expected average as double).
public static IEnumerable<object[]> Int_TestData()
{
    yield return new object[] { new int[] { 5 }, 5 };
    yield return new object[] { new int[] { 0, 0, 0, 0, 0 }, 0 };
    yield return new object[] { new int[] { 5, -10, 15, 40, 28 }, 15.6 };
}

[Theory]
[MemberData(nameof(Int_TestData))]
public void Int(int[] source, double expected)
{
    Assert.Equal(expected, source.Average());
    Assert.Equal(expected, source.Average(x => x));
}

[Fact]
public void Int_WithSelector()
{
    // (10 - 10 + 15) / 3 == 5.
    var source = new []
    {
        new { name="Tim", num = 10 },
        new { name="John", num = -10 },
        new { name="Bob", num = 15 }
    };
    double expected = 5;
    Assert.Equal(expected, source.Average(e => e.num));
}
// Data: (source, expected average) — empty and all-null sources average to null.
public static IEnumerable<object[]> NullableInt_TestData()
{
    yield return new object[] { new int?[0], null };
    yield return new object[] { new int?[] { -5 }, -5.0 };
    yield return new object[] { new int?[] { 0, 0, 0, 0, 0 }, 0.0 };
    yield return new object[] { new int?[] { 5, -10, null, null, null, 15, 40, 28, null, null }, 15.6 };
    yield return new object[] { new int?[] { null, null, null, null, 50 }, 50.0 };
    yield return new object[] { new int?[] { null, null, null, null, null }, null };
}

[Theory]
[MemberData(nameof(NullableInt_TestData))]
public void NullableInt(int?[] source, double? expected)
{
    Assert.Equal(expected, source.Average());
    Assert.Equal(expected, source.Average(x => x));
}

[Fact]
public void NullableInt_NullSource_ThrowsArgumentNullException()
{
    Assert.Throws<ArgumentNullException>("source", () => ((IEnumerable<int?>)null).Average());
    Assert.Throws<ArgumentNullException>("source", () => ((IEnumerable<int?>)null).Average(i => i));
}

[Fact]
public void NullableInt_NullSelector_ThrowsArgumentNullException()
{
    Func<int?, int?> selector = null;
    Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<int?>().Average(selector));
}

[Fact]
public void NullableInt_WithSelector()
{
    // Null selector results are skipped: (10 + 10) / 2 == 10.
    var source = new []
    {
        new { name = "Tim", num = (int?)10 },
        new { name = "John", num = default(int?) },
        new { name = "Bob", num = (int?)10 }
    };
    double? expected = 10;
    Assert.Equal(expected, source.Average(e => e.num));
}
[Fact]
public void Long_EmptySource_ThrowsInvalidOperationException()
{
    long[] source = new long[0];
    Assert.Throws<InvalidOperationException>(() => source.Average());
    Assert.Throws<InvalidOperationException>(() => source.Average(i => i));
}

[Fact]
public void Long_NullSource_ThrowsArgumentNullException()
{
    Assert.Throws<ArgumentNullException>("source", () => ((IEnumerable<long>)null).Average());
    Assert.Throws<ArgumentNullException>("source", () => ((IEnumerable<long>)null).Average(i => i));
}

[Fact]
public void Long_NullSelector_ThrowsArgumentNullException()
{
    Func<long, long> selector = null;
    Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<long>().Average(selector));
}

// Data: (source, expected average as double).
public static IEnumerable<object[]> Long_TestData()
{
    yield return new object[] { new long[] { long.MaxValue }, long.MaxValue };
    yield return new object[] { new long[] { 0, 0, 0, 0, 0 }, 0 };
    yield return new object[] { new long[] { 5, -10, 15, 40, 28 }, 15.6 };
}

[Theory]
[MemberData(nameof(Long_TestData))]
public void Long(long[] source, double expected)
{
    Assert.Equal(expected, source.Average());
    Assert.Equal(expected, source.Average(x => x));
}

[Fact]
public void Long_FromSelector()
{
    // (40 + 50 + 60) / 3 == 50.
    var source = new []
    {
        new { name = "Tim", num = 40L },
        new { name = "John", num = 50L },
        new { name = "Bob", num = 60L }
    };
    double expected = 50;
    Assert.Equal(expected, source.Average(e => e.num));
}

[Fact]
public void Long_SumTooLarge_ThrowsOverflowException()
{
    // The long accumulator overflows before dividing, so Average throws.
    long[] source = new long[] { long.MaxValue, long.MaxValue };
    Assert.Throws<OverflowException>(() => source.Average());
}
// Data: (source, expected average) — empty and all-null sources average to null.
public static IEnumerable<object[]> NullableLong_TestData()
{
    yield return new object[] { new long?[0], null };
    yield return new object[] { new long?[] { long.MaxValue }, (double)long.MaxValue };
    yield return new object[] { new long?[] { 0, 0, 0, 0, 0 }, 0.0 };
    yield return new object[] { new long?[] { 5, -10, null, null, null, 15, 40, 28, null, null }, 15.6 };
    yield return new object[] { new long?[] { null, null, null, null, 50 }, 50.0 };
    yield return new object[] { new long?[] { null, null, null, null, null }, null };
}

[Theory]
[MemberData(nameof(NullableLong_TestData))]
public void NullableLong(long?[] source, double? expected)
{
    Assert.Equal(expected, source.Average());
    Assert.Equal(expected, source.Average(x => x));
}

[Fact]
public void NullableLong_NullSource_ThrowsArgumentNullException()
{
    Assert.Throws<ArgumentNullException>("source", () => ((IEnumerable<long?>)null).Average());
    Assert.Throws<ArgumentNullException>("source", () => ((IEnumerable<long?>)null).Average(i => i));
}

[Fact]
public void NullableLong_NullSelector_ThrowsArgumentNullException()
{
    Func<long?, long?> selector = null;
    Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<long?>().Average(selector));
}

[Fact]
public void NullableLong_WithSelector()
{
    // Null selector results are skipped: (40 + 30) / 2 == 35.
    var source = new []
    {
        new { name = "Tim", num = (long?)40L },
        new { name = "John", num = default(long?) },
        new { name = "Bob", num = (long?)30L }
    };
    double? expected = 35;
    Assert.Equal(expected, source.Average(e => e.num));
}
[Fact]
public void Double_EmptySource_ThrowsInvalidOperationException()
{
    double[] source = new double[0];
    Assert.Throws<InvalidOperationException>(() => source.Average());
    Assert.Throws<InvalidOperationException>(() => source.Average(i => i));
}

[Fact]
public void Double_NullSource_ThrowsArgumentNullException()
{
    Assert.Throws<ArgumentNullException>("source", () => ((IEnumerable<double>)null).Average());
    Assert.Throws<ArgumentNullException>("source", () => ((IEnumerable<double>)null).Average(i => i));
}

[Fact]
public void Double_NullSelector_ThrowsArgumentNullException()
{
    Func<double, double> selector = null;
    Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<double>().Average(selector));
}

// Data: (source, expected average) — any NaN element makes the average NaN.
public static IEnumerable<object[]> Double_TestData()
{
    yield return new object[] { new double[] { double.MaxValue }, double.MaxValue };
    yield return new object[] { new double[] { 0.0, 0.0, 0.0, 0.0, 0.0 }, 0 };
    yield return new object[] { new double[] { 5.5, -10, 15.5, 40.5, 28.5 }, 16 };
    yield return new object[] { new double[] { 5.58, Double.NaN, 30, 4.55, 19.38 }, double.NaN };
}

[Theory]
[MemberData(nameof(Double_TestData))]
public void Average_Double(double[] source, double expected)
{
    Assert.Equal(expected, source.Average());
    Assert.Equal(expected, source.Average(x => x));
}

[Fact]
public void Double_WithSelector()
{
    // (5.5 + 15.5 + 3.0) / 3 == 8.0.
    var source = new []
    {
        new { name = "Tim", num = 5.5},
        new { name = "John", num = 15.5},
        new { name = "Bob", num = 3.0}
    };
    double expected = 8.0;
    Assert.Equal(expected, source.Average(e => e.num));
}
// Data: (source, expected average) — nulls are skipped, but a non-null NaN poisons the result.
public static IEnumerable<object[]> NullableDouble_TestData()
{
    yield return new object[] { new double?[0], null };
    yield return new object[] { new double?[] { double.MinValue }, double.MinValue };
    yield return new object[] { new double?[] { 0, 0, 0, 0, 0 }, 0.0 };
    yield return new object[] { new double?[] { 5.5, 0, null, null, null, 15.5, 40.5, null, null, -23.5 }, 7.6 };
    yield return new object[] { new double?[] { null, null, null, null, 45 }, 45.0 };
    yield return new object[] { new double?[] { -23.5, 0, double.NaN, 54.3, 0.56 }, double.NaN };
    yield return new object[] { new double?[] { null, null, null, null, null }, null };
}

[Theory]
[MemberData(nameof(NullableDouble_TestData))]
public void NullableDouble(double?[] source, double? expected)
{
    Assert.Equal(expected, source.Average());
    Assert.Equal(expected, source.Average(x => x));
}

[Fact]
public void NullableDouble_NullSource_ThrowsArgumentNullException()
{
    Assert.Throws<ArgumentNullException>("source", () => ((IEnumerable<double?>)null).Average());
    Assert.Throws<ArgumentNullException>("source", () => ((IEnumerable<double?>)null).Average(i => i));
}

[Fact]
public void NullableDouble_NullSelector_ThrowsArgumentNullException()
{
    Func<double?, double?> selector = null;
    Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<double?>().Average(selector));
}

[Fact]
public void NullableDouble_WithSelector()
{
    // Null selector results are skipped: (5.5 + 15.5) / 2 == 10.5.
    var source = new[]
    {
        new{ name = "Tim", num = (double?)5.5 },
        new{ name = "John", num = (double?)15.5 },
        new{ name = "Bob", num = default(double?) }
    };
    double? expected = 10.5;
    Assert.Equal(expected, source.Average(e => e.num));
}
[Fact]
public void Decimal_EmptySource_ThrowsInvalidOperationException()
{
decimal[] source = new decimal[0];
Assert.Throws<InvalidOperationException>(() => source.Average());
Assert.Throws<InvalidOperationException>(() => source.Average(i => i));
}
[Fact]
public void Decimal_NullSource_ThrowsArgumentNullException()
{
    // Both overloads validate the source argument eagerly.
    IEnumerable<decimal> nullSource = null;
    Assert.Throws<ArgumentNullException>("source", () => nullSource.Average());
    Assert.Throws<ArgumentNullException>("source", () => nullSource.Average(i => i));
}
[Fact]
public void Decimal_NullSelector_ThrowsArgumentNullException()
{
    // A null selector is rejected before the source is enumerated.
    Func<decimal, decimal> nullSelector = null;
    Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<decimal>().Average(nullSelector));
}
// Test data: (source, expected average).
// NOTE(review): the expected values 0 and 16 are boxed ints; xUnit converts them
// to the decimal parameter type — confirm this matches the runner's conversion rules.
public static IEnumerable<object[]> Decimal_TestData()
{
    yield return new object[] { new decimal[] { decimal.MaxValue }, decimal.MaxValue };
    yield return new object[] { new decimal[] { 0.0m, 0.0m, 0.0m, 0.0m, 0.0m }, 0 };
    // (5.5 - 10 + 15.5 + 40.5 + 28.5) / 5 == 16
    yield return new object[] { new decimal[] { 5.5m, -10m, 15.5m, 40.5m, 28.5m }, 16 };
}
[Theory]
[MemberData(nameof(Decimal_TestData))]
public void Decimal(decimal[] source, decimal expected)
{
    // The identity-selector overload must agree with the plain overload.
    Assert.Equal(expected, source.Average(value => value));
    Assert.Equal(expected, source.Average());
}
[Fact]
public void Decimal_WithSelector()
{
    // (5.5 + 15.5 + 3.0) / 3 == 8.0
    var people = new[]
    {
        new{ name = "Tim", num = 5.5m},
        new{ name = "John", num = 15.5m},
        new{ name = "Bob", num = 3.0m}
    };
    Assert.Equal(8.0m, people.Average(p => p.num));
}
// Test data: (source, expected average). Null entries are skipped; an empty or
// all-null source yields a null average.
public static IEnumerable<object[]> NullableDecimal_TestData()
{
    yield return new object[] { new decimal?[0], null };
    yield return new object[] { new decimal?[] { decimal.MinValue }, decimal.MinValue };
    yield return new object[] { new decimal?[] { 0m, 0m, 0m, 0m, 0m }, 0m };
    // (5.5 + 0 + 15.5 + 40.5 - 23.5) / 5 == 7.6
    yield return new object[] { new decimal?[] { 5.5m, 0, null, null, null, 15.5m, 40.5m, null, null, -23.5m }, 7.6m };
    yield return new object[] { new decimal?[] { null, null, null, null, 45m }, 45m };
    yield return new object[] { new decimal?[] { null, null, null, null, null }, null };
}
[Theory]
[MemberData(nameof(NullableDecimal_TestData))]
public void NullableDecimal(decimal?[] source, decimal? expected)
{
    // The identity-selector overload must agree with the plain overload.
    Assert.Equal(expected, source.Average(value => value));
    Assert.Equal(expected, source.Average());
}
[Fact]
public void NullableDecimal_NullSource_ThrowsArgumentNullException()
{
    // Both overloads validate the source argument eagerly.
    IEnumerable<decimal?> nullSource = null;
    Assert.Throws<ArgumentNullException>("source", () => nullSource.Average());
    Assert.Throws<ArgumentNullException>("source", () => nullSource.Average(i => i));
}
[Fact]
public void NullableDecimal_NullSelector_ThrowsArgumentNullException()
{
    // A null selector is rejected before the source is enumerated.
    Func<decimal?, decimal?> nullSelector = null;
    Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<decimal?>().Average(nullSelector));
}
[Fact]
public void NullableDecimal_WithSelector()
{
    // Null projected values are skipped: (5.5 + 15.5) / 2 == 10.5.
    var people = new[]
    {
        new{ name = "Tim", num = (decimal?)5.5m},
        new{ name = "John", num = (decimal?)15.5m},
        new{ name = "Bob", num = (decimal?)null}
    };
    Assert.Equal((decimal?)10.5m, people.Average(p => p.num));
}
[Fact]
public void NullableDecimal_SumTooLarge_ThrowsOverflowException()
{
    // decimal addition is checked, so summing two MaxValues overflows.
    var source = new decimal?[] { decimal.MaxValue, decimal.MaxValue };
    Assert.Throws<OverflowException>(() => source.Average());
}
[Fact]
public void Float_EmptySource_ThrowsInvalidOperationException()
{
    // Averaging an empty non-nullable sequence has no defined result.
    var empty = new float[0];
    Assert.Throws<InvalidOperationException>(() => empty.Average());
    Assert.Throws<InvalidOperationException>(() => empty.Average(i => i));
}
[Fact]
public void Float_NullSource_ThrowsArgumentNullException()
{
    // Both overloads validate the source argument eagerly.
    IEnumerable<float> nullSource = null;
    Assert.Throws<ArgumentNullException>("source", () => nullSource.Average());
    Assert.Throws<ArgumentNullException>("source", () => nullSource.Average(i => i));
}
[Fact]
public void Float_NullSelector_ThrowsArgumentNullException()
{
    // A null selector is rejected before the source is enumerated.
    Func<float, float> nullSelector = null;
    Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<float>().Average(nullSelector));
}
// Test data: (source, expected average).
public static IEnumerable<object[]> Float_TestData()
{
    yield return new object[] { new float[] { float.MaxValue }, float.MaxValue };
    yield return new object[] { new float[] { 0.0f, 0.0f, 0.0f, 0.0f, 0.0f }, 0f };
    // (5.5 - 10 + 15.5 + 40.5 + 28.5) / 5 == 16
    yield return new object[] { new float[] { 5.5f, -10f, 15.5f, 40.5f, 28.5f }, 16f };
}
[Theory]
[MemberData(nameof(Float_TestData))]
public void Float(float[] source, float expected)
{
    // The identity-selector overload must agree with the plain overload.
    Assert.Equal(expected, source.Average(value => value));
    Assert.Equal(expected, source.Average());
}
[Fact]
public void Float_WithSelector()
{
    // (5.5 + 15.5 + 3.0) / 3 == 8.0
    var people = new[]
    {
        new{ name = "Tim", num = 5.5f},
        new{ name = "John", num = 15.5f},
        new{ name = "Bob", num = 3.0f}
    };
    Assert.Equal(8.0f, people.Average(p => p.num));
}
}
}
// ---- (chunk boundary artifact; content below belongs to NodaTime.Test.PeriodTest) ----
// Copyright 2010 The Noda Time Authors. All rights reserved.
// Use of this source code is governed by the Apache License 2.0,
// as found in the LICENSE.txt file.
using System;
using NodaTime.Calendars;
using NodaTime.Text;
using NUnit.Framework;
namespace NodaTime.Test
{
public class PeriodTest
{
// Fixed inputs shared by the tests below.
// June 19th 2010, 2:30:15am
private static readonly LocalDateTime TestDateTime1 = new LocalDateTime(2010, 6, 19, 2, 30, 15);
// June 19th 2010, 4:45:10am
private static readonly LocalDateTime TestDateTime2 = new LocalDateTime(2010, 6, 19, 4, 45, 10);
// June 19th 2010
private static readonly LocalDate TestDate1 = new LocalDate(2010, 6, 19);
// March 1st 2011
private static readonly LocalDate TestDate2 = new LocalDate(2011, 3, 1);
// March 1st 2012
private static readonly LocalDate TestDate3 = new LocalDate(2012, 3, 1);
// Unit set used by the hour/minute rounding tests.
private const PeriodUnits HoursMinutesPeriodType = PeriodUnits.Hours | PeriodUnits.Minutes;
// Every named PeriodUnits value (including combinations), used as a test case source.
private static readonly PeriodUnits[] AllPeriodUnits = (PeriodUnits[])Enum.GetValues(typeof(PeriodUnits));
[Test]
public void BetweenLocalDateTimes_WithoutSpecifyingUnits_OmitsWeeks()
{
    // A gap of exactly one week comes back as 7 days, not 1 week, by default.
    Period expected = Period.FromDays(7);
    Assert.AreEqual(expected, Period.Between(new LocalDateTime(2012, 2, 21, 0, 0), new LocalDateTime(2012, 2, 28, 0, 0)));
}
[Test]
public void BetweenLocalDateTimes_MovingForwardWithAllFields_GivesExactResult()
{
    // 2:30:15 -> 4:45:10 on the same day.
    Period expected = Period.FromHours(2) + Period.FromMinutes(14) + Period.FromSeconds(55);
    Assert.AreEqual(expected, Period.Between(TestDateTime1, TestDateTime2));
}
[Test]
public void BetweenLocalDateTimes_MovingBackwardWithAllFields_GivesExactResult()
{
    // The reverse direction simply negates every component.
    Period expected = Period.FromHours(-2) + Period.FromMinutes(-14) + Period.FromSeconds(-55);
    Assert.AreEqual(expected, Period.Between(TestDateTime2, TestDateTime1));
}
[Test]
public void BetweenLocalDateTimes_MovingForwardWithHoursAndMinutes_RoundsTowardsStart()
{
    // With only hours/minutes requested, the leftover 55 seconds are truncated.
    Period expected = Period.FromHours(2) + Period.FromMinutes(14);
    Assert.AreEqual(expected, Period.Between(TestDateTime1, TestDateTime2, HoursMinutesPeriodType));
}
[Test]
public void BetweenLocalDateTimes_MovingBackwardWithHoursAndMinutes_RoundsTowardsStart()
{
    // Truncation is towards the start, so the backward result mirrors the forward one.
    Period expected = Period.FromHours(-2) + Period.FromMinutes(-14);
    Assert.AreEqual(expected, Period.Between(TestDateTime2, TestDateTime1, HoursMinutesPeriodType));
}
[Test]
public void BetweenLocalDateTimes_AcrossDays()
{
    // One minute short of a full day: 23 hours and 59 minutes.
    LocalDateTime end = TestDateTime1.PlusDays(1).PlusMinutes(-1);
    Assert.AreEqual(Period.FromHours(23) + Period.FromMinutes(59), Period.Between(TestDateTime1, end));
}
[Test]
public void BetweenLocalDateTimes_AcrossDays_MinutesAndSeconds()
{
    // One second short of a full day, expressed only in minutes and seconds.
    LocalDateTime end = TestDateTime1.PlusDays(1).PlusSeconds(-1);
    Period expected = Period.FromMinutes(24 * 60 - 1) + Period.FromSeconds(59);
    Assert.AreEqual(expected, Period.Between(TestDateTime1, end, PeriodUnits.Minutes | PeriodUnits.Seconds));
}
[Test]
public void BetweenLocalDateTimes_NotInt64Representable()
{
    // A 14,000-year span whose tick count exceeds Int64.MaxValue: Between must
    // still compute a correct time-units-only period without overflowing.
    LocalDateTime start = new LocalDateTime(-5000, 1, 1, 0, 1, 2, 123);
    LocalDateTime end = new LocalDateTime( 9000, 1, 1, 1, 2, 3, 456);
    // Sanity check: the raw duration really is too large for Int64 ticks/nanos.
    Assert.False((end.ToLocalInstant().TimeSinceLocalEpoch - start.ToLocalInstant().TimeSinceLocalEpoch).IsInt64Representable);
    Period expected = new PeriodBuilder
    {
        // 365.2425 * 14000 = 5113395
        Hours = 5113395L * 24 + 1,
        Minutes = 1,
        Seconds = 1,
        Milliseconds = 333
    }.Build();
    Period actual = Period.Between(start, end, PeriodUnits.AllTimeUnits);
    Assert.AreEqual(expected, actual);
}
[Test]
public void BetweenLocalDates_InvalidUnits()
{
    // The date-only overload rejects empty, unknown, and time-based unit sets.
    void AssertRejected(PeriodUnits units) =>
        Assert.Throws<ArgumentException>(() => Period.Between(TestDate1, TestDate2, units));
    AssertRejected(0);
    AssertRejected((PeriodUnits)(-1));
    AssertRejected(PeriodUnits.AllTimeUnits);
    AssertRejected(PeriodUnits.Years | PeriodUnits.Hours);
}
[Test]
public void BetweenLocalDates_DifferentCalendarSystems_Throws()
{
    // Dates in different calendar systems cannot be compared for a period.
    LocalDate copticDate = new LocalDate(2017, 11, 1, CalendarSystem.Coptic);
    LocalDate gregorianDate = new LocalDate(2017, 11, 5, CalendarSystem.Gregorian);
    Assert.Throws<ArgumentException>(() => Period.Between(copticDate, gregorianDate));
}
[Test]
// Each case requests a single unit; the whole difference is expressed in it.
[TestCase("2016-05-16", "2019-03-13", PeriodUnits.Years, 2)]
[TestCase("2016-05-16", "2017-07-13", PeriodUnits.Months, 13)]
[TestCase("2016-05-16", "2016-07-13", PeriodUnits.Weeks, 8)]
[TestCase("2016-05-16", "2016-07-13", PeriodUnits.Days, 58)]
public void BetweenLocalDates_SingleUnit(string startText, string endText, PeriodUnits units, int expectedValue)
{
    var start = LocalDatePattern.Iso.Parse(startText).Value;
    var end = LocalDatePattern.Iso.Parse(endText).Value;
    var actual = Period.Between(start, end, units);
    // PeriodBuilder's indexer assigns the value to whichever unit was requested.
    var expected = new PeriodBuilder { [units] = expectedValue }.Build();
    Assert.AreEqual(expected, actual);
}
[Test]
public void BetweenLocalDates_MovingForwardNoLeapYears_WithExactResults()
{
    // 2010-06-19 -> 2011-03-01 is exactly 8 months and 10 days.
    Period expected = Period.FromMonths(8) + Period.FromDays(10);
    Assert.AreEqual(expected, Period.Between(TestDate1, TestDate2));
}
[Test]
public void BetweenLocalDates_MovingForwardInLeapYear_WithExactResults()
{
    // 2010-06-19 -> 2012-03-01 crosses the 2012 leap day: 1 year, 8 months, 11 days.
    Period expected = Period.FromYears(1) + Period.FromMonths(8) + Period.FromDays(11);
    Assert.AreEqual(expected, Period.Between(TestDate1, TestDate3));
}
[Test]
public void BetweenLocalDates_MovingBackwardNoLeapYears_WithExactResults()
{
    // Going backwards is not a simple negation of forwards (days differ: -12 vs 10).
    Period expected = Period.FromMonths(-8) + Period.FromDays(-12);
    Assert.AreEqual(expected, Period.Between(TestDate2, TestDate1));
}
[Test]
public void BetweenLocalDates_MovingBackwardInLeapYear_WithExactResults()
{
    // This is asymmetric with moving forward, because we first take off a whole year, which
    // takes us to March 1st 2011, then 8 months to take us to July 1st 2010, then 12 days
    // to take us back to June 19th. In this case, the fact that our start date is in a leap
    // year had no effect.
    Period actual = Period.Between(TestDate3, TestDate1);
    Period expected = Period.FromYears(-1) + Period.FromMonths(-8) + Period.FromDays(-12);
    Assert.AreEqual(expected, actual);
}
[Test]
public void BetweenLocalDates_MovingForward_WithJustMonths()
{
    // The whole 2010-06-19 -> 2012-03-01 span expressed solely in months.
    Period expected = Period.FromMonths(20);
    Assert.AreEqual(expected, Period.Between(TestDate1, TestDate3, PeriodUnits.Months));
}
[Test]
public void BetweenLocalDates_MovingBackward_WithJustMonths()
{
    // The reverse of the months-only span is simply negated.
    Period expected = Period.FromMonths(-20);
    Assert.AreEqual(expected, Period.Between(TestDate3, TestDate1, PeriodUnits.Months));
}
[Test]
public void BetweenLocalDates_AsymmetricForwardAndBackward()
{
    // Demonstrates that Between(a, b) is not always -Between(b, a) for dates.
    // February 10th 2010
    LocalDate d1 = new LocalDate(2010, 2, 10);
    // March 30th 2010
    LocalDate d2 = new LocalDate(2010, 3, 30);
    // Going forward, we go to March 10th (1 month) then March 30th (20 days)
    Assert.AreEqual(Period.FromMonths(1) + Period.FromDays(20), Period.Between(d1, d2));
    // Going backward, we go to February 28th (-1 month, day is rounded) then February 10th (-18 days)
    Assert.AreEqual(Period.FromMonths(-1) + Period.FromDays(-18), Period.Between(d2, d1));
}
[Test]
public void BetweenLocalDates_EndOfMonth()
{
    // March 31st -> April 30th: forward is one (truncated) month; backward is plain days.
    LocalDate marchEnd = new LocalDate(2013, 3, 31);
    LocalDate aprilEnd = new LocalDate(2013, 4, 30);
    Assert.AreEqual(Period.FromMonths(1), Period.Between(marchEnd, aprilEnd));
    Assert.AreEqual(Period.FromDays(-30), Period.Between(aprilEnd, marchEnd));
}
[Test]
public void BetweenLocalDates_OnLeapYear()
{
    // Leap day to the following non-leap February 28th counts as a full year forward.
    LocalDate leapDay = new LocalDate(2012, 2, 29);
    LocalDate nextFebEnd = new LocalDate(2013, 2, 28);
    Assert.AreEqual(Period.FromYears(1), Period.Between(leapDay, nextFebEnd));
    // Go back from February 28th 2013 to March 28th 2012, then back 28 days to February 29th 2012
    Assert.AreEqual(Period.FromMonths(-11) + Period.FromDays(-28), Period.Between(nextFebEnd, leapDay));
}
[Test]
public void BetweenLocalDates_AfterLeapYear()
{
    // March 5th to March 5th across a leap year is symmetric: exactly one year each way.
    LocalDate inLeapYear = new LocalDate(2012, 3, 5);
    LocalDate yearLater = new LocalDate(2013, 3, 5);
    Assert.AreEqual(Period.FromYears(1), Period.Between(inLeapYear, yearLater));
    Assert.AreEqual(Period.FromYears(-1), Period.Between(yearLater, inLeapYear));
}
[Test]
public void BetweenLocalDateTimes_OnLeapYear()
{
    // Leap-day arithmetic with a time-of-day component.
    // NOTE(review): Parse is presumably a private helper parsing ISO period text
    // elsewhere in this class — confirm.
    LocalDateTime dt1 = new LocalDateTime(2012, 2, 29, 2, 0);
    LocalDateTime dt2 = new LocalDateTime(2012, 2, 29, 4, 0);
    LocalDateTime dt3 = new LocalDateTime(2013, 2, 28, 3, 0);
    Assert.AreEqual(Parse("P1YT1H"), Period.Between(dt1, dt3));
    Assert.AreEqual(Parse("P11M29DT23H"), Period.Between(dt2, dt3));
    Assert.AreEqual(Parse("P-11M-28DT-1H"), Period.Between(dt3, dt1));
    Assert.AreEqual(Parse("P-11M-27DT-23H"), Period.Between(dt3, dt2));
}
[Test]
public void BetweenLocalDateTimes_OnLeapYearIslamic()
{
    // Same leap-day arithmetic as above, but in an Islamic calendar where
    // year 2 is leap (month 12 has 30 days) and year 3 is not.
    var calendar = CalendarSystem.GetIslamicCalendar(IslamicLeapYearPattern.Base15, IslamicEpoch.Civil);
    Assert.IsTrue(calendar.IsLeapYear(2));
    Assert.IsFalse(calendar.IsLeapYear(3));
    LocalDateTime dt1 = new LocalDateTime(2, 12, 30, 2, 0, calendar);
    LocalDateTime dt2 = new LocalDateTime(2, 12, 30, 4, 0, calendar);
    LocalDateTime dt3 = new LocalDateTime(3, 12, 29, 3, 0, calendar);
    // Adding a year truncates to 0003-12-28T02:00:00, then add an hour.
    Assert.AreEqual(Parse("P1YT1H"), Period.Between(dt1, dt3));
    // Adding a year would overshoot. Adding 11 months takes us to month 03-11-30T04:00.
    // Adding another 28 days takes us to 03-12-28T04:00, then add another 23 hours to finish.
    Assert.AreEqual(Parse("P11M28DT23H"), Period.Between(dt2, dt3));
    // Subtracting 11 months takes us to 03-01-29T03:00. Subtracting another 29 days
    // takes us to 02-12-30T03:00, and another hour to get to the target.
    Assert.AreEqual(Parse("P-11M-29DT-1H"), Period.Between(dt3, dt1));
    Assert.AreEqual(Parse("P-11M-28DT-23H"), Period.Between(dt3, dt2));
}
[Test]
public void BetweenLocalDateTimes_InvalidUnits()
{
    // Fixed: this test previously passed TestDate1/TestDate2 (LocalDate values),
    // so it exercised the LocalDate overload and merely duplicated
    // BetweenLocalDates_InvalidUnits. Use LocalDateTime values so the
    // LocalDateTime overload's unit validation is what's under test.
    Assert.Throws<ArgumentException>(() => Period.Between(TestDateTime1, TestDateTime2, 0));
    Assert.Throws<ArgumentException>(() => Period.Between(TestDateTime1, TestDateTime2, (PeriodUnits)(-1)));
}
[Test]
public void BetweenLocalTimes_InvalidUnits()
{
    // The time-only overload rejects empty, unknown, and date-based unit sets.
    LocalTime start = new LocalTime(10, 0);
    LocalTime end = LocalTime.FromHourMinuteSecondMillisecondTick(15, 30, 45, 20, 5);
    void AssertRejected(PeriodUnits units) =>
        Assert.Throws<ArgumentException>(() => Period.Between(start, end, units));
    AssertRejected(0);
    AssertRejected((PeriodUnits)(-1));
    AssertRejected(PeriodUnits.YearMonthDay);
    AssertRejected(PeriodUnits.Years | PeriodUnits.Hours);
}
[Test]
// Each case requests a single time unit; the whole difference is expressed in it.
[TestCase("01:02:03", "05:00:00", PeriodUnits.Hours, 3)]
[TestCase("01:02:03", "03:00:00", PeriodUnits.Minutes, 117)]
[TestCase("01:02:03", "01:05:02", PeriodUnits.Seconds, 179)]
[TestCase("01:02:03", "01:02:04.1234", PeriodUnits.Milliseconds, 1123)]
[TestCase("01:02:03", "01:02:04.1234", PeriodUnits.Ticks, 11234000)]
[TestCase("01:02:03", "01:02:04.1234", PeriodUnits.Nanoseconds, 1123400000)]
public void BetweenLocalTimes_SingleUnit(string startText, string endText, PeriodUnits units, long expectedValue)
{
    var start = LocalTimePattern.ExtendedIso.Parse(startText).Value;
    var end = LocalTimePattern.ExtendedIso.Parse(endText).Value;
    var actual = Period.Between(start, end, units);
    // PeriodBuilder's indexer assigns the value to whichever unit was requested.
    var expected = new PeriodBuilder { [units] = expectedValue }.Build();
    Assert.AreEqual(expected, actual);
}
[Test]
public void BetweenLocalTimes_MovingForwards()
{
    // Every available time unit down to ticks appears in the result.
    LocalTime start = new LocalTime(10, 0);
    LocalTime end = LocalTime.FromHourMinuteSecondMillisecondTick(15, 30, 45, 20, 5);
    Period expected = Period.FromHours(5) + Period.FromMinutes(30) + Period.FromSeconds(45) +
        Period.FromMilliseconds(20) + Period.FromTicks(5);
    Assert.AreEqual(expected, Period.Between(start, end));
}
[Test]
public void BetweenLocalTimes_MovingBackwards()
{
    // The reverse direction negates every component.
    LocalTime start = LocalTime.FromHourMinuteSecondMillisecondTick(15, 30, 45, 20, 5);
    LocalTime end = new LocalTime(10, 0);
    Period expected = Period.FromHours(-5) + Period.FromMinutes(-30) + Period.FromSeconds(-45) +
        Period.FromMilliseconds(-20) + Period.FromTicks(-5);
    Assert.AreEqual(expected, Period.Between(start, end));
}
[Test]
public void BetweenLocalTimes_MovingForwards_WithJustHours()
{
    // 11:30 -> 17:15 truncated to whole hours is 5.
    LocalTime start = new LocalTime(11, 30);
    LocalTime end = new LocalTime(17, 15);
    Assert.AreEqual(Period.FromHours(5), Period.Between(start, end, PeriodUnits.Hours));
}
[Test]
public void BetweenLocalTimes_MovingBackwards_WithJustHours()
{
    // 17:15 -> 11:30 truncated to whole hours is -5.
    LocalTime start = new LocalTime(17, 15);
    LocalTime end = new LocalTime(11, 30);
    Assert.AreEqual(Period.FromHours(-5), Period.Between(start, end, PeriodUnits.Hours));
}
[Test]
public void Addition_WithDifferent_PeriodTypes()
{
    // Adding periods with disjoint units keeps both components.
    Period hours = Period.FromHours(3);
    Period minutes = Period.FromMinutes(20);
    Period sum = hours + minutes;
    Assert.AreEqual(3, sum.Hours);
    Assert.AreEqual(20, sum.Minutes);
}
[Test]
public void Addition_With_IdenticalPeriodTypes()
{
    // Adding periods with the same unit sums their values.
    Period sum = Period.FromHours(3) + Period.FromHours(2);
    Assert.AreEqual(5, sum.Hours);
}
[Test]
public void Addition_DayCrossingMonthBoundary()
{
    // Feb 20th + 10 days rolls over into March.
    LocalDateTime start = new LocalDateTime(2010, 2, 20, 10, 0);
    Assert.AreEqual(new LocalDateTime(2010, 3, 2, 10, 0), start + Period.FromDays(10));
}
[Test]
public void Addition_OneYearOnLeapDay()
{
    // Feb 29th becomes Feb 28th in the following non-leap year.
    LocalDateTime leapDay = new LocalDateTime(2012, 2, 29, 10, 0);
    Assert.AreEqual(new LocalDateTime(2013, 2, 28, 10, 0), leapDay + Period.FromYears(1));
}
[Test]
public void Addition_FourYearsOnLeapDay()
{
    // Feb 29th is still valid four years later, so no truncation occurs.
    LocalDateTime leapDay = new LocalDateTime(2012, 2, 29, 10, 0);
    Assert.AreEqual(new LocalDateTime(2016, 2, 29, 10, 0), leapDay + Period.FromYears(4));
}
[Test]
public void Addition_YearMonthDay()
{
    // One year, one month, two days
    Period period = Period.FromYears(1) + Period.FromMonths(1) + Period.FromDays(2);
    LocalDateTime start = new LocalDateTime(2007, 1, 30, 0, 0);
    // Periods are added in order, so this becomes...
    // Add one year: Jan 30th 2008
    // Add one month: Feb 29th 2008
    // Add two days: March 2nd 2008
    // If we added the days first, we'd end up with March 1st instead.
    LocalDateTime result = start + period;
    Assert.AreEqual(new LocalDateTime(2008, 3, 2, 0, 0), result);
}
[Test]
public void Subtraction_WithDifferent_PeriodTypes()
{
    // Subtraction with disjoint units negates only the subtrahend's component.
    Period hours = Period.FromHours(3);
    Period minutes = Period.FromMinutes(20);
    Period difference = hours - minutes;
    Assert.AreEqual(3, difference.Hours);
    Assert.AreEqual(-20, difference.Minutes);
}
[Test]
public void Subtraction_With_IdenticalPeriodTypes()
{
    // Subtracting periods with the same unit subtracts their values.
    Period difference = Period.FromHours(3) - Period.FromHours(2);
    Assert.AreEqual(1, difference.Hours);
}
[Test]
public void Equality_WhenEqual()
{
    // Periods built the same way compare equal for each unit type.
    Assert.AreEqual(Period.FromHours(10), Period.FromHours(10));
    Assert.AreEqual(Period.FromMinutes(15), Period.FromMinutes(15));
    Assert.AreEqual(Period.FromDays(5), Period.FromDays(5));
}
[Test]
public void Equality_WithDifferentPeriodTypes_OnlyConsidersValues()
{
    // A period with an explicit zero minutes component equals one without it.
    Period withZeroMinutes = Period.FromMinutes(1) + Period.FromHours(1) - Period.FromMinutes(1);
    Assert.AreEqual(withZeroMinutes, Period.FromHours(1));
}
[Test]
public void Equality_WhenUnequal()
{
    // Equality does NOT normalize: 1 hour != 60 minutes. Nulls and foreign
    // types are unequal rather than throwing.
    Assert.IsFalse(Period.FromHours(10).Equals(Period.FromHours(20)));
    Assert.IsFalse(Period.FromMinutes(15).Equals(Period.FromSeconds(15)));
    Assert.IsFalse(Period.FromHours(1).Equals(Period.FromMinutes(60)));
    Assert.IsFalse(Period.FromHours(1).Equals(new object()));
    Assert.IsFalse(Period.FromHours(1).Equals(null));
    Assert.IsFalse(Period.FromHours(1).Equals((object?) null));
}
[Test]
public void EqualityOperators()
{
    // Full matrix of == and != over equal, unequal, and null operands.
    Period val1 = Period.FromHours(1);
    Period val2 = Period.FromHours(1);
    Period val3 = Period.FromHours(2);
    Period? val4 = null;
    Assert.IsTrue(val1 == val2);
    Assert.IsFalse(val1 == val3);
    Assert.IsFalse(val1 == val4);
    Assert.IsFalse(val4 == val1);
    Assert.IsTrue(val4 == null);
    Assert.IsTrue(null == val4);
    // != must be the exact inverse of == in every case above.
    Assert.IsFalse(val1 != val2);
    Assert.IsTrue(val1 != val3);
    Assert.IsTrue(val1 != val4);
    Assert.IsTrue(val4 != val1);
    Assert.IsFalse(val4 != null);
    Assert.IsFalse(null != val4);
}
[Test]
// Units of hours or smaller count as a time component; larger units do not.
[TestCase(PeriodUnits.Years, false)]
[TestCase(PeriodUnits.Weeks, false)]
[TestCase(PeriodUnits.Months, false)]
[TestCase(PeriodUnits.Days, false)]
[TestCase(PeriodUnits.Hours, true)]
[TestCase(PeriodUnits.Minutes, true)]
[TestCase(PeriodUnits.Seconds, true)]
[TestCase(PeriodUnits.Milliseconds, true)]
[TestCase(PeriodUnits.Ticks, true)]
[TestCase(PeriodUnits.Nanoseconds, true)]
public void HasTimeComponent_SingleValued(PeriodUnits unit, bool hasTimeComponent)
{
    var period = new PeriodBuilder {[unit] = 1}.Build();
    Assert.AreEqual(hasTimeComponent, period.HasTimeComponent);
}
[Test]
// Units of days or larger count as a date component; smaller units do not.
[TestCase(PeriodUnits.Years, true)]
[TestCase(PeriodUnits.Weeks, true)]
[TestCase(PeriodUnits.Months, true)]
[TestCase(PeriodUnits.Days, true)]
[TestCase(PeriodUnits.Hours, false)]
[TestCase(PeriodUnits.Minutes, false)]
[TestCase(PeriodUnits.Seconds, false)]
[TestCase(PeriodUnits.Milliseconds, false)]
[TestCase(PeriodUnits.Ticks, false)]
[TestCase(PeriodUnits.Nanoseconds, false)]
public void HasDateComponent_SingleValued(PeriodUnits unit, bool hasDateComponent)
{
    var period = new PeriodBuilder {[unit] = 1 }.Build();
    Assert.AreEqual(hasDateComponent, period.HasDateComponent);
}
[Test]
public void HasTimeComponent_Compound()
{
    // HasTimeComponent is true only when a time unit is present AND non-zero.
    LocalDateTime dt1 = new LocalDateTime(2000, 1, 1, 10, 45, 00);
    LocalDateTime dt2 = new LocalDateTime(2000, 2, 4, 11, 50, 00);
    // Case 1: Entire period is date-based (no time units available)
    Assert.IsFalse(Period.Between(dt1.Date, dt2.Date).HasTimeComponent);
    // Case 2: Period contains date and time units, but time units are all zero
    Assert.IsFalse(Period.Between(dt1.Date + LocalTime.Midnight, dt2.Date + LocalTime.Midnight).HasTimeComponent);
    // Case 3: Entire period is time-based, but 0. (Same local time twice here.)
    Assert.IsFalse(Period.Between(dt1.TimeOfDay, dt1.TimeOfDay).HasTimeComponent);
    // Case 4: Period contains date and time units, and some time units are non-zero
    Assert.IsTrue(Period.Between(dt1, dt2).HasTimeComponent);
    // Case 5: Entire period is time-based, and some time units are non-zero
    Assert.IsTrue(Period.Between(dt1.TimeOfDay, dt2.TimeOfDay).HasTimeComponent);
}
[Test]
public void HasDateComponent_Compound()
{
    // HasDateComponent is true only when a date unit is present AND non-zero.
    LocalDateTime dt1 = new LocalDateTime(2000, 1, 1, 10, 45, 00);
    LocalDateTime dt2 = new LocalDateTime(2000, 2, 4, 11, 50, 00);
    // Case 1: Entire period is time-based (no date units available)
    Assert.IsFalse(Period.Between(dt1.TimeOfDay, dt2.TimeOfDay).HasDateComponent);
    // Case 2: Period contains date and time units, but date units are all zero
    Assert.IsFalse(Period.Between(dt1, dt1.Date + dt2.TimeOfDay).HasDateComponent);
    // Case 3: Entire period is date-based, but 0. (Same local date twice here.)
    Assert.IsFalse(Period.Between(dt1.Date, dt1.Date).HasDateComponent);
    // Case 4: Period contains date and time units, and some date units are non-zero
    Assert.IsTrue(Period.Between(dt1, dt2).HasDateComponent);
    // Case 5: Entire period is date-based, and some time units are non-zero
    Assert.IsTrue(Period.Between(dt1.Date, dt2.Date).HasDateComponent);
}
[Test]
public void ToString_Positive()
{
    // Date and time parts are separated by the ISO 'T' designator.
    var text = (Period.FromDays(1) + Period.FromHours(2)).ToString();
    Assert.AreEqual("P1DT2H", text);
}
[Test]
public void ToString_AllUnits()
{
    // Every unit gets its own suffix, including the non-ISO s/t/n extensions.
    var text = new Period(1, 2, 3, 4, 5, 6, 7, 8, 9, 10).ToString();
    Assert.AreEqual("P1Y2M3W4DT5H6M7S8s9t10n", text);
}
[Test]
public void ToString_Negative()
{
    // Each negative component carries its own sign.
    var text = (Period.FromDays(-1) + Period.FromHours(-2)).ToString();
    Assert.AreEqual("P-1DT-2H", text);
}
[Test]
public void ToString_Mixed()
{
    // Signs are per-component, so mixed-sign periods render both as-is.
    var text = (Period.FromDays(-1) + Period.FromHours(2)).ToString();
    Assert.AreEqual("P-1DT2H", text);
}
[Test]
public void ToString_Zero()
{
    // A period with no components renders as just the ISO designator.
    var text = Period.Zero.ToString();
    Assert.AreEqual("P", text);
}
[Test]
public void ToBuilder_SingleUnit()
{
    // Round-tripping through a builder preserves a single-unit period.
    Period original = Period.FromHours(5);
    Assert.AreEqual(original, original.ToBuilder().Build());
}
[Test]
public void ToBuilder_MultipleUnits()
{
    // Round-tripping through a builder preserves a multi-unit period.
    Period original = Period.FromHours(5) + Period.FromWeeks(2);
    Assert.AreEqual(original, original.ToBuilder().Build());
}
[Test]
public void Normalize_Weeks()
{
    // Weeks are folded into days: 2 weeks + 5 days = 19 days.
    Period input = new PeriodBuilder { Weeks = 2, Days = 5 }.Build();
    Period want = new PeriodBuilder { Days = 19 }.Build();
    Assert.AreEqual(want, input.Normalize());
}
[Test]
public void Normalize_Hours()
{
    // 25 hours carries one day: 1d25h -> 2d1h.
    Period input = new PeriodBuilder { Hours = 25, Days = 1 }.Build();
    Period want = new PeriodBuilder { Hours = 1, Days = 2 }.Build();
    Assert.AreEqual(want, input.Normalize());
}
[Test]
public void Normalize_Minutes()
{
    // 150 minutes carries two hours: 1h150m -> 3h30m.
    Period input = new PeriodBuilder { Hours = 1, Minutes = 150 }.Build();
    Period want = new PeriodBuilder { Hours = 3, Minutes = 30 }.Build();
    Assert.AreEqual(want, input.Normalize());
}
[Test]
public void Normalize_Seconds()
{
    // 150 seconds carries two minutes: 1m150s -> 3m30s.
    Period input = new PeriodBuilder { Minutes = 1, Seconds = 150 }.Build();
    Period want = new PeriodBuilder { Minutes = 3, Seconds = 30 }.Build();
    Assert.AreEqual(want, input.Normalize());
}
[Test]
public void Normalize_Milliseconds()
{
    // 1500 milliseconds carries one second: 1s1500ms -> 2s500ms.
    Period input = new PeriodBuilder { Seconds = 1, Milliseconds = 1500 }.Build();
    Period want = new PeriodBuilder { Seconds = 2, Milliseconds = 500 }.Build();
    Assert.AreEqual(want, input.Normalize());
}
[Test]
public void Normalize_Ticks()
{
    // Ticks fold into milliseconds with the remainder going to nanoseconds.
    Period input = new PeriodBuilder { Milliseconds = 1, Ticks = 15000 }.Build();
    Period want = new PeriodBuilder { Milliseconds = 2, Ticks = 0, Nanoseconds = 500000 }.Build();
    Assert.AreEqual(want, input.Normalize());
}
[Test]
public void Normalize_Nanoseconds()
{
    // A tick is 100ns, so 1 tick + 150ns normalizes to 250ns.
    Period input = new PeriodBuilder { Ticks = 1, Nanoseconds = 150 }.Build();
    Period want = new PeriodBuilder { Nanoseconds = 250 }.Build();
    Assert.AreEqual(want, input.Normalize());
}
[Test]
public void Normalize_MultipleFields()
{
    // Carries cascade through several units: 1h119m150s -> 3h1m30s.
    Period input = new PeriodBuilder { Hours = 1, Minutes = 119, Seconds = 150 }.Build();
    Period want = new PeriodBuilder { Hours = 3, Minutes = 1, Seconds = 30 }.Build();
    Assert.AreEqual(want, input.Normalize());
}
[Test]
public void Normalize_AllNegative()
{
    // An all-negative period normalizes to the negation of the positive case.
    Period input = new PeriodBuilder { Hours = -1, Minutes = -119, Seconds = -150 }.Build();
    Period want = new PeriodBuilder { Hours = -3, Minutes = -1, Seconds = -30 }.Build();
    Assert.AreEqual(want, input.Normalize());
}
[Test]
public void Normalize_MixedSigns_PositiveResult()
{
    // 3h - 1m is positive overall, so all normalized components are non-negative.
    Period input = new PeriodBuilder { Hours = 3, Minutes = -1 }.Build();
    Period want = new PeriodBuilder { Hours = 2, Minutes = 59 }.Build();
    Assert.AreEqual(want, input.Normalize());
}
[Test]
public void Normalize_MixedSigns_NegativeResult()
{
    // 1h - 121m is negative overall, so all normalized components are non-positive.
    Period input = new PeriodBuilder { Hours = 1, Minutes = -121 }.Build();
    Period want = new PeriodBuilder { Hours = -1, Minutes = -1 }.Build();
    Assert.AreEqual(want, input.Normalize());
}
[Test]
public void Normalize_DoesntAffectMonthsAndYears()
{
    // Years and months have no fixed length, so normalization leaves them alone.
    Period input = new PeriodBuilder { Years = 2, Months = 1, Days = 400 }.Build();
    Assert.AreEqual(input, input.Normalize());
}
[Test]
public void Normalize_ZeroResult()
{
    // A period with only zero-valued components normalizes to Period.Zero.
    Period input = new PeriodBuilder { Years = 0 }.Build();
    Assert.AreEqual(Period.Zero, input.Normalize());
}
[Test]
public void Normalize_Overflow()
{
    // long.MaxValue hours cannot be expressed in the normalized representation.
    Period huge = Period.FromHours(long.MaxValue);
    Assert.Throws<OverflowException>(() => huge.Normalize());
}
[Test]
public void ToString_SingleUnit()
{
    // A time-only period still carries the 'T' designator.
    var text = Period.FromHours(5).ToString();
    Assert.AreEqual("PT5H", text);
}
[Test]
public void ToString_MultipleUnits()
{
    // Multiple time components appear in descending unit order after 'T'.
    var text = new PeriodBuilder { Hours = 5, Minutes = 30 }.Build().ToString();
    Assert.AreEqual("PT5H30M", text);
}
[Test]
public void ToDuration_InvalidWithYears()
{
    // Years have no fixed length, so they cannot convert to a duration.
    Period withYears = Period.FromYears(1);
    Assert.Throws<InvalidOperationException>(() => withYears.ToDuration());
}
[Test]
public void ToDuration_InvalidWithMonths()
{
    // Months have no fixed length, so they cannot convert to a duration.
    Period withMonths = Period.FromMonths(1);
    Assert.Throws<InvalidOperationException>(() => withMonths.ToDuration());
}
[Test]
public void ToDuration_ValidAllAcceptableUnits()
{
    // Every fixed-length unit (weeks down to ticks) converts; the expected tick
    // count is recomputed independently from the NodaConstants factors.
    Period period = new PeriodBuilder
    {
        Weeks = 1,
        Days = 2,
        Hours = 3,
        Minutes = 4,
        Seconds = 5,
        Milliseconds = 6,
        Ticks = 7
    }.Build();
    Assert.AreEqual(
        1 * NodaConstants.TicksPerWeek +
        2 * NodaConstants.TicksPerDay +
        3 * NodaConstants.TicksPerHour +
        4 * NodaConstants.TicksPerMinute +
        5 * NodaConstants.TicksPerSecond +
        6 * NodaConstants.TicksPerMillisecond + 7,
        period.ToDuration().BclCompatibleTicks);
}
[Test]
public void ToDuration_ValidWithZeroValuesInMonthYearUnits()
{
    // Year/month components that are present but zero-valued (created by the
    // period - period trick) must not block the conversion.
    Period period = Period.FromMonths(1) + Period.FromYears(1);
    period = period - period + Period.FromDays(1);
    Assert.IsFalse(period.HasTimeComponent);
    Assert.AreEqual(Duration.OneDay, period.ToDuration());
}
[Test]
[Category("Overflow")]
public void ToDuration_Overflow()
{
    // long.MaxValue seconds exceeds the representable duration range.
    Period huge = Period.FromSeconds(long.MaxValue);
    Assert.Throws<OverflowException>(() => huge.ToDuration());
}
[Test]
[Category("Overflow")]
public void ToDuration_Overflow_WhenPossiblyValid()
{
    // These two should pretty much cancel each other out - and would, if we had a 128-bit integer
    // representation to use.
    Period period = Period.FromSeconds(long.MaxValue) + Period.FromMinutes(long.MinValue / 60);
    Assert.Throws<OverflowException>(() => period.ToDuration());
}
[Test]
public void NormalizingEqualityComparer_NullToNonNull()
{
    // Null never equals a real period, in either argument position.
    Period nonNull = Period.FromYears(1);
    Assert.IsFalse(Period.NormalizingEqualityComparer.Equals(nonNull, null));
    Assert.IsFalse(Period.NormalizingEqualityComparer.Equals(null, nonNull));
}
[Test]
public void NormalizingEqualityComparer_NullToNull()
{
    // Two nulls compare equal, per the IEqualityComparer contract.
    Assert.IsTrue(Period.NormalizingEqualityComparer.Equals(null, null));
}
[Test]
public void NormalizingEqualityComparer_PeriodToItself()
{
    // Reflexivity: any period equals itself.
    Period same = Period.FromYears(1);
    Assert.IsTrue(Period.NormalizingEqualityComparer.Equals(same, same));
}
[Test]
public void NormalizingEqualityComparer_NonEqualAfterNormalization()
{
    // 2 hours != 150 minutes even after normalization.
    Period twoHours = Period.FromHours(2);
    Period longerInMinutes = Period.FromMinutes(150);
    Assert.IsFalse(Period.NormalizingEqualityComparer.Equals(twoHours, longerInMinutes));
}
[Test]
public void NormalizingEqualityComparer_EqualAfterNormalization()
{
    // Unlike Period.Equals, the normalizing comparer treats 2h and 120m as equal.
    Period twoHours = Period.FromHours(2);
    Period sameInMinutes = Period.FromMinutes(120);
    Assert.IsTrue(Period.NormalizingEqualityComparer.Equals(twoHours, sameInMinutes));
}
[Test]
public void NormalizingEqualityComparer_GetHashCodeAfterNormalization()
{
    // Equal-after-normalization periods must hash identically.
    Period twoHours = Period.FromHours(2);
    Period sameInMinutes = Period.FromMinutes(120);
    Assert.AreEqual(Period.NormalizingEqualityComparer.GetHashCode(twoHours),
        Period.NormalizingEqualityComparer.GetHashCode(sameInMinutes));
}
[Test]
public void Comparer_NullWithNull()
{
    // Two nulls compare as equal.
    var comparer = Period.CreateComparer(new LocalDateTime(2000, 1, 1, 0, 0));
    Assert.AreEqual(0, comparer.Compare(null, null));
}
[Test]
public void Comparer_NullWithNonNull()
{
    // Null sorts before any real period.
    var comparer = Period.CreateComparer(new LocalDateTime(2000, 1, 1, 0, 0));
    Assert.That(comparer.Compare(null, Period.Zero), Is.LessThan(0));
}
[Test]
public void Comparer_NonNullWithNull()
{
    // Any real period sorts after null.
    var comparer = Period.CreateComparer(new LocalDateTime(2000, 1, 1, 0, 0));
    Assert.That(comparer.Compare(Period.Zero, null), Is.GreaterThan(0));
}
[Test]
public void Comparer_DurationablePeriods()
{
    // Fixed-length periods compare by elapsed time: 25 hours > 1 day.
    var comparer = Period.CreateComparer(new LocalDateTime(2000, 1, 1, 0, 0));
    var longer = Period.FromHours(25);
    var shorter = Period.FromDays(1);
    Assert.That(comparer.Compare(longer, shorter), Is.GreaterThan(0));
    Assert.That(comparer.Compare(shorter, longer), Is.LessThan(0));
    Assert.AreEqual(0, comparer.Compare(longer, longer));
}
[Test]
public void Comparer_NonDurationablePeriods()
{
var month = Period.FromMonths(1);
var days = Period.FromDays(30);
// At the start of January, a month is longer than 30 days
var januaryComparer = Period.CreateComparer(new LocalDateTime(2000, 1, 1, 0, 0));
Assert.That(januaryComparer.Compare(month, days), Is.GreaterThan(0));
Assert.That(januaryComparer.Compare(days, month), Is.LessThan(0));
Assert.AreEqual(0, januaryComparer.Compare(month, month));
// At the start of February, a month is shorter than 30 days
var februaryComparer = Period.CreateComparer(new LocalDateTime(2000, 2, 1, 0, 0));
Assert.That(februaryComparer.Compare(month, days), Is.LessThan(0));
Assert.That(februaryComparer.Compare(days, month), Is.GreaterThan(0));
Assert.AreEqual(0, februaryComparer.Compare(month, month));
}
// Between over the full ISO date range must not overflow for any single unit
// (except nanoseconds, which legitimately overflows — see the next test).
[Test]
[TestCaseSource(nameof(AllPeriodUnits))]
public void Between_ExtremeValues(PeriodUnits units)
{
    // We can't use None, and Nanoseconds will *correctly* overflow.
    if (units == PeriodUnits.None || units == PeriodUnits.Nanoseconds)
    {
        return;
    }
    var minValue = LocalDate.MinIsoValue.At(LocalTime.MinValue);
    var maxValue = LocalDate.MaxIsoValue.At(LocalTime.MaxValue);
    Period.Between(minValue, maxValue, units);
}

// The full ISO range expressed in nanoseconds exceeds long.MaxValue, so this must throw.
[Test]
public void Between_ExtremeValues_Overflow()
{
    var minValue = LocalDate.MinIsoValue.At(LocalTime.MinValue);
    var maxValue = LocalDate.MaxIsoValue.At(LocalTime.MaxValue);
    Assert.Throws<OverflowException>(() => Period.Between(minValue, maxValue, PeriodUnits.Nanoseconds));
}

// Truncation towards the start point: the forward and backward results are not
// simply negations of each other when the time-of-day straddles the unit boundary.
[Test]
[TestCase("2015-02-28T16:00:00", "2016-02-29T08:00:00", PeriodUnits.Years, 1, 0)]
[TestCase("2015-02-28T16:00:00", "2016-02-29T08:00:00", PeriodUnits.Months, 12, -11)]
[TestCase("2014-01-01T16:00:00", "2014-01-03T08:00:00", PeriodUnits.Days, 1, -1)]
[TestCase("2014-01-01T16:00:00", "2014-01-03T08:00:00", PeriodUnits.Hours, 40, -40)]
public void Between_LocalDateTime_AwkwardTimeOfDayWithSingleUnit(string startText, string endText, PeriodUnits units, int expectedForward, int expectedBackward)
{
    LocalDateTime start = LocalDateTimePattern.ExtendedIso.Parse(startText).Value;
    LocalDateTime end = LocalDateTimePattern.ExtendedIso.Parse(endText).Value;
    Period forward = Period.Between(start, end, units);
    Assert.AreEqual(expectedForward, forward.ToBuilder()[units]);
    Period backward = Period.Between(end, start, units);
    Assert.AreEqual(expectedBackward, backward.ToBuilder()[units]);
}

// Identical endpoints return the cached Period.Zero instance (AreSame checks identity).
[Test]
public void Between_LocalDateTime_SameValue()
{
    LocalDateTime start = new LocalDateTime(2014, 1, 1, 16, 0, 0);
    Assert.AreSame(Period.Zero, Period.Between(start, start));
}

// Mixed date+time units: the remainder after whole years/months/days shows up as hours.
[Test]
public void Between_LocalDateTime_AwkwardTimeOfDayWithMultipleUnits()
{
    LocalDateTime start = new LocalDateTime(2014, 1, 1, 16, 0, 0);
    LocalDateTime end = new LocalDateTime(2015, 2, 3, 8, 0, 0);
    Period actual = Period.Between(start, end, PeriodUnits.YearMonthDay | PeriodUnits.AllTimeUnits);
    Period expected = new PeriodBuilder { Years = 1, Months = 1, Days = 1, Hours = 16 }.Build();
    Assert.AreEqual(expected, actual);
}
// FromNanoseconds round-trips the raw nanosecond count.
[Test]
public void FromNanoseconds()
{
    var period = Period.FromNanoseconds(1234567890L);
    Assert.AreEqual(1234567890L, period.Nanoseconds);
}

// Addition is per-unit: hours and minutes are kept in separate components,
// so long.MaxValue hours + 60 minutes does not overflow.
[Test]
public void AddPeriodToPeriod_NoOverflow()
{
    Period p1 = Period.FromHours(long.MaxValue);
    Period p2 = Period.FromMinutes(60);
    Assert.AreEqual(new PeriodBuilder { Hours = long.MaxValue, Minutes = 60 }.Build(), p1 + p2);
}

// Same-unit addition can overflow. GetHashCode() is called to force the result to be
// observed — presumably the addition is evaluated lazily; TODO confirm against Period.
[Test]
public void AddPeriodToPeriod_Overflow()
{
    Period p1 = Period.FromHours(long.MaxValue);
    Period p2 = Period.FromHours(1);
    Assert.Throws<OverflowException>(() => (p1 + p2).GetHashCode());
}
/// <summary>
/// Parses the compact round-trip period representation into a <see cref="Period"/>.
/// </summary>
private static Period Parse(string text) => PeriodPattern.Roundtrip.Parse(text).Value;
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Text;
using System;
using System.Runtime.Serialization;
namespace System.Text
{
// An Encoder is used to encode a sequence of blocks of characters into
// a sequence of blocks of bytes. Following instantiation of an encoder,
// sequential blocks of characters are converted into blocks of bytes through
// calls to the GetBytes method. The encoder maintains state between the
// conversions, allowing it to correctly encode character sequences that span
// adjacent blocks.
//
// Instances of specific implementations of the Encoder abstract base
// class are typically obtained through calls to the GetEncoder method
// of Encoding objects.
//
internal class EncoderNLS : Encoder, ISerializable
{
    // Need a place for the last left over character, most of our encodings use this
    internal char charLeftOver;
    // The encoding this encoder belongs to; all real conversion work is delegated to it.
    protected EncodingNLS m_encoding;
    // Whether the current call asked for a flush (set per call in GetByteCount/GetBytes/Convert).
    protected bool m_mustFlush;
    // True for GetByteCount/GetBytes (throw on small buffers), false for Convert (report partial progress).
    internal bool m_throwOnOverflow;
    // Number of chars consumed by the last Convert call; written back by the encoding.
    internal int m_charsUsed;
    internal EncoderFallback m_fallback;
    // Lazily created; stays null until FallbackBuffer is first requested.
    internal EncoderFallbackBuffer? m_fallbackBuffer;

    // Captures the encoding's fallback and starts from a clean state.
    internal EncoderNLS(EncodingNLS encoding)
    {
        m_encoding = encoding;
        m_fallback = m_encoding.EncoderFallback;
        Reset();
    }

    // Binary serialization of encoders is intentionally unsupported on this platform.
    void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context)
    {
        throw new PlatformNotSupportedException();
    }

    // Shadows Encoder.Fallback to expose the field without the base class's setter logic.
    internal new EncoderFallback Fallback
    {
        get { return m_fallback; }
    }

    // True only if a fallback buffer has actually been created (cheaper than FallbackBuffer != null,
    // which would force creation).
    internal bool InternalHasFallbackBuffer
    {
        get
        {
            return m_fallbackBuffer != null;
        }
    }

    // Lazily creates the fallback buffer; falls back to the replacement fallback if none is set.
    public new EncoderFallbackBuffer FallbackBuffer
    {
        get
        {
            if (m_fallbackBuffer == null)
            {
                if (m_fallback != null)
                    m_fallbackBuffer = m_fallback.CreateFallbackBuffer();
                else
                    m_fallbackBuffer = EncoderFallback.ReplacementFallback.CreateFallbackBuffer();
            }
            return m_fallbackBuffer;
        }
    }

    // Clears the leftover surrogate half and any pending fallback characters.
    public override void Reset()
    {
        charLeftOver = (char)0;
        if (m_fallbackBuffer != null)
            m_fallbackBuffer.Reset();
    }

    // Array-based byte count: validates arguments, then forwards to the pointer overload.
    public override unsafe int GetByteCount(char[] chars, int index, int count, bool flush)
    {
        // Validate input parameters
        if (chars == null)
            throw new ArgumentNullException(nameof(chars), SR.ArgumentNull_Array);
        if (index < 0 || count < 0)
            throw new ArgumentOutOfRangeException((index < 0 ? nameof(index) : nameof(count)), SR.ArgumentOutOfRange_NeedNonNegNum);
        if (chars.Length - index < count)
            throw new ArgumentOutOfRangeException(nameof(chars), SR.ArgumentOutOfRange_IndexCountBuffer);
        // Avoid empty input problem
        // (fixed on a zero-length array is invalid; count is still 0 so the dummy char is never read)
        if (chars.Length == 0)
            chars = new char[1];
        // Just call the pointer version
        int result = -1;
        fixed (char* pChars = &chars[0])
        {
            result = GetByteCount(pChars + index, count, flush);
        }
        return result;
    }

    // Pointer-based byte count: records the flush/throw mode, then asks the encoding.
    public override unsafe int GetByteCount(char* chars, int count, bool flush)
    {
        // Validate input parameters
        if (chars == null)
            throw new ArgumentNullException(nameof(chars), SR.ArgumentNull_Array);
        if (count < 0)
            throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
        m_mustFlush = flush;
        m_throwOnOverflow = true;
        return m_encoding.GetByteCount(chars, count, this);
    }

    // Array-based encode: validates arguments, then forwards to the pointer overload.
    public override unsafe int GetBytes(char[] chars, int charIndex, int charCount,
                                        byte[] bytes, int byteIndex, bool flush)
    {
        // Validate parameters
        if (chars == null || bytes == null)
            throw new ArgumentNullException((chars == null ? nameof(chars) : nameof(bytes)), SR.ArgumentNull_Array);
        if (charIndex < 0 || charCount < 0)
            throw new ArgumentOutOfRangeException((charIndex < 0 ? nameof(charIndex) : nameof(charCount)), SR.ArgumentOutOfRange_NeedNonNegNum);
        if (chars.Length - charIndex < charCount)
            throw new ArgumentOutOfRangeException(nameof(chars), SR.ArgumentOutOfRange_IndexCountBuffer);
        if (byteIndex < 0 || byteIndex > bytes.Length)
            throw new ArgumentOutOfRangeException(nameof(byteIndex), SR.ArgumentOutOfRange_Index);
        if (chars.Length == 0)
            chars = new char[1];
        // byteCount is computed from the *original* array, before the dummy substitution below.
        int byteCount = bytes.Length - byteIndex;
        if (bytes.Length == 0)
            bytes = new byte[1];
        // Just call pointer version
        fixed (char* pChars = &chars[0])
        fixed (byte* pBytes = &bytes[0])
            // Remember that charCount is # to decode, not size of array.
            return GetBytes(pChars + charIndex, charCount, pBytes + byteIndex, byteCount, flush);
    }

    // Pointer-based encode: records the flush/throw mode, then asks the encoding.
    public override unsafe int GetBytes(char* chars, int charCount, byte* bytes, int byteCount, bool flush)
    {
        // Validate parameters
        if (chars == null || bytes == null)
            throw new ArgumentNullException((chars == null ? nameof(chars) : nameof(bytes)), SR.ArgumentNull_Array);
        if (byteCount < 0 || charCount < 0)
            throw new ArgumentOutOfRangeException((byteCount < 0 ? nameof(byteCount) : nameof(charCount)), SR.ArgumentOutOfRange_NeedNonNegNum);
        m_mustFlush = flush;
        m_throwOnOverflow = true;
        return m_encoding.GetBytes(chars, charCount, bytes, byteCount, this);
    }

    // This method is used when your output buffer might not be large enough for the entire result.
    // Just call the pointer version. (This gets bytes)
    public override unsafe void Convert(char[] chars, int charIndex, int charCount,
                                        byte[] bytes, int byteIndex, int byteCount, bool flush,
                                        out int charsUsed, out int bytesUsed, out bool completed)
    {
        // Validate parameters
        if (chars == null || bytes == null)
            throw new ArgumentNullException((chars == null ? nameof(chars) : nameof(bytes)), SR.ArgumentNull_Array);
        if (charIndex < 0 || charCount < 0)
            throw new ArgumentOutOfRangeException((charIndex < 0 ? nameof(charIndex) : nameof(charCount)), SR.ArgumentOutOfRange_NeedNonNegNum);
        if (byteIndex < 0 || byteCount < 0)
            throw new ArgumentOutOfRangeException((byteIndex < 0 ? nameof(byteIndex) : nameof(byteCount)), SR.ArgumentOutOfRange_NeedNonNegNum);
        if (chars.Length - charIndex < charCount)
            throw new ArgumentOutOfRangeException(nameof(chars), SR.ArgumentOutOfRange_IndexCountBuffer);
        if (bytes.Length - byteIndex < byteCount)
            throw new ArgumentOutOfRangeException(nameof(bytes), SR.ArgumentOutOfRange_IndexCountBuffer);
        // Avoid empty input problem
        if (chars.Length == 0)
            chars = new char[1];
        if (bytes.Length == 0)
            bytes = new byte[1];
        // Just call the pointer version (can't do this for non-msft encoders)
        fixed (char* pChars = &chars[0])
        {
            fixed (byte* pBytes = &bytes[0])
            {
                Convert(pChars + charIndex, charCount, pBytes + byteIndex, byteCount, flush,
                    out charsUsed, out bytesUsed, out completed);
            }
        }
    }

    // This is the version that uses pointers. We call the base encoding worker function
    // after setting our appropriate internal variables. This is getting bytes
    public override unsafe void Convert(char* chars, int charCount,
                                        byte* bytes, int byteCount, bool flush,
                                        out int charsUsed, out int bytesUsed, out bool completed)
    {
        // Validate input parameters
        if (bytes == null || chars == null)
            throw new ArgumentNullException(bytes == null ? nameof(bytes) : nameof(chars), SR.ArgumentNull_Array);
        if (charCount < 0 || byteCount < 0)
            throw new ArgumentOutOfRangeException((charCount < 0 ? nameof(charCount) : nameof(byteCount)), SR.ArgumentOutOfRange_NeedNonNegNum);
        // We don't want to throw
        m_mustFlush = flush;
        m_throwOnOverflow = false;
        m_charsUsed = 0;
        // Do conversion
        bytesUsed = m_encoding.GetBytes(chars, charCount, bytes, byteCount, this);
        charsUsed = m_charsUsed;
        // Its completed if they've used what they wanted AND if they didn't want flush or if we are flushed
        completed = (charsUsed == charCount) && (!flush || !HasState) &&
            (m_fallbackBuffer == null || m_fallbackBuffer.Remaining == 0);
        // Our data thingies are now full, we can return
    }

    public Encoding Encoding
    {
        get
        {
            return m_encoding;
        }
    }

    public bool MustFlush
    {
        get
        {
            return m_mustFlush;
        }
    }

    // Anything left in our encoder?
    internal virtual bool HasState
    {
        get
        {
            return (charLeftOver != (char)0);
        }
    }

    // Allow encoding to clear our must flush instead of throwing (in ThrowBytesOverflow)
    internal void ClearMustFlush()
    {
        m_mustFlush = false;
    }
}
}
| |
using Lucene.Net.Index;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.CompilerServices;
namespace Lucene.Net.Codecs
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using BytesRef = Lucene.Net.Util.BytesRef;
using DocsAndPositionsEnum = Lucene.Net.Index.DocsAndPositionsEnum;
using DocsEnum = Lucene.Net.Index.DocsEnum;
using IndexOptions = Lucene.Net.Index.IndexOptions;
using FieldInfo = Lucene.Net.Index.FieldInfo; // javadocs
using FixedBitSet = Lucene.Net.Util.FixedBitSet;
using MergeState = Lucene.Net.Index.MergeState;
using MultiDocsAndPositionsEnum = Lucene.Net.Index.MultiDocsAndPositionsEnum;
using MultiDocsEnum = Lucene.Net.Index.MultiDocsEnum;
using TermsEnum = Lucene.Net.Index.TermsEnum;
/// <summary>
/// Abstract API that consumes terms for an individual field.
/// <para/>
/// The lifecycle is:
/// <list type="number">
/// <item><description>TermsConsumer is returned for each field
/// by <see cref="FieldsConsumer.AddField(FieldInfo)"/>.</description></item>
/// <item><description>TermsConsumer returns a <see cref="PostingsConsumer"/> for
/// each term in <see cref="StartTerm(BytesRef)"/>.</description></item>
/// <item><description>When the producer (e.g. IndexWriter)
/// is done adding documents for the term, it calls
/// <see cref="FinishTerm(BytesRef, TermStats)"/>, passing in
/// the accumulated term statistics.</description></item>
/// <item><description>Producer calls <see cref="Finish(long, long, int)"/> with
/// the accumulated collection statistics when it is finished
/// adding terms to the field.</description></item>
/// </list>
/// <para/>
/// @lucene.experimental
/// </summary>
public abstract class TermsConsumer
{
    /// <summary>
    /// Sole constructor. (For invocation by subclass
    /// constructors, typically implicit.)
    /// </summary>
    protected internal TermsConsumer()
    {
    }

    /// <summary>
    /// Starts a new term in this field; this may be called
    /// with no corresponding call to finish if the term had
    /// no docs.
    /// </summary>
    public abstract PostingsConsumer StartTerm(BytesRef text);

    /// <summary>
    /// Finishes the current term; numDocs must be > 0.
    /// <c>stats.TotalTermFreq</c> will be -1 when term
    /// frequencies are omitted for the field.
    /// </summary>
    public abstract void FinishTerm(BytesRef text, TermStats stats);

    /// <summary>
    /// Called when we are done adding terms to this field.
    /// <paramref name="sumTotalTermFreq"/> will be -1 when term
    /// frequencies are omitted for the field.
    /// </summary>
    public abstract void Finish(long sumTotalTermFreq, long sumDocFreq, int docCount);

    /// <summary>
    /// Gets the <see cref="T:IComparer{BytesRef}"/> used to sort terms
    /// before feeding to this API.
    /// </summary>
    public abstract IComparer<BytesRef> Comparer { get; }

    // Reusable mapping enums; allocated lazily on first merge and reused across terms.
    private MappingMultiDocsEnum docsEnum;
    private MappingMultiDocsEnum docsAndFreqsEnum;
    private MappingMultiDocsAndPositionsEnum postingsEnum;

    /// <summary>
    /// Default merge impl. Iterates every term from <paramref name="termsEnum"/>, pulls the
    /// appropriate postings enum for the field's <paramref name="indexOptions"/> level
    /// (docs only / +freqs / +positions / +offsets), merges its postings via
    /// <see cref="PostingsConsumer.Merge"/>, and finally calls
    /// <see cref="Finish(long, long, int)"/> with the accumulated field statistics.
    /// </summary>
    [MethodImpl(MethodImplOptions.NoInlining)]
    public virtual void Merge(MergeState mergeState, IndexOptions indexOptions, TermsEnum termsEnum)
    {
        BytesRef term;
        Debug.Assert(termsEnum != null);
        long sumTotalTermFreq = 0;
        long sumDocFreq = 0;
        // Throttle for merge-abort checks: we only ping CheckAbort after ~60k doc freqs.
        long sumDFsinceLastAbortCheck = 0;
        // Tracks which docs were seen at least once; its cardinality becomes docCount in Finish.
        FixedBitSet visitedDocs = new FixedBitSet(mergeState.SegmentInfo.DocCount);
        if (indexOptions == IndexOptions.DOCS_ONLY)
        {
            if (docsEnum == null)
            {
                docsEnum = new MappingMultiDocsEnum();
            }
            docsEnum.MergeState = mergeState;
            MultiDocsEnum docsEnumIn = null;
            while ((term = termsEnum.Next()) != null)
            {
                // We can pass null for liveDocs, because the
                // mapping enum will skip the non-live docs:
                docsEnumIn = (MultiDocsEnum)termsEnum.Docs(null, docsEnumIn, DocsFlags.NONE);
                if (docsEnumIn != null)
                {
                    docsEnum.Reset(docsEnumIn);
                    PostingsConsumer postingsConsumer = StartTerm(term);
                    TermStats stats = postingsConsumer.Merge(mergeState, indexOptions, docsEnum, visitedDocs);
                    if (stats.DocFreq > 0)
                    {
                        FinishTerm(term, stats);
                        // NOTE: DocFreq (not TotalTermFreq) is accumulated here; the value is
                        // discarded anyway — Finish receives -1 for DOCS_ONLY (see below).
                        sumTotalTermFreq += stats.DocFreq;
                        sumDFsinceLastAbortCheck += stats.DocFreq;
                        sumDocFreq += stats.DocFreq;
                        if (sumDFsinceLastAbortCheck > 60000)
                        {
                            mergeState.CheckAbort.Work(sumDFsinceLastAbortCheck / 5.0);
                            sumDFsinceLastAbortCheck = 0;
                        }
                    }
                }
            }
        }
        else if (indexOptions == IndexOptions.DOCS_AND_FREQS)
        {
            if (docsAndFreqsEnum == null)
            {
                docsAndFreqsEnum = new MappingMultiDocsEnum();
            }
            docsAndFreqsEnum.MergeState = mergeState;
            MultiDocsEnum docsAndFreqsEnumIn = null;
            while ((term = termsEnum.Next()) != null)
            {
                // We can pass null for liveDocs, because the
                // mapping enum will skip the non-live docs:
                docsAndFreqsEnumIn = (MultiDocsEnum)termsEnum.Docs(null, docsAndFreqsEnumIn);
                Debug.Assert(docsAndFreqsEnumIn != null);
                docsAndFreqsEnum.Reset(docsAndFreqsEnumIn);
                PostingsConsumer postingsConsumer = StartTerm(term);
                TermStats stats = postingsConsumer.Merge(mergeState, indexOptions, docsAndFreqsEnum, visitedDocs);
                if (stats.DocFreq > 0)
                {
                    FinishTerm(term, stats);
                    sumTotalTermFreq += stats.TotalTermFreq;
                    sumDFsinceLastAbortCheck += stats.DocFreq;
                    sumDocFreq += stats.DocFreq;
                    if (sumDFsinceLastAbortCheck > 60000)
                    {
                        mergeState.CheckAbort.Work(sumDFsinceLastAbortCheck / 5.0);
                        sumDFsinceLastAbortCheck = 0;
                    }
                }
            }
        }
        else if (indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)
        {
            if (postingsEnum == null)
            {
                postingsEnum = new MappingMultiDocsAndPositionsEnum();
            }
            postingsEnum.MergeState = mergeState;
            MultiDocsAndPositionsEnum postingsEnumIn = null;
            while ((term = termsEnum.Next()) != null)
            {
                // We can pass null for liveDocs, because the
                // mapping enum will skip the non-live docs:
                // Positions-only level: request payloads but not offsets.
                postingsEnumIn = (MultiDocsAndPositionsEnum)termsEnum.DocsAndPositions(null, postingsEnumIn, DocsAndPositionsFlags.PAYLOADS);
                Debug.Assert(postingsEnumIn != null);
                postingsEnum.Reset(postingsEnumIn);
                PostingsConsumer postingsConsumer = StartTerm(term);
                TermStats stats = postingsConsumer.Merge(mergeState, indexOptions, postingsEnum, visitedDocs);
                if (stats.DocFreq > 0)
                {
                    FinishTerm(term, stats);
                    sumTotalTermFreq += stats.TotalTermFreq;
                    sumDFsinceLastAbortCheck += stats.DocFreq;
                    sumDocFreq += stats.DocFreq;
                    if (sumDFsinceLastAbortCheck > 60000)
                    {
                        mergeState.CheckAbort.Work(sumDFsinceLastAbortCheck / 5.0);
                        sumDFsinceLastAbortCheck = 0;
                    }
                }
            }
        }
        else
        {
            Debug.Assert(indexOptions == IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS);
            if (postingsEnum == null)
            {
                postingsEnum = new MappingMultiDocsAndPositionsEnum();
            }
            postingsEnum.MergeState = mergeState;
            MultiDocsAndPositionsEnum postingsEnumIn = null;
            while ((term = termsEnum.Next()) != null)
            {
                // We can pass null for liveDocs, because the
                // mapping enum will skip the non-live docs:
                // Default flags here include offsets (unlike the positions-only branch above).
                postingsEnumIn = (MultiDocsAndPositionsEnum)termsEnum.DocsAndPositions(null, postingsEnumIn);
                Debug.Assert(postingsEnumIn != null);
                postingsEnum.Reset(postingsEnumIn);
                PostingsConsumer postingsConsumer = StartTerm(term);
                TermStats stats = postingsConsumer.Merge(mergeState, indexOptions, postingsEnum, visitedDocs);
                if (stats.DocFreq > 0)
                {
                    FinishTerm(term, stats);
                    sumTotalTermFreq += stats.TotalTermFreq;
                    sumDFsinceLastAbortCheck += stats.DocFreq;
                    sumDocFreq += stats.DocFreq;
                    if (sumDFsinceLastAbortCheck > 60000)
                    {
                        mergeState.CheckAbort.Work(sumDFsinceLastAbortCheck / 5.0);
                        sumDFsinceLastAbortCheck = 0;
                    }
                }
            }
        }
        // Term frequencies are meaningless for DOCS_ONLY, so report -1 per the Finish contract.
        Finish(indexOptions == IndexOptions.DOCS_ONLY ? -1 : sumTotalTermFreq, sumDocFreq, visitedDocs.Cardinality());
    }
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Management.Automation;
using System.Management.Automation.Host;
using System.Security;
using System.Text;
using System.Windows.Media;
namespace NuGetConsole.Host.PowerShell.Implementation
{
internal class NuGetHostUserInterface : PSHostUserInterface, IHostUISupportsMultipleChoiceSelection
{
public const ConsoleColor NoColor = (ConsoleColor)(-1);
private const int VkCodeReturn = 13;
private const int VkCodeBackspace = 8;
private static Color[] _consoleColors;
private readonly NuGetPSHost _host;
private readonly object _instanceLock = new object();
private PSHostRawUserInterface _rawUI;
public NuGetHostUserInterface(NuGetPSHost host)
{
UtilityMethods.ThrowIfArgumentNull(host);
_host = host;
}
private IConsole Console
{
get { return _host.ActiveConsole; }
}
public override PSHostRawUserInterface RawUI
{
get
{
if (_rawUI == null)
{
_rawUI = new NuGetRawUserInterface(_host);
}
return _rawUI;
}
}
public Collection<int> PromptForChoice(
string caption, string message, Collection<ChoiceDescription> choices, IEnumerable<int> defaultChoices)
{
WriteErrorLine("IHostUISupportsMultipleChoiceSelection.PromptForChoice not implemented.");
return null;
}
private static Type GetFieldType(FieldDescription field)
{
Type type = null;
if (!String.IsNullOrEmpty(field.ParameterAssemblyFullName))
{
LanguagePrimitives.TryConvertTo(field.ParameterAssemblyFullName, out type);
}
if ((type == null) && !String.IsNullOrEmpty(field.ParameterTypeFullName))
{
LanguagePrimitives.TryConvertTo(field.ParameterTypeFullName, out type);
}
return type;
}
public override Dictionary<string, PSObject> Prompt(
string caption, string message, Collection<FieldDescription> descriptions)
{
if (descriptions == null)
{
throw new ArgumentNullException("descriptions");
}
if (descriptions.Count == 0)
{
// emulate powershell.exe behavior for empty collection.
throw new ArgumentException(
Resources.ZeroLengthCollection, "descriptions");
}
if (!String.IsNullOrEmpty(caption))
{
WriteLine(caption);
}
if (!String.IsNullOrEmpty(message))
{
WriteLine(message);
}
// this stores the field/value pairs - e.g. unbound missing mandatory parameters,
// or scripted $host.ui.prompt invocation.
var results = new Dictionary<string, PSObject>(descriptions.Count);
int index = 0;
foreach (FieldDescription description in descriptions)
{
// if type is not resolvable, throw (as per powershell.exe)
if ((description == null) ||
String.IsNullOrEmpty(description.ParameterAssemblyFullName))
{
throw new ArgumentException("descriptions[" + index + "]");
}
bool cancelled;
object answer;
string name = description.Name;
// as per powershell.exe, if input value cannot be coerced to
// parameter type then default to string.
Type fieldType = GetFieldType(description) ?? typeof(String);
// is parameter a collection type?
if (typeof(IList).IsAssignableFrom(fieldType))
{
// [int[]]$param1, [string[]]$param2
cancelled = PromptCollection(name, fieldType, out answer);
}
else
{
//[int]$param1, [string]$param2
cancelled = PromptScalar(name, fieldType, out answer);
}
// user hit ESC?
if (cancelled)
{
WriteLine(String.Empty);
results.Clear();
break;
}
results.Add(name, PSObject.AsPSObject(answer));
index++;
}
return results;
}
// parameter type is a scalar, like [int] or [string]
private bool PromptScalar(string name, Type fieldType, out object answer)
{
bool cancelled;
// if field a securestring, we prompt with masked input
if (fieldType.Equals(typeof(SecureString)))
{
Write(name + ": ");
answer = ReadLineAsSecureString();
cancelled = (answer == null);
}
// if field is a credential type, we prompt with the secure dialog
else if (fieldType.Equals(typeof(PSCredential)))
{
answer = PromptForCredential(null, null, null, String.Empty);
cancelled = (answer == null);
}
else
{
// everything else is accepted as string, and coerced to the target type
// if coercion fails, just pass as string.
bool coercable = true;
string prompt = name + ": ";
do
{
if (coercable)
{
// display field label as a prompt
Write(prompt);
}
else
{
// last input invalid, display in red
Write(prompt, ConsoleColor.Red);
}
string line = ReadLine();
// user hit ESC?
cancelled = (line == null);
// can powershell turn this string into the field type?
coercable = LanguagePrimitives.TryConvertTo(line, fieldType, out answer);
} while (!cancelled && !coercable);
}
return cancelled;
}
// parameter type is a collection, like [int[]] or [string[]]
private bool PromptCollection(string name, Type fieldType, out object answer)
{
bool cancelled;
// we default to an object[] array
Type elementType = typeof(Object);
if (fieldType.IsArray)
{
elementType = fieldType.GetElementType();
// FIXME: zero rank array check?
}
// we will hold a list of the user's string input(s)
var valuesToConvert = new ArrayList();
bool coercable = true;
while (true)
{
// prompt for collection element, suffixed with the current index
string prompt = String.Format(
CultureInfo.CurrentCulture,
"{0}[{1}]: ", name, valuesToConvert.Count);
if (coercable)
{
Write(prompt);
}
else
{
// last input invalid, display prompt in red
Write(prompt, ConsoleColor.Red);
}
string input = ReadLine();
// user hit ESC?
cancelled = (input == null);
// user hit ENTER on an empty line? we treat this as
// terminating the input for the collection prompting.
bool inputComplete = String.IsNullOrEmpty(input);
if (cancelled || inputComplete)
{
break;
}
// can powershell convert this input to the element type?
coercable = LanguagePrimitives.TryConvertTo(input, elementType, out answer);
if (coercable)
{
// yes, so store it
valuesToConvert.Add(answer);
}
}
if (!cancelled)
{
// now, try to convert the entire collection of user inputs to the field's collection type
if (!LanguagePrimitives.TryConvertTo(valuesToConvert, elementType, out answer))
{
answer = valuesToConvert;
}
}
else
{
answer = null;
}
return cancelled;
}
public override int PromptForChoice(string caption, string message, Collection<ChoiceDescription> choices, int defaultChoice)
{
if (!String.IsNullOrEmpty(caption))
{
WriteLine(caption);
}
if (!String.IsNullOrEmpty(message))
{
WriteLine(message);
}
int chosen = -1;
do
{
// holds hotkeys, e.g. "[Y] Yes [N] No"
var accelerators = new string[choices.Count];
for (int index = 0; index < choices.Count; index++)
{
ChoiceDescription choice = choices[index];
string label = choice.Label;
int ampIndex = label.IndexOf('&'); // hotkey marker
accelerators[index] = String.Empty; // default to empty
// accelerator marker found?
if (ampIndex != -1 && ampIndex < label.Length - 1)
{
// grab the letter after '&'
accelerators[index] = label
.Substring(ampIndex + 1, 1)
.ToUpper(CultureInfo.CurrentCulture);
}
Write(String.Format(CultureInfo.CurrentCulture, "[{0}] {1} ",
accelerators[index],
// remove the redundant marker from output
label.Replace("&", String.Empty)));
}
Write(String.Format(CultureInfo.CurrentCulture, Resources.PromptForChoiceSuffix, accelerators[defaultChoice]));
string input = ReadLine().Trim();
switch (input.Length)
{
case 0:
// enter, accept default if provided
if (defaultChoice == -1)
{
continue;
}
chosen = defaultChoice;
break;
case 1:
if (input[0] == '?')
{
// show help
for (int index = 0; index < choices.Count; index++)
{
WriteLine(String.Format(
CultureInfo.CurrentCulture, "{0} - {1}.", accelerators[index], choices[index].HelpMessage));
}
}
else
{
// single letter accelerator, e.g. "Y"
chosen = Array.FindIndex(
accelerators,
accelerator => accelerator.Equals(
input,
StringComparison.OrdinalIgnoreCase));
}
break;
default:
// match against entire label, e.g. "Yes"
chosen = Array.FindIndex(
choices.ToArray(),
choice => choice.Label.Equals(
input,
StringComparison.OrdinalIgnoreCase));
break;
}
} while (chosen == -1);
return chosen;
}
public override PSCredential PromptForCredential(
string caption, string message, string userName, string targetName,
PSCredentialTypes allowedCredentialTypes, PSCredentialUIOptions options)
{
return NativeMethods.CredUIPromptForCredentials(
caption,
message,
userName,
targetName,
allowedCredentialTypes,
options);
}
public override PSCredential PromptForCredential(
string caption, string message, string userName, string targetName)
{
return PromptForCredential(
caption,
message,
userName,
targetName,
PSCredentialTypes.Default,
PSCredentialUIOptions.Default);
}
public override string ReadLine()
{
try
{
var builder = new StringBuilder();
lock (_instanceLock)
{
KeyInfo keyInfo;
while ((keyInfo = RawUI.ReadKey()).VirtualKeyCode != VkCodeReturn)
{
// {enter}
if (keyInfo.VirtualKeyCode == VkCodeBackspace)
{
if (builder.Length > 0)
{
builder.Remove(builder.Length - 1, 1);
Console.WriteBackspace();
}
}
else
{
builder.Append(keyInfo.Character);
// destined for output, so apply culture
Write(keyInfo.Character.ToString(CultureInfo.CurrentCulture));
}
}
}
return builder.ToString();
}
catch (PipelineStoppedException)
{
// ESC was hit
return null;
}
finally
{
WriteLine(String.Empty);
}
}
[SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope", Justification = "Caller's responsibility to dispose.")]
public override SecureString ReadLineAsSecureString()
{
var secureString = new SecureString();
try
{
lock (_instanceLock)
{
KeyInfo keyInfo;
while ((keyInfo = RawUI.ReadKey()).VirtualKeyCode != VkCodeReturn)
{
// {enter}
if (keyInfo.VirtualKeyCode == VkCodeBackspace)
{
if (secureString.Length > 0)
{
secureString.RemoveAt(secureString.Length - 1);
Console.WriteBackspace();
}
}
else
{
// culture is deferred until securestring is decrypted
secureString.AppendChar(keyInfo.Character);
Write("*");
}
}
secureString.MakeReadOnly();
}
return secureString;
}
catch (PipelineStoppedException)
{
// ESC was hit, clean up secure string
secureString.Dispose();
return null;
}
finally
{
WriteLine(String.Empty);
}
}
/// <summary>
/// Convert a System.ConsoleColor enum to a Color value, or null if c is not a valid enum.
/// </summary>
private static Color? ToColor(ConsoleColor c)
{
if (_consoleColors == null)
{
// colors copied from hkcu:\Console color table
_consoleColors = new Color[16] {
Color.FromRgb(0x00, 0x00, 0x00),
Color.FromRgb(0x00, 0x00, 0x80),
Color.FromRgb(0x00, 0x80, 0x00),
Color.FromRgb(0x00, 0x80, 0x80),
Color.FromRgb(0x80, 0x00, 0x00),
Color.FromRgb(0x80, 0x00, 0x80),
Color.FromRgb(0x80, 0x80, 0x00),
Color.FromRgb(0xC0, 0xC0, 0xC0),
Color.FromRgb(0x80, 0x80, 0x80),
Color.FromRgb(0x00, 0x00, 0xFF),
Color.FromRgb(0x00, 0xFF, 0x00),
Color.FromRgb(0x00, 0xFF, 0xFF),
Color.FromRgb(0xFF, 0x00, 0x00),
Color.FromRgb(0xFF, 0x00, 0xFF),
Color.FromRgb(0xFF, 0xFF, 0x00),
Color.FromRgb(0xFF, 0xFF, 0xFF),
};
}
var i = (int)c;
if (i >= 0 && i < _consoleColors.Length)
{
return _consoleColors[i];
}
return null; // invalid color
}
public override void Write(string value)
{
Console.Write(value);
}
public override void WriteLine(string value)
{
Console.WriteLine(value);
}
private void Write(string value, ConsoleColor foregroundColor, ConsoleColor backgroundColor = NoColor)
{
Console.Write(value, ToColor(foregroundColor), ToColor(backgroundColor));
}
private void WriteLine(string value, ConsoleColor foregroundColor, ConsoleColor backgroundColor = NoColor)
{
// If append \n only, text becomes 1 line when copied to notepad.
Write(value + Environment.NewLine, foregroundColor, backgroundColor);
}
public override void Write(ConsoleColor foregroundColor, ConsoleColor backgroundColor, string value)
{
Write(value, foregroundColor, backgroundColor);
}
public override void WriteDebugLine(string message)
{
WriteLine(message, ConsoleColor.DarkGray);
}
public override void WriteErrorLine(string value)
{
WriteLine(value, foregroundColor: ConsoleColor.White, backgroundColor: ConsoleColor.Red);
}
public override void WriteProgress(long sourceId, ProgressRecord record)
{
string operation = record.CurrentOperation ?? record.StatusDescription;
if (!String.IsNullOrEmpty(operation))
{
Console.WriteProgress(operation, record.PercentComplete);
}
}
public override void WriteVerboseLine(string message)
{
    // Verbose output shares the dimmed (dark gray) rendering used for debug text.
    WriteLine(message, foregroundColor: ConsoleColor.DarkGray);
}
public override void WriteWarningLine(string message)
{
    // Warnings are rendered as black text on a yellow background.
    WriteLine(message, ConsoleColor.Black, ConsoleColor.Yellow);
}
}
}
| |
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using Google.Api.Gax;
using Google.Api.Gax.Grpc;
using Google.Cloud.Logging.V2;
using Google.Protobuf;
using Google.Protobuf.WellKnownTypes;
using Grpc.Core;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace Google.Cloud.Logging.V2.Snippets
{
/// <summary>Generated snippets</summary>
/// <remarks>
/// Documentation snippets for <c>ConfigServiceV2Client</c> (Stackdriver Logging
/// sink and exclusion configuration). This file is generated ("DO NOT EDIT");
/// the "// Snippet:" ... "// End snippet" marker comments are consumed by the
/// snippet-extraction tooling, so the code between them must stay exactly as
/// generated. Each operation appears in four variants: sync/async crossed with
/// flattened-arguments/request-object overloads.
/// </remarks>
public class GeneratedConfigServiceV2ClientSnippets
{
    /// <summary>Snippet for ListSinksAsync</summary>
    public async Task ListSinksAsync()
    {
        // Snippet: ListSinksAsync(ParentNameOneof,string,int?,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        ParentNameOneof parent = ParentNameOneof.From(new ProjectName("[PROJECT]"));
        // Make the request
        PagedAsyncEnumerable<ListSinksResponse, LogSink> response =
            configServiceV2Client.ListSinksAsync(parent);
        // Iterate over all response items, lazily performing RPCs as required
        await response.ForEachAsync((LogSink item) =>
        {
            // Do something with each item
            Console.WriteLine(item);
        });
        // Or iterate over pages (of server-defined size), performing one RPC per page
        await response.AsRawResponses().ForEachAsync((ListSinksResponse page) =>
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (LogSink item in page)
            {
                Console.WriteLine(item);
            }
        });
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<LogSink> singlePage = await response.ReadPageAsync(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (LogSink item in singlePage)
        {
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }
    /// <summary>Snippet for ListSinks</summary>
    public void ListSinks()
    {
        // Snippet: ListSinks(ParentNameOneof,string,int?,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        ParentNameOneof parent = ParentNameOneof.From(new ProjectName("[PROJECT]"));
        // Make the request
        PagedEnumerable<ListSinksResponse, LogSink> response =
            configServiceV2Client.ListSinks(parent);
        // Iterate over all response items, lazily performing RPCs as required
        foreach (LogSink item in response)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Or iterate over pages (of server-defined size), performing one RPC per page
        foreach (ListSinksResponse page in response.AsRawResponses())
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (LogSink item in page)
            {
                Console.WriteLine(item);
            }
        }
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<LogSink> singlePage = response.ReadPage(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (LogSink item in singlePage)
        {
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }
    /// <summary>Snippet for ListSinksAsync</summary>
    public async Task ListSinksAsync_RequestObject()
    {
        // Snippet: ListSinksAsync(ListSinksRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        ListSinksRequest request = new ListSinksRequest
        {
            ParentAsParentNameOneof = ParentNameOneof.From(new ProjectName("[PROJECT]")),
        };
        // Make the request
        PagedAsyncEnumerable<ListSinksResponse, LogSink> response =
            configServiceV2Client.ListSinksAsync(request);
        // Iterate over all response items, lazily performing RPCs as required
        await response.ForEachAsync((LogSink item) =>
        {
            // Do something with each item
            Console.WriteLine(item);
        });
        // Or iterate over pages (of server-defined size), performing one RPC per page
        await response.AsRawResponses().ForEachAsync((ListSinksResponse page) =>
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (LogSink item in page)
            {
                Console.WriteLine(item);
            }
        });
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<LogSink> singlePage = await response.ReadPageAsync(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (LogSink item in singlePage)
        {
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }
    /// <summary>Snippet for ListSinks</summary>
    public void ListSinks_RequestObject()
    {
        // Snippet: ListSinks(ListSinksRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        ListSinksRequest request = new ListSinksRequest
        {
            ParentAsParentNameOneof = ParentNameOneof.From(new ProjectName("[PROJECT]")),
        };
        // Make the request
        PagedEnumerable<ListSinksResponse, LogSink> response =
            configServiceV2Client.ListSinks(request);
        // Iterate over all response items, lazily performing RPCs as required
        foreach (LogSink item in response)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Or iterate over pages (of server-defined size), performing one RPC per page
        foreach (ListSinksResponse page in response.AsRawResponses())
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (LogSink item in page)
            {
                Console.WriteLine(item);
            }
        }
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<LogSink> singlePage = response.ReadPage(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (LogSink item in singlePage)
        {
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }
    /// <summary>Snippet for GetSinkAsync</summary>
    public async Task GetSinkAsync()
    {
        // Snippet: GetSinkAsync(SinkNameOneof,CallSettings)
        // Additional: GetSinkAsync(SinkNameOneof,CancellationToken)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        SinkNameOneof sinkName = SinkNameOneof.From(new SinkName("[PROJECT]", "[SINK]"));
        // Make the request
        LogSink response = await configServiceV2Client.GetSinkAsync(sinkName);
        // End snippet
    }
    /// <summary>Snippet for GetSink</summary>
    public void GetSink()
    {
        // Snippet: GetSink(SinkNameOneof,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        SinkNameOneof sinkName = SinkNameOneof.From(new SinkName("[PROJECT]", "[SINK]"));
        // Make the request
        LogSink response = configServiceV2Client.GetSink(sinkName);
        // End snippet
    }
    /// <summary>Snippet for GetSinkAsync</summary>
    public async Task GetSinkAsync_RequestObject()
    {
        // Snippet: GetSinkAsync(GetSinkRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        GetSinkRequest request = new GetSinkRequest
        {
            SinkNameAsSinkNameOneof = SinkNameOneof.From(new SinkName("[PROJECT]", "[SINK]")),
        };
        // Make the request
        LogSink response = await configServiceV2Client.GetSinkAsync(request);
        // End snippet
    }
    /// <summary>Snippet for GetSink</summary>
    public void GetSink_RequestObject()
    {
        // Snippet: GetSink(GetSinkRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        GetSinkRequest request = new GetSinkRequest
        {
            SinkNameAsSinkNameOneof = SinkNameOneof.From(new SinkName("[PROJECT]", "[SINK]")),
        };
        // Make the request
        LogSink response = configServiceV2Client.GetSink(request);
        // End snippet
    }
    /// <summary>Snippet for CreateSinkAsync</summary>
    public async Task CreateSinkAsync()
    {
        // Snippet: CreateSinkAsync(ParentNameOneof,LogSink,CallSettings)
        // Additional: CreateSinkAsync(ParentNameOneof,LogSink,CancellationToken)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        ParentNameOneof parent = ParentNameOneof.From(new ProjectName("[PROJECT]"));
        LogSink sink = new LogSink();
        // Make the request
        LogSink response = await configServiceV2Client.CreateSinkAsync(parent, sink);
        // End snippet
    }
    /// <summary>Snippet for CreateSink</summary>
    public void CreateSink()
    {
        // Snippet: CreateSink(ParentNameOneof,LogSink,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        ParentNameOneof parent = ParentNameOneof.From(new ProjectName("[PROJECT]"));
        LogSink sink = new LogSink();
        // Make the request
        LogSink response = configServiceV2Client.CreateSink(parent, sink);
        // End snippet
    }
    /// <summary>Snippet for CreateSinkAsync</summary>
    public async Task CreateSinkAsync_RequestObject()
    {
        // Snippet: CreateSinkAsync(CreateSinkRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        CreateSinkRequest request = new CreateSinkRequest
        {
            ParentAsParentNameOneof = ParentNameOneof.From(new ProjectName("[PROJECT]")),
            Sink = new LogSink(),
        };
        // Make the request
        LogSink response = await configServiceV2Client.CreateSinkAsync(request);
        // End snippet
    }
    /// <summary>Snippet for CreateSink</summary>
    public void CreateSink_RequestObject()
    {
        // Snippet: CreateSink(CreateSinkRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        CreateSinkRequest request = new CreateSinkRequest
        {
            ParentAsParentNameOneof = ParentNameOneof.From(new ProjectName("[PROJECT]")),
            Sink = new LogSink(),
        };
        // Make the request
        LogSink response = configServiceV2Client.CreateSink(request);
        // End snippet
    }
    /// <summary>Snippet for UpdateSinkAsync</summary>
    public async Task UpdateSinkAsync()
    {
        // Snippet: UpdateSinkAsync(SinkNameOneof,LogSink,CallSettings)
        // Additional: UpdateSinkAsync(SinkNameOneof,LogSink,CancellationToken)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        SinkNameOneof sinkName = SinkNameOneof.From(new SinkName("[PROJECT]", "[SINK]"));
        LogSink sink = new LogSink();
        // Make the request
        LogSink response = await configServiceV2Client.UpdateSinkAsync(sinkName, sink);
        // End snippet
    }
    /// <summary>Snippet for UpdateSink</summary>
    public void UpdateSink()
    {
        // Snippet: UpdateSink(SinkNameOneof,LogSink,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        SinkNameOneof sinkName = SinkNameOneof.From(new SinkName("[PROJECT]", "[SINK]"));
        LogSink sink = new LogSink();
        // Make the request
        LogSink response = configServiceV2Client.UpdateSink(sinkName, sink);
        // End snippet
    }
    /// <summary>Snippet for UpdateSinkAsync</summary>
    public async Task UpdateSinkAsync_RequestObject()
    {
        // Snippet: UpdateSinkAsync(UpdateSinkRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        UpdateSinkRequest request = new UpdateSinkRequest
        {
            SinkNameAsSinkNameOneof = SinkNameOneof.From(new SinkName("[PROJECT]", "[SINK]")),
            Sink = new LogSink(),
        };
        // Make the request
        LogSink response = await configServiceV2Client.UpdateSinkAsync(request);
        // End snippet
    }
    /// <summary>Snippet for UpdateSink</summary>
    public void UpdateSink_RequestObject()
    {
        // Snippet: UpdateSink(UpdateSinkRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        UpdateSinkRequest request = new UpdateSinkRequest
        {
            SinkNameAsSinkNameOneof = SinkNameOneof.From(new SinkName("[PROJECT]", "[SINK]")),
            Sink = new LogSink(),
        };
        // Make the request
        LogSink response = configServiceV2Client.UpdateSink(request);
        // End snippet
    }
    /// <summary>Snippet for DeleteSinkAsync</summary>
    public async Task DeleteSinkAsync()
    {
        // Snippet: DeleteSinkAsync(SinkNameOneof,CallSettings)
        // Additional: DeleteSinkAsync(SinkNameOneof,CancellationToken)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        SinkNameOneof sinkName = SinkNameOneof.From(new SinkName("[PROJECT]", "[SINK]"));
        // Make the request
        await configServiceV2Client.DeleteSinkAsync(sinkName);
        // End snippet
    }
    /// <summary>Snippet for DeleteSink</summary>
    public void DeleteSink()
    {
        // Snippet: DeleteSink(SinkNameOneof,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        SinkNameOneof sinkName = SinkNameOneof.From(new SinkName("[PROJECT]", "[SINK]"));
        // Make the request
        configServiceV2Client.DeleteSink(sinkName);
        // End snippet
    }
    /// <summary>Snippet for DeleteSinkAsync</summary>
    public async Task DeleteSinkAsync_RequestObject()
    {
        // Snippet: DeleteSinkAsync(DeleteSinkRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        DeleteSinkRequest request = new DeleteSinkRequest
        {
            SinkNameAsSinkNameOneof = SinkNameOneof.From(new SinkName("[PROJECT]", "[SINK]")),
        };
        // Make the request
        await configServiceV2Client.DeleteSinkAsync(request);
        // End snippet
    }
    /// <summary>Snippet for DeleteSink</summary>
    public void DeleteSink_RequestObject()
    {
        // Snippet: DeleteSink(DeleteSinkRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        DeleteSinkRequest request = new DeleteSinkRequest
        {
            SinkNameAsSinkNameOneof = SinkNameOneof.From(new SinkName("[PROJECT]", "[SINK]")),
        };
        // Make the request
        configServiceV2Client.DeleteSink(request);
        // End snippet
    }
    /// <summary>Snippet for ListExclusionsAsync</summary>
    public async Task ListExclusionsAsync()
    {
        // Snippet: ListExclusionsAsync(ParentNameOneof,string,int?,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        ParentNameOneof parent = ParentNameOneof.From(new ProjectName("[PROJECT]"));
        // Make the request
        PagedAsyncEnumerable<ListExclusionsResponse, LogExclusion> response =
            configServiceV2Client.ListExclusionsAsync(parent);
        // Iterate over all response items, lazily performing RPCs as required
        await response.ForEachAsync((LogExclusion item) =>
        {
            // Do something with each item
            Console.WriteLine(item);
        });
        // Or iterate over pages (of server-defined size), performing one RPC per page
        await response.AsRawResponses().ForEachAsync((ListExclusionsResponse page) =>
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (LogExclusion item in page)
            {
                Console.WriteLine(item);
            }
        });
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<LogExclusion> singlePage = await response.ReadPageAsync(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (LogExclusion item in singlePage)
        {
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }
    /// <summary>Snippet for ListExclusions</summary>
    public void ListExclusions()
    {
        // Snippet: ListExclusions(ParentNameOneof,string,int?,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        ParentNameOneof parent = ParentNameOneof.From(new ProjectName("[PROJECT]"));
        // Make the request
        PagedEnumerable<ListExclusionsResponse, LogExclusion> response =
            configServiceV2Client.ListExclusions(parent);
        // Iterate over all response items, lazily performing RPCs as required
        foreach (LogExclusion item in response)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Or iterate over pages (of server-defined size), performing one RPC per page
        foreach (ListExclusionsResponse page in response.AsRawResponses())
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (LogExclusion item in page)
            {
                Console.WriteLine(item);
            }
        }
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<LogExclusion> singlePage = response.ReadPage(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (LogExclusion item in singlePage)
        {
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }
    /// <summary>Snippet for ListExclusionsAsync</summary>
    public async Task ListExclusionsAsync_RequestObject()
    {
        // Snippet: ListExclusionsAsync(ListExclusionsRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        ListExclusionsRequest request = new ListExclusionsRequest
        {
            ParentAsParentNameOneof = ParentNameOneof.From(new ProjectName("[PROJECT]")),
        };
        // Make the request
        PagedAsyncEnumerable<ListExclusionsResponse, LogExclusion> response =
            configServiceV2Client.ListExclusionsAsync(request);
        // Iterate over all response items, lazily performing RPCs as required
        await response.ForEachAsync((LogExclusion item) =>
        {
            // Do something with each item
            Console.WriteLine(item);
        });
        // Or iterate over pages (of server-defined size), performing one RPC per page
        await response.AsRawResponses().ForEachAsync((ListExclusionsResponse page) =>
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (LogExclusion item in page)
            {
                Console.WriteLine(item);
            }
        });
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<LogExclusion> singlePage = await response.ReadPageAsync(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (LogExclusion item in singlePage)
        {
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }
    /// <summary>Snippet for ListExclusions</summary>
    public void ListExclusions_RequestObject()
    {
        // Snippet: ListExclusions(ListExclusionsRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        ListExclusionsRequest request = new ListExclusionsRequest
        {
            ParentAsParentNameOneof = ParentNameOneof.From(new ProjectName("[PROJECT]")),
        };
        // Make the request
        PagedEnumerable<ListExclusionsResponse, LogExclusion> response =
            configServiceV2Client.ListExclusions(request);
        // Iterate over all response items, lazily performing RPCs as required
        foreach (LogExclusion item in response)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Or iterate over pages (of server-defined size), performing one RPC per page
        foreach (ListExclusionsResponse page in response.AsRawResponses())
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (LogExclusion item in page)
            {
                Console.WriteLine(item);
            }
        }
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<LogExclusion> singlePage = response.ReadPage(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (LogExclusion item in singlePage)
        {
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }
    /// <summary>Snippet for GetExclusionAsync</summary>
    public async Task GetExclusionAsync()
    {
        // Snippet: GetExclusionAsync(ExclusionNameOneof,CallSettings)
        // Additional: GetExclusionAsync(ExclusionNameOneof,CancellationToken)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        ExclusionNameOneof name = ExclusionNameOneof.From(new ExclusionName("[PROJECT]", "[EXCLUSION]"));
        // Make the request
        LogExclusion response = await configServiceV2Client.GetExclusionAsync(name);
        // End snippet
    }
    /// <summary>Snippet for GetExclusion</summary>
    public void GetExclusion()
    {
        // Snippet: GetExclusion(ExclusionNameOneof,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        ExclusionNameOneof name = ExclusionNameOneof.From(new ExclusionName("[PROJECT]", "[EXCLUSION]"));
        // Make the request
        LogExclusion response = configServiceV2Client.GetExclusion(name);
        // End snippet
    }
    /// <summary>Snippet for GetExclusionAsync</summary>
    public async Task GetExclusionAsync_RequestObject()
    {
        // Snippet: GetExclusionAsync(GetExclusionRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        GetExclusionRequest request = new GetExclusionRequest
        {
            ExclusionNameOneof = ExclusionNameOneof.From(new ExclusionName("[PROJECT]", "[EXCLUSION]")),
        };
        // Make the request
        LogExclusion response = await configServiceV2Client.GetExclusionAsync(request);
        // End snippet
    }
    /// <summary>Snippet for GetExclusion</summary>
    public void GetExclusion_RequestObject()
    {
        // Snippet: GetExclusion(GetExclusionRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        GetExclusionRequest request = new GetExclusionRequest
        {
            ExclusionNameOneof = ExclusionNameOneof.From(new ExclusionName("[PROJECT]", "[EXCLUSION]")),
        };
        // Make the request
        LogExclusion response = configServiceV2Client.GetExclusion(request);
        // End snippet
    }
    /// <summary>Snippet for CreateExclusionAsync</summary>
    public async Task CreateExclusionAsync()
    {
        // Snippet: CreateExclusionAsync(ParentNameOneof,LogExclusion,CallSettings)
        // Additional: CreateExclusionAsync(ParentNameOneof,LogExclusion,CancellationToken)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        ParentNameOneof parent = ParentNameOneof.From(new ProjectName("[PROJECT]"));
        LogExclusion exclusion = new LogExclusion();
        // Make the request
        LogExclusion response = await configServiceV2Client.CreateExclusionAsync(parent, exclusion);
        // End snippet
    }
    /// <summary>Snippet for CreateExclusion</summary>
    public void CreateExclusion()
    {
        // Snippet: CreateExclusion(ParentNameOneof,LogExclusion,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        ParentNameOneof parent = ParentNameOneof.From(new ProjectName("[PROJECT]"));
        LogExclusion exclusion = new LogExclusion();
        // Make the request
        LogExclusion response = configServiceV2Client.CreateExclusion(parent, exclusion);
        // End snippet
    }
    /// <summary>Snippet for CreateExclusionAsync</summary>
    public async Task CreateExclusionAsync_RequestObject()
    {
        // Snippet: CreateExclusionAsync(CreateExclusionRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        CreateExclusionRequest request = new CreateExclusionRequest
        {
            ParentAsParentNameOneof = ParentNameOneof.From(new ProjectName("[PROJECT]")),
            Exclusion = new LogExclusion(),
        };
        // Make the request
        LogExclusion response = await configServiceV2Client.CreateExclusionAsync(request);
        // End snippet
    }
    /// <summary>Snippet for CreateExclusion</summary>
    public void CreateExclusion_RequestObject()
    {
        // Snippet: CreateExclusion(CreateExclusionRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        CreateExclusionRequest request = new CreateExclusionRequest
        {
            ParentAsParentNameOneof = ParentNameOneof.From(new ProjectName("[PROJECT]")),
            Exclusion = new LogExclusion(),
        };
        // Make the request
        LogExclusion response = configServiceV2Client.CreateExclusion(request);
        // End snippet
    }
    /// <summary>Snippet for UpdateExclusionAsync</summary>
    public async Task UpdateExclusionAsync()
    {
        // Snippet: UpdateExclusionAsync(ExclusionNameOneof,LogExclusion,FieldMask,CallSettings)
        // Additional: UpdateExclusionAsync(ExclusionNameOneof,LogExclusion,FieldMask,CancellationToken)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        ExclusionNameOneof name = ExclusionNameOneof.From(new ExclusionName("[PROJECT]", "[EXCLUSION]"));
        LogExclusion exclusion = new LogExclusion();
        FieldMask updateMask = new FieldMask();
        // Make the request
        LogExclusion response = await configServiceV2Client.UpdateExclusionAsync(name, exclusion, updateMask);
        // End snippet
    }
    /// <summary>Snippet for UpdateExclusion</summary>
    public void UpdateExclusion()
    {
        // Snippet: UpdateExclusion(ExclusionNameOneof,LogExclusion,FieldMask,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        ExclusionNameOneof name = ExclusionNameOneof.From(new ExclusionName("[PROJECT]", "[EXCLUSION]"));
        LogExclusion exclusion = new LogExclusion();
        FieldMask updateMask = new FieldMask();
        // Make the request
        LogExclusion response = configServiceV2Client.UpdateExclusion(name, exclusion, updateMask);
        // End snippet
    }
    /// <summary>Snippet for UpdateExclusionAsync</summary>
    public async Task UpdateExclusionAsync_RequestObject()
    {
        // Snippet: UpdateExclusionAsync(UpdateExclusionRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        UpdateExclusionRequest request = new UpdateExclusionRequest
        {
            ExclusionNameOneof = ExclusionNameOneof.From(new ExclusionName("[PROJECT]", "[EXCLUSION]")),
            Exclusion = new LogExclusion(),
            UpdateMask = new FieldMask(),
        };
        // Make the request
        LogExclusion response = await configServiceV2Client.UpdateExclusionAsync(request);
        // End snippet
    }
    /// <summary>Snippet for UpdateExclusion</summary>
    public void UpdateExclusion_RequestObject()
    {
        // Snippet: UpdateExclusion(UpdateExclusionRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        UpdateExclusionRequest request = new UpdateExclusionRequest
        {
            ExclusionNameOneof = ExclusionNameOneof.From(new ExclusionName("[PROJECT]", "[EXCLUSION]")),
            Exclusion = new LogExclusion(),
            UpdateMask = new FieldMask(),
        };
        // Make the request
        LogExclusion response = configServiceV2Client.UpdateExclusion(request);
        // End snippet
    }
    /// <summary>Snippet for DeleteExclusionAsync</summary>
    public async Task DeleteExclusionAsync()
    {
        // Snippet: DeleteExclusionAsync(ExclusionNameOneof,CallSettings)
        // Additional: DeleteExclusionAsync(ExclusionNameOneof,CancellationToken)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        ExclusionNameOneof name = ExclusionNameOneof.From(new ExclusionName("[PROJECT]", "[EXCLUSION]"));
        // Make the request
        await configServiceV2Client.DeleteExclusionAsync(name);
        // End snippet
    }
    /// <summary>Snippet for DeleteExclusion</summary>
    public void DeleteExclusion()
    {
        // Snippet: DeleteExclusion(ExclusionNameOneof,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        ExclusionNameOneof name = ExclusionNameOneof.From(new ExclusionName("[PROJECT]", "[EXCLUSION]"));
        // Make the request
        configServiceV2Client.DeleteExclusion(name);
        // End snippet
    }
    /// <summary>Snippet for DeleteExclusionAsync</summary>
    public async Task DeleteExclusionAsync_RequestObject()
    {
        // Snippet: DeleteExclusionAsync(DeleteExclusionRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = await ConfigServiceV2Client.CreateAsync();
        // Initialize request argument(s)
        DeleteExclusionRequest request = new DeleteExclusionRequest
        {
            ExclusionNameOneof = ExclusionNameOneof.From(new ExclusionName("[PROJECT]", "[EXCLUSION]")),
        };
        // Make the request
        await configServiceV2Client.DeleteExclusionAsync(request);
        // End snippet
    }
    /// <summary>Snippet for DeleteExclusion</summary>
    public void DeleteExclusion_RequestObject()
    {
        // Snippet: DeleteExclusion(DeleteExclusionRequest,CallSettings)
        // Create client
        ConfigServiceV2Client configServiceV2Client = ConfigServiceV2Client.Create();
        // Initialize request argument(s)
        DeleteExclusionRequest request = new DeleteExclusionRequest
        {
            ExclusionNameOneof = ExclusionNameOneof.From(new ExclusionName("[PROJECT]", "[EXCLUSION]")),
        };
        // Make the request
        configServiceV2Client.DeleteExclusion(request);
        // End snippet
    }
}
}
| |
// Copyright (c) 2013-2016 CoreTweet Development Team
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
#if !NET35
using System.Threading;
using System.Threading.Tasks;
#endif
namespace LibAzyotter
{
internal static class EnumerableExtensions
{
    /// <summary>
    /// Lazily yields the remaining lines of <paramref name="streamReader"/>
    /// until <see cref="StreamReader.EndOfStream"/> reports true.
    /// </summary>
    internal static IEnumerable<string> EnumerateLines(this StreamReader streamReader)
    {
        while(!streamReader.EndOfStream)
            yield return streamReader.ReadLine();
    }

    /// <summary>Invokes <paramref name="action"/> once for every element of <paramref name="source"/>, in order.</summary>
    internal static void ForEach<T>(this IEnumerable<T> source, Action<T> action)
    {
        foreach(T item in source)
            action(item);
    }

    /// <summary>Concatenates the string representations of all elements (null elements become empty strings).</summary>
    internal static string JoinToString<T>(this IEnumerable<T> source)
    {
#if !NET35
        return string.Concat(source);
#else
        // string.Concat(object[]) renders null elements as empty strings,
        // matching the behavior of the !NET35 branch.
        return string.Concat(source.Cast<object>().ToArray());
#endif
    }

    /// <summary>
    /// Joins the string representations of all elements with <paramref name="separator"/>.
    /// Null elements are rendered as empty strings on all target frameworks.
    /// </summary>
    internal static string JoinToString<T>(this IEnumerable<T> source, string separator)
    {
#if !NET35
        return string.Join(separator, source);
#else
        // Fix: string.Join treats null elements as empty strings, but the old
        // x.ToString() projection threw NullReferenceException on them. Map
        // null through unchanged so both branches behave identically.
        return string.Join(separator, source.Select(x => x == null ? null : x.ToString()).ToArray());
#endif
    }

    /// <summary>Returns <paramref name="source"/> followed by the elements of <paramref name="second"/>.</summary>
    internal static IEnumerable<T> EndWith<T>(this IEnumerable<T> source, params T[] second)
    {
        return source.Concat(second);
    }
}
internal static class DisposableExtensions
{
    /// <summary>
    /// Thin wrapper marking a disposable value for use with LINQ query syntax;
    /// the wrapped value is exposed via <see cref="Source"/> and is disposed by
    /// the Select/SelectMany operators below.
    /// </summary>
    internal class Using<T>
        where T : IDisposable
    {
        internal T Source { get; }

        internal Using(T source)
        {
            Source = source;
        }
    }

    /// <summary>Wraps <paramref name="source"/> so it can participate in a disposal query.</summary>
    internal static Using<T> Use<T>(this T source)
        where T : IDisposable
    {
        return new Using<T>(source);
    }

    /// <summary>
    /// Query-syntax support: projects both wrapped values through
    /// <paramref name="selector"/>, disposing each of them afterwards.
    /// </summary>
    internal static TResult SelectMany<T, TSecond, TResult>
        (this Using<T> source, Func<T, Using<TSecond>> second, Func<T, TSecond, TResult> selector)
        where T : IDisposable
        where TSecond : IDisposable
    {
        var outer = source.Source;
        using (outer)
        {
            var inner = second(outer).Source;
            using (inner)
            {
                return selector(outer, inner);
            }
        }
    }

    /// <summary>Projects the wrapped value through <paramref name="selector"/>, then disposes it.</summary>
    internal static TResult Select<T, TResult>(this Using<T> source, Func<T, TResult> selector)
        where T : IDisposable
    {
        var resource = source.Source;
        using (resource)
        {
            return selector(resource);
        }
    }
}
internal static class StreamExtensions
{
    /// <summary>
    /// Writes <paramref name="value"/> to <paramref name="stream"/> as raw
    /// UTF-8 bytes (no BOM, no length prefix).
    /// </summary>
    internal static void WriteString(this Stream stream, string value)
    {
        byte[] encoded = Encoding.UTF8.GetBytes(value);
        stream.Write(encoded, 0, encoded.Length);
    }
}
internal static class ExceptionExtensions
{
    /// <summary>
    /// Rethrows <paramref name="ex"/>. On frameworks that have
    /// ExceptionDispatchInfo (NET45 / WIN_RT / WP8) the original stack trace is
    /// preserved; on older targets a plain <c>throw ex;</c> is the only option,
    /// which resets the stack trace to this point.
    /// </summary>
    internal static void Rethrow(this Exception ex)
    {
#if NET45 || WIN_RT || WP8
        // Capture + Throw re-raises the exception with its original stack intact.
        System.Runtime.ExceptionServices
        .ExceptionDispatchInfo.Capture(ex)
        .Throw();
#else
        // Known limitation pre-4.5: stack trace is overwritten here.
        throw ex;
#endif
    }
}
internal static class TypeInfoExtensions
{
    /// <summary>Shim: enumerates the interfaces implemented by <paramref name="source"/> as <see cref="TypeInfo"/> instances.</summary>
    internal static IEnumerable<TypeInfo> GetInterfaces(this TypeInfo source)
        => source.ImplementedInterfaces.Select(i => i.GetTypeInfo());

    /// <summary>Shim: looks up a property declared directly on <paramref name="source"/> (base types are not searched).</summary>
    internal static PropertyInfo GetProperty(this TypeInfo source, string name)
        => source.GetDeclaredProperty(name);

    /// <summary>Shim: returns the get accessor of <paramref name="source"/> (null if the property has none).</summary>
    internal static MethodInfo GetGetMethod(this PropertyInfo source)
        => source.GetMethod;
}
#if !NET35
/// <summary>
/// Empty value type used as the result of a <see cref="System.Threading.Tasks.TaskCompletionSource{T}"/>
/// when only completion (not a value) matters.
/// </summary>
internal struct Unit
{
    // Shared instance; Unit carries no state, so one value suffices.
    internal static readonly Unit Default = new Unit();
}
internal static class TaskExtensions
{
    /// <summary>
    /// Continuation helper: when <paramref name="source"/> completes, maps its
    /// result through <paramref name="action"/> into a new task. Cancellation of
    /// the antecedent or of <paramref name="cancellationToken"/> cancels the
    /// returned task; a lone inner exception is unwrapped from the
    /// AggregateException so callers observe the original exception.
    /// </summary>
    internal static Task<TResult> Done<TSource, TResult>(this Task<TSource> source, Func<TSource, TResult> action, CancellationToken cancellationToken)
    {
        var tcs = new TaskCompletionSource<TResult>();
        source.ContinueWith(t =>
        {
            // NOTE(review): the token only short-circuits here; it is not passed
            // to ContinueWith, so the continuation itself always runs — confirm
            // this is intentional.
            if(t.IsCanceled || cancellationToken.IsCancellationRequested)
            {
                tcs.TrySetCanceled();
                return;
            }
            // Unwrap a single inner exception; keep the aggregate when there are several.
            if(t.Exception != null)
            {
                tcs.TrySetException(t.Exception.InnerExceptions.Count == 1 ? t.Exception.InnerException : t.Exception);
                return;
            }
            try
            {
                tcs.TrySetResult(action(t.Result));
            }
            catch(OperationCanceledException)
            {
                // The projection itself observed cancellation.
                tcs.TrySetCanceled();
            }
            catch(Exception ex)
            {
                tcs.TrySetException(ex);
            }
        });
        return tcs.Task;
    }

    /// <summary>
    /// Continuation helper for side-effecting callbacks: runs
    /// <paramref name="action"/> on the antecedent's result and completes the
    /// returned task with <see cref="Unit.Default"/>. Set
    /// <paramref name="longRunning"/> to schedule the continuation with
    /// <see cref="TaskContinuationOptions.LongRunning"/>.
    /// </summary>
    internal static Task Done<TSource>(this Task<TSource> source, Action<TSource> action, CancellationToken cancellationToken, bool longRunning = false)
    {
        var tcs = new TaskCompletionSource<Unit>();
        source.ContinueWith(t =>
        {
            if (t.IsCanceled || cancellationToken.IsCancellationRequested)
            {
                tcs.TrySetCanceled();
                return;
            }
            // Same single-exception unwrapping as the Func overload above.
            if (t.Exception != null)
            {
                tcs.TrySetException(t.Exception.InnerExceptions.Count == 1 ? t.Exception.InnerException : t.Exception);
                return;
            }
            try
            {
                action(t.Result);
                tcs.TrySetResult(Unit.Default);
            }
            catch (OperationCanceledException)
            {
                tcs.TrySetCanceled();
            }
            catch (Exception ex)
            {
                tcs.TrySetException(ex);
            }
        }, longRunning ? TaskContinuationOptions.LongRunning : TaskContinuationOptions.None);
        return tcs.Task;
    }

    /// <summary>
    /// Continuation helper for non-generic antecedents: when
    /// <paramref name="source"/> completes, produces a value from
    /// <paramref name="action"/> (which takes no input). Cancellation and
    /// exception handling mirror the overloads above.
    /// </summary>
    internal static Task<TResult> Done<TResult>(this Task source, Func<TResult> action, CancellationToken cancellationToken)
    {
        var tcs = new TaskCompletionSource<TResult>();
        source.ContinueWith(t =>
        {
            if (t.IsCanceled || cancellationToken.IsCancellationRequested)
            {
                tcs.TrySetCanceled();
                return;
            }
            if (t.Exception != null)
            {
                tcs.TrySetException(t.Exception.InnerExceptions.Count == 1 ? t.Exception.InnerException : t.Exception);
                return;
            }
            try
            {
                tcs.TrySetResult(action());
            }
            catch (OperationCanceledException)
            {
                tcs.TrySetCanceled();
            }
            catch (Exception ex)
            {
                tcs.TrySetException(ex);
            }
        });
        return tcs.Task;
    }
}
#endif
}
| |
#region S# License
/******************************************************************************************
NOTICE!!! This program and source code is owned and licensed by
StockSharp, LLC, www.stocksharp.com
Viewing or use of this code requires your acceptance of the license
agreement found at https://github.com/StockSharp/StockSharp/blob/master/LICENSE
Removal of this comment is a violation of the license agreement.
Project: StockSharp.BusinessEntities.BusinessEntities
File: IConnector.cs
Created: 2015, 11, 11, 2:32 PM
Copyright 2010 by StockSharp, LLC
*******************************************************************************************/
#endregion S# License
namespace StockSharp.BusinessEntities
{
    using System;
    using System.Collections.Generic;
    using Ecng.Serialization;
    using StockSharp.Logging;
    using StockSharp.Messages;

    /// <summary>
    /// The main interface providing the connection to the trading systems.
    /// </summary>
    public interface IConnector : IPersistable, ILogReceiver, IMarketDataProvider, ISecurityProvider, INewsProvider, IPortfolioProvider
    {
        /// <summary>
        /// Own trade received.
        /// </summary>
        event Action<MyTrade> NewMyTrade;

        /// <summary>
        /// Own trades received.
        /// </summary>
        event Action<IEnumerable<MyTrade>> NewMyTrades;

        /// <summary>
        /// Tick trade received.
        /// </summary>
        event Action<Trade> NewTrade;

        /// <summary>
        /// Tick trades received.
        /// </summary>
        event Action<IEnumerable<Trade>> NewTrades;

        /// <summary>
        /// Order received.
        /// </summary>
        event Action<Order> NewOrder;

        /// <summary>
        /// Orders received.
        /// </summary>
        event Action<IEnumerable<Order>> NewOrders;

        /// <summary>
        /// Order changed (cancelled, matched).
        /// </summary>
        event Action<Order> OrderChanged;

        /// <summary>
        /// Orders changed (cancelled, matched).
        /// </summary>
        event Action<IEnumerable<Order>> OrdersChanged;

        /// <summary>
        /// Order registration error event.
        /// </summary>
        event Action<OrderFail> OrderRegisterFailed;

        /// <summary>
        /// Order cancellation error event.
        /// </summary>
        event Action<OrderFail> OrderCancelFailed;

        /// <summary>
        /// Order registration errors event.
        /// </summary>
        event Action<IEnumerable<OrderFail>> OrdersRegisterFailed;

        /// <summary>
        /// Order cancellation errors event.
        /// </summary>
        event Action<IEnumerable<OrderFail>> OrdersCancelFailed;

        /// <summary>
        /// Stop-order registration errors event.
        /// </summary>
        event Action<IEnumerable<OrderFail>> StopOrdersRegisterFailed;

        /// <summary>
        /// Stop-order cancellation errors event.
        /// </summary>
        event Action<IEnumerable<OrderFail>> StopOrdersCancelFailed;

        /// <summary>
        /// Stop-orders received.
        /// </summary>
        event Action<IEnumerable<Order>> NewStopOrders;

        /// <summary>
        /// Stop-orders state change event.
        /// </summary>
        event Action<IEnumerable<Order>> StopOrdersChanged;

        /// <summary>
        /// Security received.
        /// </summary>
        event Action<Security> NewSecurity;

        /// <summary>
        /// Securities received.
        /// </summary>
        event Action<IEnumerable<Security>> NewSecurities;

        /// <summary>
        /// Security changed.
        /// </summary>
        event Action<Security> SecurityChanged;

        /// <summary>
        /// Securities changed.
        /// </summary>
        event Action<IEnumerable<Security>> SecuritiesChanged;

        /// <summary>
        /// Portfolios received.
        /// </summary>
        event Action<IEnumerable<Portfolio>> NewPortfolios;

        /// <summary>
        /// Portfolio changed.
        /// </summary>
        event Action<Portfolio> PortfolioChanged;

        /// <summary>
        /// Portfolios changed.
        /// </summary>
        event Action<IEnumerable<Portfolio>> PortfoliosChanged;

        /// <summary>
        /// Position received.
        /// </summary>
        event Action<Position> NewPosition;

        /// <summary>
        /// Positions received.
        /// </summary>
        event Action<IEnumerable<Position>> NewPositions;

        /// <summary>
        /// Position changed.
        /// </summary>
        event Action<Position> PositionChanged;

        /// <summary>
        /// Positions changed.
        /// </summary>
        event Action<IEnumerable<Position>> PositionsChanged;

        /// <summary>
        /// Order book received.
        /// </summary>
        event Action<MarketDepth> NewMarketDepth;

        /// <summary>
        /// Order book changed.
        /// </summary>
        event Action<MarketDepth> MarketDepthChanged;

        /// <summary>
        /// Order books received.
        /// </summary>
        event Action<IEnumerable<MarketDepth>> NewMarketDepths;

        /// <summary>
        /// Order books changed.
        /// </summary>
        event Action<IEnumerable<MarketDepth>> MarketDepthsChanged;

        /// <summary>
        /// Order log entry received.
        /// </summary>
        event Action<OrderLogItem> NewOrderLogItem;

        /// <summary>
        /// Order log entries received.
        /// </summary>
        event Action<IEnumerable<OrderLogItem>> NewOrderLogItems;

        /// <summary>
        /// News received.
        /// </summary>
        event Action<News> NewNews;

        /// <summary>
        /// News updated (news body <see cref="StockSharp.BusinessEntities.News.Story"/> received).
        /// </summary>
        event Action<News> NewsChanged;

        /// <summary>
        /// <see cref="Message"/> processed.
        /// </summary>
        event Action<Message> NewMessage;

        /// <summary>
        /// Connected.
        /// </summary>
        event Action Connected;

        /// <summary>
        /// Disconnected.
        /// </summary>
        event Action Disconnected;

        /// <summary>
        /// Connection error (for example, the connection was aborted by server).
        /// </summary>
        event Action<Exception> ConnectionError;

        /// <summary>
        /// Data processing error.
        /// </summary>
        event Action<Exception> Error;

        /// <summary>
        /// Server time of the exchange boards (<see cref="IConnector.ExchangeBoards"/>) changed. The event passes the time elapsed since the previous invocation; the first invocation passes <see cref="TimeSpan.Zero"/>.
        /// </summary>
        event Action<TimeSpan> MarketTimeChanged;

        /// <summary>
        /// Lookup result of <see cref="LookupSecurities(StockSharp.BusinessEntities.Security)"/> received.
        /// </summary>
        event Action<IEnumerable<Security>> LookupSecuritiesResult;

        /// <summary>
        /// Lookup result of <see cref="LookupPortfolios"/> received.
        /// </summary>
        event Action<IEnumerable<Portfolio>> LookupPortfoliosResult;

        /// <summary>
        /// Successful market-data subscription.
        /// </summary>
        event Action<Security, MarketDataTypes> MarketDataSubscriptionSucceeded;

        /// <summary>
        /// Market-data subscription error.
        /// </summary>
        event Action<Security, MarketDataTypes, Exception> MarketDataSubscriptionFailed;

        /// <summary>
        /// Session state changed.
        /// </summary>
        event Action<ExchangeBoard, SessionStates> SessionStateChanged;

        /// <summary>
        /// Get session state for required board.
        /// </summary>
        /// <param name="board">Electronic board.</param>
        /// <returns>Session state. If the information about session state does not exist, then <see langword="null" /> will be returned.</returns>
        SessionStates? GetSessionState(ExchangeBoard board);

        /// <summary>
        /// List of all exchange boards for which instruments (<see cref="Securities"/>) are loaded.
        /// </summary>
        IEnumerable<ExchangeBoard> ExchangeBoards { get; }

        /// <summary>
        /// List of all loaded instruments. It should be read after the <see cref="IConnector.NewSecurities"/> event has been raised; otherwise the empty set will be returned.
        /// </summary>
        IEnumerable<Security> Securities { get; }

        /// <summary>
        /// Get all orders.
        /// </summary>
        IEnumerable<Order> Orders { get; }

        /// <summary>
        /// Get all stop-orders.
        /// </summary>
        IEnumerable<Order> StopOrders { get; }

        /// <summary>
        /// Get all registration errors.
        /// </summary>
        IEnumerable<OrderFail> OrderRegisterFails { get; }

        /// <summary>
        /// Get all cancellation errors.
        /// </summary>
        IEnumerable<OrderFail> OrderCancelFails { get; }

        /// <summary>
        /// Get all tick trades.
        /// </summary>
        IEnumerable<Trade> Trades { get; }

        /// <summary>
        /// Get all own trades.
        /// </summary>
        IEnumerable<MyTrade> MyTrades { get; }

        /// <summary>
        /// Get all positions.
        /// </summary>
        IEnumerable<Position> Positions { get; }

        /// <summary>
        /// All news.
        /// </summary>
        IEnumerable<News> News { get; }

        /// <summary>
        /// Connection state.
        /// </summary>
        ConnectionStates ConnectionState { get; }

        /// <summary>
        /// Gets a value indicating whether re-registration of orders via the method <see cref="ReRegisterOrder(StockSharp.BusinessEntities.Order,StockSharp.BusinessEntities.Order)"/> is performed as a single transaction.
        /// </summary>
        bool IsSupportAtomicReRegister { get; }

        /// <summary>
        /// List of all securities, subscribed via <see cref="RegisterSecurity"/>.
        /// </summary>
        IEnumerable<Security> RegisteredSecurities { get; }

        /// <summary>
        /// List of all securities, subscribed via <see cref="RegisterMarketDepth"/>.
        /// </summary>
        IEnumerable<Security> RegisteredMarketDepths { get; }

        /// <summary>
        /// List of all securities, subscribed via <see cref="RegisterTrades"/>.
        /// </summary>
        IEnumerable<Security> RegisteredTrades { get; }

        /// <summary>
        /// List of all securities, subscribed via <see cref="RegisterOrderLog"/>.
        /// </summary>
        IEnumerable<Security> RegisteredOrderLogs { get; }

        /// <summary>
        /// List of all portfolios, subscribed via <see cref="RegisterPortfolio"/>.
        /// </summary>
        IEnumerable<Portfolio> RegisteredPortfolios { get; }

        /// <summary>
        /// Transactional adapter.
        /// </summary>
        IMessageAdapter TransactionAdapter { get; }

        /// <summary>
        /// Market-data adapter.
        /// </summary>
        IMessageAdapter MarketDataAdapter { get; }

        /// <summary>
        /// Connect to trading system.
        /// </summary>
        void Connect();

        /// <summary>
        /// Disconnect from trading system.
        /// </summary>
        void Disconnect();

        /// <summary>
        /// To find instruments that match the filter <paramref name="criteria" />. Found instruments will be passed through the event <see cref="LookupSecuritiesResult"/>.
        /// </summary>
        /// <param name="criteria">The instrument whose fields will be used as a filter.</param>
        void LookupSecurities(Security criteria);

        /// <summary>
        /// To find instruments that match the filter <paramref name="criteria" />. Found instruments will be passed through the event <see cref="LookupSecuritiesResult"/>.
        /// </summary>
        /// <param name="criteria">The criterion whose fields will be used as a filter.</param>
        void LookupSecurities(SecurityLookupMessage criteria);

        /// <summary>
        /// To find portfolios that match the filter <paramref name="criteria" />. Found portfolios will be passed through the event <see cref="LookupPortfoliosResult"/>.
        /// </summary>
        /// <param name="criteria">The portfolio whose fields will be used as a filter.</param>
        void LookupPortfolios(Portfolio criteria);

        /// <summary>
        /// To get the position by portfolio and instrument.
        /// </summary>
        /// <param name="portfolio">The portfolio on which the position should be found.</param>
        /// <param name="security">The instrument on which the position should be found.</param>
        /// <param name="depoName">The depository name where the stock is located physically. By default, an empty string is passed, which means the total position by all depositories.</param>
        /// <returns>Position.</returns>
        Position GetPosition(Portfolio portfolio, Security security, string depoName = "");

        /// <summary>
        /// Get filtered order book.
        /// </summary>
        /// <param name="security">The instrument by which an order book should be got.</param>
        /// <returns>Filtered order book.</returns>
        MarketDepth GetFilteredMarketDepth(Security security);

        /// <summary>
        /// Register new order.
        /// </summary>
        /// <param name="order">Registration details.</param>
        void RegisterOrder(Order order);

        /// <summary>
        /// Reregister the order.
        /// </summary>
        /// <param name="oldOrder">Cancelling order.</param>
        /// <param name="newOrder">New order to register.</param>
        void ReRegisterOrder(Order oldOrder, Order newOrder);

        /// <summary>
        /// Reregister the order.
        /// </summary>
        /// <param name="oldOrder">Changing order.</param>
        /// <param name="price">Price of the new order.</param>
        /// <param name="volume">Volume of the new order.</param>
        /// <returns>New order.</returns>
        Order ReRegisterOrder(Order oldOrder, decimal price, decimal volume);

        /// <summary>
        /// Cancel the order.
        /// </summary>
        /// <param name="order">The order which should be canceled.</param>
        void CancelOrder(Order order);

        /// <summary>
        /// Cancel orders by filter.
        /// </summary>
        /// <param name="isStopOrder"><see langword="true" />, if cancel only a stop orders, <see langword="false" /> - if regular orders, <see langword="null" /> - both.</param>
        /// <param name="portfolio">Portfolio. If the value is equal to <see langword="null" />, then the portfolio does not match the orders cancel filter.</param>
        /// <param name="direction">Order side. If the value is <see langword="null" />, the direction does not use.</param>
        /// <param name="board">Trading board. If the value is equal to <see langword="null" />, then the board does not match the orders cancel filter.</param>
        /// <param name="security">Instrument. If the value is equal to <see langword="null" />, then the instrument does not match the orders cancel filter.</param>
        void CancelOrders(bool? isStopOrder = null, Portfolio portfolio = null, Sides? direction = null, ExchangeBoard board = null, Security security = null);

        /// <summary>
        /// To subscribe to get market data by the instrument.
        /// </summary>
        /// <param name="security">The instrument by which new information getting should be started.</param>
        /// <param name="type">Market data type.</param>
        void SubscribeMarketData(Security security, MarketDataTypes type);

        /// <summary>
        /// To unsubscribe from getting market data by the instrument.
        /// </summary>
        /// <param name="security">The instrument by which new information getting should be stopped.</param>
        /// <param name="type">Market data type.</param>
        void UnSubscribeMarketData(Security security, MarketDataTypes type);

        /// <summary>
        /// To start getting quotes (order book) by the instrument. Quotes values are available through the event <see cref="IConnector.MarketDepthsChanged"/>.
        /// </summary>
        /// <param name="security">The instrument by which quotes getting should be started.</param>
        void RegisterMarketDepth(Security security);

        /// <summary>
        /// To stop getting quotes by the instrument.
        /// </summary>
        /// <param name="security">The instrument by which quotes getting should be stopped.</param>
        void UnRegisterMarketDepth(Security security);

        /// <summary>
        /// To start getting filtered quotes (order book) by the instrument. Quotes values are available through <see cref="GetFilteredMarketDepth"/>.
        /// </summary>
        /// <param name="security">The instrument by which quotes getting should be started.</param>
        void RegisterFilteredMarketDepth(Security security);

        /// <summary>
        /// To stop getting filtered quotes by the instrument.
        /// </summary>
        /// <param name="security">The instrument by which quotes getting should be stopped.</param>
        void UnRegisterFilteredMarketDepth(Security security);

        /// <summary>
        /// To start getting trades (tick data) by the instrument. New trades will come through the event <see cref="IConnector.NewTrades"/>.
        /// </summary>
        /// <param name="security">The instrument by which trades getting should be started.</param>
        void RegisterTrades(Security security);

        /// <summary>
        /// To stop getting trades (tick data) by the instrument.
        /// </summary>
        /// <param name="security">The instrument by which trades getting should be stopped.</param>
        void UnRegisterTrades(Security security);

        /// <summary>
        /// To start getting new information (for example, <see cref="Security.LastTrade"/> or <see cref="Security.BestBid"/>) by the instrument.
        /// </summary>
        /// <param name="security">The instrument by which new information getting should be started.</param>
        void RegisterSecurity(Security security);

        /// <summary>
        /// To stop getting new information.
        /// </summary>
        /// <param name="security">The instrument by which new information getting should be stopped.</param>
        void UnRegisterSecurity(Security security);

        /// <summary>
        /// Subscribe on order log for the security.
        /// </summary>
        /// <param name="security">Security for subscription.</param>
        void RegisterOrderLog(Security security);

        /// <summary>
        /// Unsubscribe from order log for the security.
        /// </summary>
        /// <param name="security">Security for unsubscription.</param>
        void UnRegisterOrderLog(Security security);

        /// <summary>
        /// Subscribe on the portfolio changes.
        /// </summary>
        /// <param name="portfolio">Portfolio for subscription.</param>
        void RegisterPortfolio(Portfolio portfolio);

        /// <summary>
        /// Unsubscribe from the portfolio changes.
        /// </summary>
        /// <param name="portfolio">Portfolio for unsubscription.</param>
        void UnRegisterPortfolio(Portfolio portfolio);

        /// <summary>
        /// Subscribe on news.
        /// </summary>
        void RegisterNews();

        /// <summary>
        /// Unsubscribe from news.
        /// </summary>
        void UnRegisterNews();
    }
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// See the LICENSE file in the project root for more information.
//
//
// Authors:
// Marek Habersack <mhabersack@novell.com>
//
// Copyright (C) 2010 Novell, Inc. (http://novell.com/)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Collections.Specialized;
using System.Diagnostics;
using System.Runtime.Caching;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
using MonoTests.Common;
namespace MonoTests.System.Runtime.Caching
{
public class MemoryCacheTest
{
[Fact]
public void ConstructorParameters()
{
    MemoryCache mc;
    var config = new NameValueCollection();

    // Cache name validation: null, empty and the reserved "default" are rejected.
    Assert.Throws<ArgumentNullException>(() => { mc = new MemoryCache(null); });
    Assert.Throws<ArgumentException>(() => { mc = new MemoryCache(string.Empty); });
    Assert.Throws<ArgumentException>(() => { mc = new MemoryCache("default"); });

    // Helper: a config consisting of just this one setting must be rejected.
    Action<string, string> assertRejected = (name, value) =>
    {
        config.Clear();
        config.Add(name, value);
        Assert.Throws<ArgumentException>(() => { mc = new MemoryCache("MyCache", config); });
    };

    assertRejected("CacheMemoryLimitMegabytes", "invalid");
    assertRejected("PhysicalMemoryLimitPercentage", "invalid");
    assertRejected("PollingInterval", "invalid");
    assertRejected("CacheMemoryLimitMegabytes", "-1");
    assertRejected("CacheMemoryLimitMegabytes", UInt64.MaxValue.ToString());
    assertRejected("PhysicalMemoryLimitPercentage", "-1");
    assertRejected("PhysicalMemoryLimitPercentage", UInt64.MaxValue.ToString());
    assertRejected("PhysicalMemoryLimitPercentage", UInt32.MaxValue.ToString());
    assertRejected("PhysicalMemoryLimitPercentage", "-10");

    // 0 percent is accepted; just make sure it doesn't throw any exception.
    config.Clear();
    config.Add("PhysicalMemoryLimitPercentage", "0");
    mc = new MemoryCache("MyCache", config);

    assertRejected("PhysicalMemoryLimitPercentage", "101");

    // Unknown settings are ignored; just make sure it doesn't throw any exception.
    config.Clear();
    config.Add("UnsupportedSetting", "123");
    mc = new MemoryCache("MyCache", config);
}
[Fact]
[PlatformSpecific(TestPlatforms.AnyUnix)] // Negative case for "physicalMemoryLimitPercentage" on non Windows
public void PhysicalMemoryLimitNotSupported()
{
    // A non-zero physical memory percentage must be rejected off-Windows.
    var config = new NameValueCollection { { "PhysicalMemoryLimitPercentage", "99" } };
    Assert.Throws<PlatformNotSupportedException>(() =>
    {
        new MemoryCache("MyCache", config);
    });
}
[Fact]
public void Defaults()
{
    var mc = new MemoryCache("MyCache");

    // Name and the 2-minute polling default.
    Assert.Equal("MyCache", mc.Name);
    Assert.Equal(TimeSpan.FromMinutes(2), mc.PollingInterval);

    // Every capability except regions is advertised.
    const DefaultCacheCapabilities expected =
        DefaultCacheCapabilities.InMemoryProvider |
        DefaultCacheCapabilities.CacheEntryChangeMonitors |
        DefaultCacheCapabilities.AbsoluteExpirations |
        DefaultCacheCapabilities.SlidingExpirations |
        DefaultCacheCapabilities.CacheEntryRemovedCallback |
        DefaultCacheCapabilities.CacheEntryUpdateCallback;
    Assert.Equal(expected, mc.DefaultCacheCapabilities);
}
[Fact]
public void DefaultInstanceDefaults()
{
    var mc = MemoryCache.Default;

    // The shared instance is named "Default" and uses the 2-minute polling default.
    Assert.Equal("Default", mc.Name);
    Assert.Equal(TimeSpan.FromMinutes(2), mc.PollingInterval);

    // Same capability set as a freshly constructed cache.
    const DefaultCacheCapabilities expected =
        DefaultCacheCapabilities.InMemoryProvider |
        DefaultCacheCapabilities.CacheEntryChangeMonitors |
        DefaultCacheCapabilities.AbsoluteExpirations |
        DefaultCacheCapabilities.SlidingExpirations |
        DefaultCacheCapabilities.CacheEntryRemovedCallback |
        DefaultCacheCapabilities.CacheEntryUpdateCallback;
    Assert.Equal(expected, mc.DefaultCacheCapabilities);
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // Uses "physicalMemoryLimitPercentage" not supported on other platforms
public void ConstructorValues()
{
    // Setting names are matched case-insensitively ("pollingInterval" vs "PollingInterval").
    var config = new NameValueCollection
    {
        { "CacheMemoryLimitMegabytes", "1" },
        { "pollingInterval", "00:10:00" },
    };
    var mc = new MemoryCache("MyCache", config);
    Assert.Equal(1 * 1024 * 1024, mc.CacheMemoryLimit); // megabytes are converted to bytes
    Assert.Equal(TimeSpan.FromMinutes(10), mc.PollingInterval);

    config = new NameValueCollection
    {
        { "PhysicalMemoryLimitPercentage", "10" },
        { "CacheMemoryLimitMegabytes", "5" },
        { "PollingInterval", "01:10:00" },
    };
    mc = new MemoryCache("MyCache", config);
    Assert.Equal(10, mc.PhysicalMemoryLimit);
    Assert.Equal(5 * 1024 * 1024, mc.CacheMemoryLimit);
    Assert.Equal(TimeSpan.FromMinutes(70), mc.PollingInterval);
}
[Fact]
public void Indexer()
{
    var mc = new PokerMemoryCache("MyCache");

    // Null key (get or set) and null value are all rejected.
    Assert.Throws<ArgumentNullException>(() => { mc[null] = "value"; });
    Assert.Throws<ArgumentNullException>(() => { object v = mc[null]; });
    Assert.Throws<ArgumentNullException>(() => { mc["key"] = null; });

    // The setter delegates: set_this -> Set(absoluteExpiration) -> Set(policy).
    mc.Calls.Clear();
    mc["key"] = "value";
    Assert.Equal(3, mc.Calls.Count);
    Assert.Equal("set_this [string key]", mc.Calls[0]);
    Assert.Equal("Set (string key, object value, DateTimeOffset absoluteExpiration, string regionName = null)", mc.Calls[1]);
    Assert.Equal("Set (string key, object value, CacheItemPolicy policy, string regionName = null)", mc.Calls[2]);
    Assert.True(mc.Contains("key"));

    // The getter is a single direct call and returns the stored value.
    mc.Calls.Clear();
    object stored = mc["key"];
    Assert.Equal(1, mc.Calls.Count);
    Assert.Equal("get_this [string key]", mc.Calls[0]);
    Assert.Equal("value", stored);
}
/// <summary>
/// Contains: argument validation, hit on a live entry, and miss after absolute expiration.
/// </summary>
[Fact]
public void Contains()
{
    var mc = new PokerMemoryCache("MyCache");

    // Null key is invalid; regions are not supported by MemoryCache.
    Assert.Throws<ArgumentNullException>(() =>
    {
        mc.Contains(null);
    });
    Assert.Throws<NotSupportedException>(() =>
    {
        mc.Contains("key", "region");
    });

    mc.Set("key", "value", ObjectCache.InfiniteAbsoluteExpiration);
    Assert.True(mc.Contains("key"));

    var cip = new CacheItemPolicy();
    cip.Priority = CacheItemPriority.NotRemovable;
    cip.AbsoluteExpiration = DateTimeOffset.Now.AddMilliseconds(50);
    mc.Set("key", "value", cip);
    // NOTE(review): this assertion races the 50 ms expiry set just above — on a
    // heavily loaded machine the entry may expire before Contains runs.
    Assert.True(mc.Contains("key"));

    // wait past cip.AbsoluteExpiration
    Thread.Sleep(500);

    // Attempt to retrieve an expired entry
    Assert.False(mc.Contains("key"));
}
[Fact]
public void CreateCacheEntryChangeMonitor()
{
    var mc = new PokerMemoryCache("MyCache");

    // Argument validation: regions unsupported, key set must be non-null,
    // non-empty and contain no null keys.
    Assert.Throws<NotSupportedException>(() =>
    {
        mc.CreateCacheEntryChangeMonitor(new string[] { "key" }, "region");
    });
    Assert.Throws<ArgumentNullException>(() =>
    {
        mc.CreateCacheEntryChangeMonitor(null);
    });
    Assert.Throws<ArgumentException>(() =>
    {
        mc.CreateCacheEntryChangeMonitor(new string[] { });
    });
    Assert.Throws<ArgumentException>(() =>
    {
        mc.CreateCacheEntryChangeMonitor(new string[] { "key", null });
    });

    mc.Set("key1", "value1", ObjectCache.InfiniteAbsoluteExpiration);
    mc.Set("key2", "value2", ObjectCache.InfiniteAbsoluteExpiration);
    mc.Set("key3", "value3", ObjectCache.InfiniteAbsoluteExpiration);

    // Monitoring two existing keys: the monitor starts out unchanged.
    CacheEntryChangeMonitor monitor = mc.CreateCacheEntryChangeMonitor(new string[] { "key1", "key2" });
    Assert.NotNull(monitor);
    Assert.Equal("System.Runtime.Caching.MemoryCacheEntryChangeMonitor", monitor.GetType().ToString());
    Assert.Equal(2, monitor.CacheKeys.Count);
    Assert.Equal("key1", monitor.CacheKeys[0]);
    Assert.Equal("key2", monitor.CacheKeys[1]);
    Assert.Null(monitor.RegionName);
    Assert.False(monitor.HasChanged);
    // The actual unique id is constructed from key names followed by the hex value of ticks of their last modified time
    Assert.False(string.IsNullOrEmpty(monitor.UniqueId));

    // Monitoring a key that does not exist flips HasChanged immediately.
    monitor = mc.CreateCacheEntryChangeMonitor(new string[] { "key1", "doesnotexist" });
    Assert.NotNull(monitor);
    Assert.Equal("System.Runtime.Caching.MemoryCacheEntryChangeMonitor", monitor.GetType().ToString());
    Assert.Equal(2, monitor.CacheKeys.Count);
    Assert.Equal("key1", monitor.CacheKeys[0]);
    Assert.Null(monitor.RegionName);
    Assert.True(monitor.HasChanged);
}
[Fact]
public void AddOrGetExisting_String_Object_DateTimeOffset_String()
{
    var mc = new PokerMemoryCache("MyCache");

    // Argument validation: key and value must be non-null; regions are unsupported.
    Assert.Throws<ArgumentNullException>(() =>
    {
        mc.AddOrGetExisting(null, "value", DateTimeOffset.Now);
    });
    Assert.Throws<ArgumentNullException>(() =>
    {
        mc.AddOrGetExisting("key", null, DateTimeOffset.Now);
    });
    Assert.Throws<NotSupportedException>(() =>
    {
        mc.AddOrGetExisting("key", "value", DateTimeOffset.Now, "region");
    });

    // First call inserts and returns null (no pre-existing entry).
    object existing = mc.AddOrGetExisting("key3_A2-1", "value", DateTimeOffset.Now.AddMinutes(1));
    Assert.True(mc.Contains("key3_A2-1"));
    Assert.Null(existing);

    // Second call returns the already stored value and leaves it in place.
    mc.Calls.Clear();
    existing = mc.AddOrGetExisting("key3_A2-1", "value2", DateTimeOffset.Now.AddMinutes(1));
    Assert.True(mc.Contains("key3_A2-1"));
    Assert.NotNull(existing);
    Assert.Equal("value", existing);
    Assert.Equal(2, mc.Calls.Count);
    Assert.Equal("AddOrGetExisting (string key, object value, DateTimeOffset absoluteExpiration, string regionName = null)", mc.Calls[0]);

    // An already-expired expiration never stores the entry.
    existing = mc.AddOrGetExisting("key_expired", "value", DateTimeOffset.MinValue);
    Assert.False(mc.Contains("key_expired"));
    Assert.Null(existing);
}
[Fact]
public void AddOrGetExisting_String_Object_CacheItemPolicy_String()
{
    var mc = new PokerMemoryCache("MyCache");

    // Null key / null value are rejected even with a null policy.
    Assert.Throws<ArgumentNullException>(() =>
    {
        mc.AddOrGetExisting(null, "value", null);
    });
    Assert.Throws<ArgumentNullException>(() =>
    {
        mc.AddOrGetExisting("key", null, null);
    });

    // Absolute and sliding expiration are mutually exclusive on one policy.
    var cip = new CacheItemPolicy
    {
        AbsoluteExpiration = DateTime.Now.AddMinutes(1),
        SlidingExpiration = TimeSpan.FromMinutes(1),
    };
    Assert.Throws<ArgumentException>(() =>
    {
        mc.AddOrGetExisting("key", "value", cip);
    });

    // Out-of-range sliding expirations and undefined priorities are rejected.
    cip = new CacheItemPolicy { SlidingExpiration = TimeSpan.MinValue };
    Assert.Throws<ArgumentOutOfRangeException>(() =>
    {
        mc.AddOrGetExisting("key3", "value", cip);
    });
    Assert.Throws<NotSupportedException>(() =>
    {
        mc.AddOrGetExisting("key", "value", null, "region");
    });
    cip = new CacheItemPolicy { SlidingExpiration = TimeSpan.FromDays(500) };
    Assert.Throws<ArgumentOutOfRangeException>(() =>
    {
        mc.AddOrGetExisting("key3", "value", cip);
    });
    cip = new CacheItemPolicy { Priority = (CacheItemPriority)20 };
    Assert.Throws<ArgumentOutOfRangeException>(() =>
    {
        mc.AddOrGetExisting("key3", "value", cip);
    });

    // Boundary sliding-expiration values that are accepted: zero and 365 days.
    cip = new CacheItemPolicy { SlidingExpiration = TimeSpan.FromTicks(0L) };
    mc.AddOrGetExisting("key3_A2-1", "value", cip);
    Assert.True(mc.Contains("key3_A2-1"));

    cip = new CacheItemPolicy { SlidingExpiration = TimeSpan.FromDays(365) };
    mc.AddOrGetExisting("key3_A2-2", "value", cip);
    Assert.True(mc.Contains("key3_A2-2"));

    // First insert returns null; a second call returns the stored value.
    cip = new CacheItemPolicy { RemovedCallback = (CacheEntryRemovedArguments arguments) => { } };
    object existing = mc.AddOrGetExisting("key3_A2-3", "value", cip);
    Assert.True(mc.Contains("key3_A2-3"));
    Assert.Null(existing);

    mc.Calls.Clear();
    existing = mc.AddOrGetExisting("key3_A2-3", "value2", null);
    Assert.True(mc.Contains("key3_A2-3"));
    Assert.NotNull(existing);
    Assert.Equal("value", existing);
    Assert.Equal(2, mc.Calls.Count);
    Assert.Equal("AddOrGetExisting (string key, object value, CacheItemPolicy policy, string regionName = null)", mc.Calls[0]);

    // An already-expired absolute expiration never stores the entry.
    cip = new CacheItemPolicy { AbsoluteExpiration = DateTimeOffset.MinValue };
    existing = mc.AddOrGetExisting("key_expired", "value", cip);
    Assert.False(mc.Contains("key_expired"));
    Assert.Null(existing);
}
// Exercises the AddOrGetExisting(CacheItem, CacheItemPolicy) overload:
// argument validation, policy validation and the shape of the returned CacheItem.
[Fact]
public void AddOrGetExisting_CacheItem_CacheItemPolicy()
{
    var mc = new PokerMemoryCache("MyCache");
    CacheItem ci, ci2;

    // A null CacheItem is rejected.
    Assert.Throws<ArgumentNullException>(() =>
    {
        ci = mc.AddOrGetExisting(null, new CacheItemPolicy());
    });

    // First insert: the returned item is a new instance with a null Value.
    ci = new CacheItem("key", "value");
    ci2 = mc.AddOrGetExisting(ci, null);
    Assert.NotNull(ci2);
    Assert.NotEqual(ci, ci2);
    Assert.Null(ci2.Value);
    Assert.True(mc.Contains(ci.Key));
    Assert.Equal(ci.Key, ci2.Key);

    // Second insert with the same key: the returned item carries the stored value.
    ci = new CacheItem("key", "value");
    ci2 = mc.AddOrGetExisting(ci, null);
    Assert.NotNull(ci2);
    Assert.NotEqual(ci, ci2);
    Assert.NotNull(ci2.Value);
    Assert.Equal(ci.Value, ci2.Value);
    Assert.Equal(ci.Key, ci2.Key);

    // A null item key is rejected, but an empty key is accepted.
    Assert.Throws<ArgumentNullException>(() =>
    {
        ci = new CacheItem(null, "value");
        ci2 = mc.AddOrGetExisting(ci, null);
    });

    ci = new CacheItem(string.Empty, "value");
    ci2 = mc.AddOrGetExisting(ci, null);
    Assert.NotNull(ci2);
    Assert.NotEqual(ci, ci2);
    Assert.Null(ci2.Value);
    Assert.True(mc.Contains(ci.Key));
    Assert.Equal(ci.Key, ci2.Key);

    // A null item value is rejected.
    ci = new CacheItem("key2", null);
    // Thrown from:
    // at System.Runtime.Caching.MemoryCacheEntry..ctor(String key, Object value, DateTimeOffset absExp, TimeSpan slidingExp, CacheItemPriority priority, Collection`1 dependencies, CacheEntryRemovedCallback removedCallback, MemoryCache cache)
    // at System.Runtime.Caching.MemoryCache.AddOrGetExistingInternal(String key, Object value, CacheItemPolicy policy)
    // at System.Runtime.Caching.MemoryCache.AddOrGetExisting(CacheItem item, CacheItemPolicy policy)
    // at MonoTests.System.Runtime.Caching.MemoryCacheTest.AddOrGetExisting_CacheItem_CacheItemPolicy() in C:\Users\grendel\documents\visual studio 2010\Projects\System.Runtime.Caching.Test\System.Runtime.Caching.Test\System.Runtime.Caching\MemoryCacheTest.cs:line 211
    Assert.Throws<ArgumentNullException>(() =>
    {
        ci2 = mc.AddOrGetExisting(ci, null);
    });

    // A policy with an UpdateCallback is rejected by this overload.
    ci = new CacheItem("key3", "value");
    var cip = new CacheItemPolicy();
    cip.UpdateCallback = (CacheEntryUpdateArguments arguments) => { };
    Assert.Throws<ArgumentException>(() =>
    {
        ci2 = mc.AddOrGetExisting(ci, cip);
    });

    // Absolute and sliding expiration are mutually exclusive.
    ci = new CacheItem("key3", "value");
    cip = new CacheItemPolicy();
    cip.AbsoluteExpiration = DateTimeOffset.Now;
    cip.SlidingExpiration = TimeSpan.FromTicks(DateTime.Now.Ticks);
    Assert.Throws<ArgumentException>(() =>
    {
        mc.AddOrGetExisting(ci, cip);
    });

    // Sliding expiration below zero is out of range; zero itself is accepted.
    ci = new CacheItem("key3", "value");
    cip = new CacheItemPolicy();
    cip.SlidingExpiration = TimeSpan.MinValue;
    Assert.Throws<ArgumentOutOfRangeException>(() =>
    {
        mc.AddOrGetExisting(ci, cip);
    });

    ci = new CacheItem("key4_#B4-2", "value");
    cip = new CacheItemPolicy();
    cip.SlidingExpiration = TimeSpan.FromTicks(0L);
    mc.AddOrGetExisting(ci, cip);
    Assert.True(mc.Contains("key4_#B4-2"));

    // Sliding expiration above 365 days is out of range; 365 days exactly is accepted.
    ci = new CacheItem("key3", "value");
    cip = new CacheItemPolicy();
    cip.SlidingExpiration = TimeSpan.FromDays(500);
    Assert.Throws<ArgumentOutOfRangeException>(() =>
    {
        mc.AddOrGetExisting(ci, cip);
    });

    ci = new CacheItem("key5_#B5-2", "value");
    cip = new CacheItemPolicy();
    cip.SlidingExpiration = TimeSpan.FromDays(365);
    mc.AddOrGetExisting(ci, cip);
    Assert.True(mc.Contains("key5_#B5-2"));

    // Priority must be a defined CacheItemPriority value.
    ci = new CacheItem("key3", "value");
    cip = new CacheItemPolicy();
    cip.Priority = (CacheItemPriority)20;
    Assert.Throws<ArgumentOutOfRangeException>(() =>
    {
        mc.AddOrGetExisting(ci, cip);
    });

    // A RemovedCallback alone is accepted; first insert still returns an
    // item with a null Value.
    ci = new CacheItem("key3_B7", "value");
    cip = new CacheItemPolicy();
    cip.RemovedCallback = (CacheEntryRemovedArguments arguments) => { };
    ci2 = mc.AddOrGetExisting(ci, cip);
    Assert.True(mc.Contains("key3_B7"));
    Assert.NotNull(ci2);
    Assert.NotEqual(ci, ci2);
    Assert.Null(ci2.Value);
    Assert.True(mc.Contains(ci.Key));
    Assert.Equal(ci.Key, ci2.Key);

    // The entry is never inserted as its expiration date is before now,
    // but a (value-less) CacheItem is still returned.
    ci = new CacheItem("key_D1", "value_D1");
    cip = new CacheItemPolicy();
    cip.AbsoluteExpiration = DateTimeOffset.MinValue;
    ci2 = mc.AddOrGetExisting(ci, cip);
    Assert.False(mc.Contains("key_D1"));
    Assert.NotNull(ci2);
    Assert.Null(ci2.Value);
    Assert.Equal("key_D1", ci2.Key);

    // A single call is recorded through two overloads (Calls captures both).
    mc.Calls.Clear();
    ci = new CacheItem("key_D2", "value_D2");
    cip = new CacheItemPolicy();
    cip.AbsoluteExpiration = DateTimeOffset.MaxValue;
    mc.AddOrGetExisting(ci, cip);
    Assert.True(mc.Contains("key_D2"));
    Assert.Equal(2, mc.Calls.Count);
    Assert.Equal("AddOrGetExisting (CacheItem item, CacheItemPolicy policy)", mc.Calls[0]);
}
// Exercises argument and policy validation for the
// Set(string, object, CacheItemPolicy, string) overload.
[Fact]
public void Set_String_Object_CacheItemPolicy_String()
{
    var mc = new PokerMemoryCache("MyCache");

    // Regions are not supported; null key / null value are rejected.
    Assert.Throws<NotSupportedException>(() =>
    {
        mc.Set("key", "value", new CacheItemPolicy(), "region");
    });
    Assert.Throws<ArgumentNullException>(() =>
    {
        mc.Set(null, "value", new CacheItemPolicy());
    });
    Assert.Throws<ArgumentNullException>(() =>
    {
        mc.Set("key", null, new CacheItemPolicy());
    });

    // UpdateCallback and RemovedCallback cannot both be set on one policy.
    var cip = new CacheItemPolicy();
    cip.UpdateCallback = (CacheEntryUpdateArguments arguments) => { };
    cip.RemovedCallback = (CacheEntryRemovedArguments arguments) => { };
    Assert.Throws<ArgumentException>(() =>
    {
        mc.Set("key", "value", cip);
    });

    // Sliding expiration below zero is out of range; zero itself is accepted.
    cip = new CacheItemPolicy();
    cip.SlidingExpiration = TimeSpan.MinValue;
    Assert.Throws<ArgumentOutOfRangeException>(() =>
    {
        mc.Set("key", "value", cip);
    });

    cip = new CacheItemPolicy();
    cip.SlidingExpiration = TimeSpan.FromTicks(0L);
    mc.Set("key_A1-6", "value", cip);
    Assert.True(mc.Contains("key_A1-6"));

    // Sliding expiration above 365 days is out of range; 365 days exactly is accepted.
    cip = new CacheItemPolicy();
    cip.SlidingExpiration = TimeSpan.FromDays(500);
    Assert.Throws<ArgumentOutOfRangeException>(() =>
    {
        mc.Set("key", "value", cip);
    });

    cip = new CacheItemPolicy();
    cip.SlidingExpiration = TimeSpan.FromDays(365);
    mc.Set("key_A1-8", "value", cip);
    Assert.True(mc.Contains("key_A1-8"));

    // Priority must be a defined CacheItemPriority value.
    cip = new CacheItemPolicy();
    cip.Priority = (CacheItemPriority)20;
    Assert.Throws<ArgumentOutOfRangeException>(() =>
    {
        mc.Set("key", "value", cip);
    });

    // A RemovedCallback on its own is accepted.
    cip = new CacheItemPolicy();
    cip.RemovedCallback = (CacheEntryRemovedArguments arguments) => { };
    mc.Set("key_A2", "value_A2", cip);
    Assert.True(mc.Contains("key_A2"));

    mc.Set("key_A3", "value_A3", new CacheItemPolicy());
    Assert.True(mc.Contains("key_A3"));
    Assert.Equal("value_A3", mc.Get("key_A3"));

    // The entry is never inserted as its expiration date is before now
    cip = new CacheItemPolicy();
    cip.AbsoluteExpiration = DateTimeOffset.MinValue;
    mc.Set("key_A4", "value_A4", cip);
    Assert.False(mc.Contains("key_A4"));

    // A single Set call is recorded through two overloads (Calls captures both).
    mc.Calls.Clear();
    cip = new CacheItemPolicy();
    cip.AbsoluteExpiration = DateTimeOffset.MaxValue;
    mc.Set("key_A5", "value_A5", cip);
    Assert.True(mc.Contains("key_A5"));
    Assert.Equal(2, mc.Calls.Count);
    Assert.Equal("Set (string key, object value, CacheItemPolicy policy, string regionName = null)", mc.Calls[0]);
}
[Fact]
public void Set_String_Object_DateTimeOffset_String()
{
    var cache = new PokerMemoryCache("MyCache");

    // Regions are not supported; null key and null value are rejected.
    Assert.Throws<NotSupportedException>(() => cache.Set("key", "value", DateTimeOffset.MaxValue, "region"));
    Assert.Throws<ArgumentNullException>(() => cache.Set(null, "value", DateTimeOffset.MaxValue));
    Assert.Throws<ArgumentNullException>(() => cache.Set("key", null, DateTimeOffset.MaxValue));

    // The entry is never inserted as its expiration date is before now
    cache.Set("key_A2", "value_A2", DateTimeOffset.MinValue);
    Assert.False(cache.Contains("key_A2"));

    // The DateTimeOffset overload delegates to the CacheItemPolicy overload;
    // Calls records both entry points in order.
    cache.Calls.Clear();
    cache.Set("key", "value", DateTimeOffset.MaxValue);
    Assert.Equal(2, cache.Calls.Count);
    Assert.Equal("Set (string key, object value, DateTimeOffset absoluteExpiration, string regionName = null)", cache.Calls[0]);
    Assert.Equal("Set (string key, object value, CacheItemPolicy policy, string regionName = null)", cache.Calls[1]);
}
// Exercises validation for the Set(CacheItem, CacheItemPolicy) overload;
// the Calls assertions at the end show it delegates to
// Set(string, object, CacheItemPolicy, string).
[Fact]
public void Set_CacheItem_CacheItemPolicy()
{
    var mc = new PokerMemoryCache("MyCache");

    // A null item is rejected.
    Assert.Throws<ArgumentNullException>(() =>
    {
        mc.Set(null, new CacheItemPolicy());
    });

    // Actually thrown from the Set (string, object, CacheItemPolicy, string) overload
    var ci = new CacheItem(null, "value");
    Assert.Throws<ArgumentNullException>(() =>
    {
        mc.Set(ci, new CacheItemPolicy());
    });

    // A null item value is rejected as well.
    ci = new CacheItem("key", null);
    Assert.Throws<ArgumentNullException>(() =>
    {
        mc.Set(ci, new CacheItemPolicy());
    });

    // UpdateCallback and RemovedCallback cannot both be set on one policy.
    ci = new CacheItem("key", "value");
    var cip = new CacheItemPolicy();
    cip.UpdateCallback = (CacheEntryUpdateArguments arguments) => { };
    cip.RemovedCallback = (CacheEntryRemovedArguments arguments) => { };
    Assert.Throws<ArgumentException>(() =>
    {
        mc.Set(ci, cip);
    });

    // Sliding expiration below zero is out of range; zero itself is accepted.
    ci = new CacheItem("key", "value");
    cip = new CacheItemPolicy();
    cip.SlidingExpiration = TimeSpan.MinValue;
    Assert.Throws<ArgumentOutOfRangeException>(() =>
    {
        mc.Set(ci, cip);
    });

    ci = new CacheItem("key_A1-6", "value");
    cip = new CacheItemPolicy();
    cip.SlidingExpiration = TimeSpan.FromTicks(0L);
    mc.Set(ci, cip);
    Assert.True(mc.Contains("key_A1-6"));

    // Sliding expiration above 365 days is out of range; 365 days exactly is accepted.
    ci = new CacheItem("key", "value");
    cip = new CacheItemPolicy();
    cip.SlidingExpiration = TimeSpan.FromDays(500);
    Assert.Throws<ArgumentOutOfRangeException>(() =>
    {
        mc.Set(ci, cip);
    });

    ci = new CacheItem("key_A1-8", "value");
    cip = new CacheItemPolicy();
    cip.SlidingExpiration = TimeSpan.FromDays(365);
    mc.Set(ci, cip);
    Assert.True(mc.Contains("key_A1-8"));

    // Priority must be a defined CacheItemPriority value.
    ci = new CacheItem("key", "value");
    cip = new CacheItemPolicy();
    cip.Priority = (CacheItemPriority)20;
    Assert.Throws<ArgumentOutOfRangeException>(() =>
    {
        mc.Set(ci, cip);
    });

    // A RemovedCallback on its own is accepted.
    ci = new CacheItem("key_A2", "value_A2");
    cip = new CacheItemPolicy();
    cip.RemovedCallback = (CacheEntryRemovedArguments arguments) => { };
    mc.Set(ci, cip);
    Assert.True(mc.Contains("key_A2"));

    ci = new CacheItem("key_A3", "value_A3");
    mc.Set(ci, new CacheItemPolicy());
    Assert.True(mc.Contains("key_A3"));
    Assert.Equal("value_A3", mc.Get("key_A3"));

    // The entry is never inserted as its expiration date is before now
    ci = new CacheItem("key_A4", "value");
    cip = new CacheItemPolicy();
    cip.AbsoluteExpiration = DateTimeOffset.MinValue;
    mc.Set(ci, cip);
    Assert.False(mc.Contains("key_A4"));

    // The CacheItem overload records itself followed by the overload it delegates to.
    ci = new CacheItem("key_A5", "value");
    mc.Calls.Clear();
    mc.Set(ci, new CacheItemPolicy());
    Assert.Equal(2, mc.Calls.Count);
    Assert.Equal("Set (CacheItem item, CacheItemPolicy policy)", mc.Calls[0]);
    Assert.Equal("Set (string key, object value, CacheItemPolicy policy, string regionName = null)", mc.Calls[1]);
}
// Verifies Remove(): region/null-key validation, removal of NotRemovable
// entries, and which callbacks fire on explicit removal.
[Fact]
public void Remove()
{
    var mc = new PokerMemoryCache("MyCache");

    Assert.Throws<NotSupportedException>(() =>
    {
        mc.Remove("key", "region");
    });
    Assert.Throws<ArgumentNullException>(() =>
    {
        mc.Remove(null);
    });

    bool callbackInvoked;
    // Sentinel value outside the defined enum range so a callback
    // writing RemovedReason is detectable.
    CacheEntryRemovedReason reason = (CacheEntryRemovedReason)1000;

    // An explicit Remove succeeds even for a NotRemovable entry.
    var cip = new CacheItemPolicy();
    cip.Priority = CacheItemPriority.NotRemovable;
    mc.Set("key2", "value1", cip);
    object value = mc.Remove("key2");
    Assert.NotNull(value);
    Assert.False(mc.Contains("key2"));

    // RemovedCallback fires on explicit removal, with reason Removed.
    cip = new CacheItemPolicy();
    cip.RemovedCallback = (CacheEntryRemovedArguments args) =>
    {
        callbackInvoked = true;
        reason = args.RemovedReason;
    };
    mc.Set("key", "value", cip);
    callbackInvoked = false;
    reason = (CacheEntryRemovedReason)1000;
    value = mc.Remove("key");
    Assert.NotNull(value);
    Assert.True(callbackInvoked);
    Assert.Equal(CacheEntryRemovedReason.Removed, reason);

    // An exception thrown from the RemovedCallback does not propagate
    // out of Remove.
    cip = new CacheItemPolicy();
    cip.RemovedCallback = (CacheEntryRemovedArguments args) =>
    {
        callbackInvoked = true;
        reason = args.RemovedReason;
        throw new ApplicationException("test");
    };
    mc.Set("key", "value", cip);
    callbackInvoked = false;
    reason = (CacheEntryRemovedReason)1000;
    value = mc.Remove("key");
    Assert.NotNull(value);
    Assert.True(callbackInvoked);
    Assert.Equal(CacheEntryRemovedReason.Removed, reason);

    // UpdateCallback is NOT invoked for an explicit removal...
    cip = new CacheItemPolicy();
    cip.UpdateCallback = (CacheEntryUpdateArguments args) =>
    {
        callbackInvoked = true;
        reason = args.RemovedReason;
    };
    mc.Set("key", "value", cip);
    callbackInvoked = false;
    reason = (CacheEntryRemovedReason)1000;
    value = mc.Remove("key");
    Assert.NotNull(value);
    Assert.False(callbackInvoked);

    // ...whether or not the callback would throw.
    cip = new CacheItemPolicy();
    cip.UpdateCallback = (CacheEntryUpdateArguments args) =>
    {
        callbackInvoked = true;
        reason = args.RemovedReason;
        throw new ApplicationException("test");
    };
    mc.Set("key", "value", cip);
    callbackInvoked = false;
    reason = (CacheEntryRemovedReason)1000;
    value = mc.Remove("key");
    Assert.NotNull(value);
    Assert.False(callbackInvoked);
}
[Fact]
public void GetValues()
{
    var cache = new PokerMemoryCache("MyCache");

    // Argument validation: null array, region parameter, null element.
    Assert.Throws<ArgumentNullException>(() => cache.GetValues((string[])null));
    Assert.Throws<NotSupportedException>(() => cache.GetValues(new string[] { }, "region"));
    Assert.Throws<ArgumentException>(() => cache.GetValues(new string[] { "key", null }));

    // An empty key collection yields null rather than an empty dictionary.
    IDictionary<string, object> result = cache.GetValues(new string[] { });
    Assert.Null(result);

    cache.Set("key1", "value1", null);
    cache.Set("key2", "value2", null);
    cache.Set("key3", "value3", null);
    Assert.True(cache.Contains("key1"));
    Assert.True(cache.Contains("key2"));
    Assert.True(cache.Contains("key3"));

    // Only the requested keys come back, as a concrete Dictionary<string, object>.
    result = cache.GetValues(new string[] { "key1", "key3" });
    Assert.NotNull(result);
    Assert.Equal(2, result.Count);
    Assert.Equal("value1", result["key1"]);
    Assert.Equal("value3", result["key3"]);
    Assert.Equal(typeof(Dictionary<string, object>), result.GetType());

    // MSDN says the number of items in the returned dictionary should be the same as in the
    // 'keys' collection - this is not the case. The returned dictionary contains only entries for keys
    // that exist in the cache.
    result = cache.GetValues(new string[] { "key1", "key3", "nosuchkey" });
    Assert.NotNull(result);
    Assert.Equal(2, result.Count);
    Assert.Equal("value1", result["key1"]);
    Assert.Equal("value3", result["key3"]);

    // Lookup in the returned dictionary is case sensitive.
    Assert.False(result.ContainsKey("Key1"));
}
[Fact]
public void ChangeMonitors()
{
    bool removed = false;
    var cache = new PokerMemoryCache("MyCache");
    var policy = new CacheItemPolicy();
    var monitor = new PokerChangeMonitor();

    policy.ChangeMonitors.Add(monitor);
    policy.RemovedCallback = (CacheEntryRemovedArguments args) => { removed = true; };

    cache.Set("key", "value", policy);
    Assert.Equal(0, monitor.Calls.Count);

    // Signalling the monitor evicts the entry and fires the removed callback.
    monitor.SignalChange();
    Assert.True(removed);

    // A monitor that already has an OnChanged callback registered cannot be
    // attached to a new cache entry.
    bool onChangedCalled = false;
    monitor = new PokerChangeMonitor();
    monitor.NotifyOnChanged((object state) => { onChangedCalled = true; });

    policy = new CacheItemPolicy();
    policy.ChangeMonitors.Add(monitor);

    // Thrown by ChangeMonitor.NotifyOnChanged
    Assert.Throws<InvalidOperationException>(() => cache.Set("key1", "value1", policy));
    Assert.False(onChangedCalled);
}
// Due to internal implementation details Trim has very few easily verifiable scenarios
[Fact]
public void Trim()
{
    var config = new NameValueCollection();
    // Mono-specific switch — presumably makes the cache behave as on a
    // single-CPU machine (one internal store); TODO confirm against
    // the MemoryCache implementation.
    config["__MonoEmulateOneCPU"] = "true";
    var mc = new MemoryCache("MyCache", config);

    var numCpuCores = Environment.ProcessorCount;
    var numItems = numCpuCores > 1 ? numCpuCores / 2 : 1;
    for (int i = 0; i < numItems;)
    {
        // NOTE: ++i inside the expression both builds the key suffix and
        // advances the loop counter — the key is "key{i^3}key{i+1}".
        var key = "key" + i*i*i + "key" + ++i;
        mc.Set(key, "value" + i.ToString(), null);
    }
    Assert.Equal(numItems, mc.GetCount());

    // Trimming 75% for such a small number of items (supposedly each in its cache store) will end up trimming all of them
    long trimmed = mc.Trim(75);
    Assert.Equal(numItems, trimmed);
    Assert.Equal(0, mc.GetCount());

    // NotRemovable entries survive a Trim untouched.
    mc = new MemoryCache("MyCache", config);
    var cip = new CacheItemPolicy();
    cip.Priority = CacheItemPriority.NotRemovable;
    for (int i = 0; i < 11; i++)
    {
        mc.Set("key" + i.ToString(), "value" + i.ToString(), cip);
    }
    Assert.Equal(11, mc.GetCount());
    trimmed = mc.Trim(50);
    Assert.Equal(11, mc.GetCount());
}
// Verifies that calling GetValues() over expired keys flushes them,
// bringing the cache count back to zero.
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // Uses "physicalMemoryLimitPercentage" not supported on other platforms
public void TestExpiredGetValues()
{
    var config = new NameValueCollection();
    config["cacheMemoryLimitMegabytes"] = 0.ToString();
    config["physicalMemoryLimitPercentage"] = 100.ToString();
    config["pollingInterval"] = new TimeSpan(0, 0, 10).ToString();

    using (var mc = new MemoryCache("TestExpiredGetValues", config))
    {
        Assert.Equal(0, mc.GetCount());

        var keys = new List<string>();

        // add some short duration entries
        for (int i = 0; i < 10; i++)
        {
            var key = "short-" + i;
            var expireAt = DateTimeOffset.Now.AddMilliseconds(50);
            mc.Add(key, i.ToString(), expireAt);
            keys.Add(key);
        }
        Assert.Equal(10, mc.GetCount());

        // wait past expiration and call GetValues() - this does not affect the count
        Thread.Sleep(100);

        // After GetValues() touches the expired entries the count drops to zero.
        mc.GetValues(keys);
        Assert.Equal(0, mc.GetCount());
    }
}
// Verifies sliding expiration: an entry that is read repeatedly keeps being
// renewed, while untouched entries with the same policy expire.
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // Uses "physicalMemoryLimitPercentage" not supported on other platforms
[OuterLoop] // makes long wait
public void TestCacheSliding()
{
    var config = new NameValueCollection();
    config["cacheMemoryLimitMegabytes"] = 0.ToString();
    config["physicalMemoryLimitPercentage"] = 100.ToString();
    config["pollingInterval"] = new TimeSpan(0, 0, 1).ToString();

    using (var mc = new MemoryCache("TestCacheSliding", config))
    {
        Assert.Equal(0, mc.GetCount());

        var cip = new CacheItemPolicy();
        // The sliding expiration timeout has to be greater than 1 second because
        // .NET implementation ignores timeouts updates smaller than
        // CacheExpires.MIN_UPDATE_DELTA which is equal to 1.
        const int SlidingExpirationThresholdMSec = 4000;
        cip.SlidingExpiration = TimeSpan.FromMilliseconds(SlidingExpirationThresholdMSec);
        mc.Add("slidingtest", "42", cip);

        // These entries share the policy but are never read again, so they expire.
        mc.Add("expire1", "1", cip);
        mc.Add("expire2", "2", cip);
        mc.Add("expire3", "3", cip);
        mc.Add("expire4", "4", cip);
        mc.Add("expire5", "5", cip);
        Assert.Equal(6, mc.GetCount());

        // The loop below would sleep for ~5 seconds total (in 50 intervals).
        // Each of these intervals is only supposed to be ~100ms.
        // However due to concurrency with other tests and various system conditions,
        // we observe occasional delays that are much longer than the SlidingExpirationThresholdMSec
        // expiration period which causes the "slidingtest" cache item to expire
        Stopwatch sw = new Stopwatch();
        for (int i = 0; i < 50; i++)
        {
            sw.Restart();
            Thread.Sleep(100);
            var item = mc.Get("slidingtest");
            sw.Stop();

            if (sw.ElapsedMilliseconds < SlidingExpirationThresholdMSec)
            {
                // Each Get renews the sliding window, so the item must still be
                // present. (Assert.NotNull, per xUnit analyzer rule xUnit2003,
                // instead of Assert.NotEqual(null, item).)
                Assert.NotNull(item);
            }
            else
            {
                // for the sake of simplicity skip an inversed assert here (Assert.Equal(null, item))
                // (to avoid further complicating the test as we would need to address a few more subtle timing cases)
            }
        }

        // The untouched entries expired while "slidingtest" was being kept alive.
        Assert.Null(mc.Get("expire1"));
        Assert.Null(mc.Get("expire2"));
        Assert.Null(mc.Get("expire3"));
        Assert.Null(mc.Get("expire4"));
        Assert.Null(mc.Get("expire5"));
        Assert.Equal(1, mc.GetCount());

        // Once no longer touched, "slidingtest" expires as well.
        Thread.Sleep(SlidingExpirationThresholdMSec + 1000);
        Assert.Null(mc.Get("slidingtest"));
        Assert.Equal(0, mc.GetCount());
    }
}
}
// Verifies that an entry with an absolute expiration is reported as gone by
// Get() and that its RemovedCallback eventually fires with reason Expired.
public class MemoryCacheTestExpires1
{
    [Fact]
    [OuterLoop] // makes long wait
    public async Task TimedExpirationAsync()
    {
        bool removed = false;
        // Seed with a reason the callback never receives here, so a write is detectable.
        CacheEntryRemovedReason reason = CacheEntryRemovedReason.CacheSpecificEviction;
        int sleepPeriod = 20000;

        var mc = new PokerMemoryCache("MyCache");
        var cip = new CacheItemPolicy();
        cip.RemovedCallback = (CacheEntryRemovedArguments args) =>
        {
            removed = true;
            reason = args.RemovedReason;
        };
        cip.AbsoluteExpiration = DateTimeOffset.Now.AddMilliseconds(50);
        mc.Set("key", "value", cip);

        // Wait past cip.AbsoluteExpiration
        Thread.Sleep(500);

        object value = mc.Get("key");
        Assert.Null(value);

        // Rather than waiting for the expiration callback to fire,
        // we replace the cache item and verify that the reason is still Expired
        mc.Set("key", "value2", cip);
        Assert.True(removed);
        Assert.Equal(CacheEntryRemovedReason.Expired, reason);

        // Second round: let the background expiration fire the callback on its own.
        removed = false;
        cip = new CacheItemPolicy();
        cip.RemovedCallback = (CacheEntryRemovedArguments args) =>
        {
            removed = true;
            reason = args.RemovedReason;
        };
        cip.AbsoluteExpiration = DateTimeOffset.Now.AddMilliseconds(50);
        mc.Set("key", "value", cip);

        await Task.Delay(sleepPeriod);

        Assert.Null(mc.Get("key"));
        Assert.True(removed);
        Assert.Equal(CacheEntryRemovedReason.Expired, reason);
    }
}
// Verifies that the RemovedCallback fires once per expired entry and that an
// entry expiring after the wait period is left untouched.
public class MemoryCacheTestExpires11
{
    [Fact]
    [OuterLoop] // makes long wait
    public async Task TimedExpirationAsync()
    {
        int sleepPeriod = 20000;

        var mc = new PokerMemoryCache("MyCache");
        var cip = new CacheItemPolicy();

        // Shared callback counting expirations across all entries.
        int expiredCount = 0;
        object expiredCountLock = new object();
        CacheEntryRemovedCallback removedCb = (CacheEntryRemovedArguments args) =>
        {
            lock (expiredCountLock)
            {
                expiredCount++;
            }
        };

        // Three entries that expire well within the sleep period...
        cip = new CacheItemPolicy();
        cip.RemovedCallback = removedCb;
        cip.AbsoluteExpiration = DateTimeOffset.Now.AddMilliseconds(20);
        mc.Set("key1", "value1", cip);

        cip = new CacheItemPolicy();
        cip.RemovedCallback = removedCb;
        cip.AbsoluteExpiration = DateTimeOffset.Now.AddMilliseconds(200);
        mc.Set("key2", "value2", cip);

        cip = new CacheItemPolicy();
        cip.RemovedCallback = removedCb;
        cip.AbsoluteExpiration = DateTimeOffset.Now.AddMilliseconds(600);
        mc.Set("key3", "value3", cip);

        // ...and one that comfortably outlives it.
        cip = new CacheItemPolicy();
        cip.RemovedCallback = removedCb;
        cip.AbsoluteExpiration = DateTimeOffset.Now.AddMilliseconds(sleepPeriod + 55500);
        mc.Set("key4", "value4", cip);

        await Task.Delay(sleepPeriod);

        Assert.Null(mc.Get("key1"));
        Assert.Null(mc.Get("key2"));
        Assert.Null(mc.Get("key3"));
        Assert.NotNull(mc.Get("key4"));
        Assert.Equal(3, expiredCount);
    }
}
// Verifies the two distinct enumerator implementations exposed by the cache
// and that expired entries are not visible through either of them.
public class MemoryCacheTestExpires2
{
    [Fact]
    [OuterLoop] // makes long wait
    public async Task GetEnumeratorAsync()
    {
        var mc = new PokerMemoryCache("MyCache");

        // This one is a Hashtable enumerator
        IEnumerator enumerator = ((IEnumerable)mc).GetEnumerator();

        // This one is a Dictionary <string, object> enumerator
        IEnumerator enumerator2 = mc.DoGetEnumerator();

        Assert.NotNull(enumerator);
        Assert.NotNull(enumerator2);
        Assert.True(enumerator.GetType() != enumerator2.GetType());

        mc.Set("key1", "value1", null);
        mc.Set("key2", "value2", null);
        mc.Set("key3", "value3", null);

        bool expired4 = false;
        var cip = new CacheItemPolicy();
        cip.AbsoluteExpiration = DateTime.Now.AddMilliseconds(50);
        cip.RemovedCallback = (CacheEntryRemovedArguments args) =>
        {
            expired4 = true;
        };
        mc.Set("key4", "value4", cip);

        // wait past "key4" AbsoluteExpiration
        Thread.Sleep(500);

        // Only the three live entries are enumerated; the expired "key4" is skipped.
        enumerator = ((IEnumerable)mc).GetEnumerator();
        int count = 0;
        while (enumerator.MoveNext())
        {
            count++;
        }
        Assert.Equal(3, count);

        bool expired5 = false;
        cip = new CacheItemPolicy();
        cip.AbsoluteExpiration = DateTime.Now.AddMilliseconds(50);
        cip.RemovedCallback = (CacheEntryRemovedArguments args) =>
        {
            expired5 = true;
        };
        mc.Set("key5", "value5", cip);

        // Wait long enough for the removal callbacks of both expired items to fire.
        await Task.Delay(20500);

        enumerator2 = mc.DoGetEnumerator();
        count = 0;
        while (enumerator2.MoveNext())
        {
            count++;
        }

        Assert.True(expired4);
        Assert.True(expired5);
        Assert.Equal(3, count);
    }
}
// Verifies GetCacheItem(): argument validation, the returned CacheItem shape,
// and expiration callback behavior (including a throwing callback).
public class MemoryCacheTestExpires3
{
    [Fact]
    [OuterLoop] // makes long wait
    public async Task GetCacheItem()
    {
        var mc = new PokerMemoryCache("MyCache");

        // Regions are not supported; a null key is rejected.
        Assert.Throws<NotSupportedException>(() =>
        {
            mc.GetCacheItem("key", "region");
        });
        Assert.Throws<ArgumentNullException>(() =>
        {
            mc.GetCacheItem(null);
        });

        CacheItem value;
        mc.Set("key", "value", null);
        value = mc.GetCacheItem("key");
        Assert.NotNull(value);
        Assert.Equal("value", value.Value);
        Assert.Equal("key", value.Key);

        // A missing key yields null rather than throwing.
        value = mc.GetCacheItem("doesnotexist");
        Assert.Null(value);

        var cip = new CacheItemPolicy();
        bool callbackInvoked = false;
        // Sentinel outside the defined enum range so callback writes are detectable.
        CacheEntryRemovedReason reason = (CacheEntryRemovedReason)1000;

        cip.AbsoluteExpiration = DateTimeOffset.Now.AddMilliseconds(50);
        cip.RemovedCallback = (CacheEntryRemovedArguments args) =>
        {
            callbackInvoked = true;
            reason = args.RemovedReason;
        };
        mc.Set("key", "value", cip);

        // wait past the expiration time and verify that the item is gone
        await Task.Delay(500);
        value = mc.GetCacheItem("key");
        Assert.Null(value);

        // add a new item with the same key
        cip = new CacheItemPolicy();
        cip.AbsoluteExpiration = DateTimeOffset.Now.AddMilliseconds(50);
        cip.RemovedCallback = (CacheEntryRemovedArguments args) =>
        {
            callbackInvoked = true;
            reason = args.RemovedReason;
            throw new ApplicationException("test");
        };
        mc.Set("key", "value", cip);

        // and verify that the old item callback is called
        Assert.True(callbackInvoked);
        Assert.Equal(CacheEntryRemovedReason.Expired, reason);

        callbackInvoked = false;
        reason = (CacheEntryRemovedReason)1000;

        // wait for both expiration and the callback of the new item
        await Task.Delay(20500);
        value = mc.GetCacheItem("key");
        Assert.Null(value);
        Assert.True(callbackInvoked);
        Assert.Equal(CacheEntryRemovedReason.Expired, reason);
    }
}
// Verifies that expiring a large batch of entries shrinks the cache and that
// it can subsequently grow again.
public class MemoryCacheTestExpires4
{
    [Fact]
    [PlatformSpecific(TestPlatforms.Windows)] // Uses "physicalMemoryLimitPercentage" not supported on other platforms
    public async Task TestCacheShrink()
    {
        // Entry counts named after an internal heap-resize threshold —
        // presumably sized to force a resize; TODO confirm against the
        // MemoryCache implementation.
        const int HEAP_RESIZE_THRESHOLD = 8192 + 2;
        const int HEAP_RESIZE_SHORT_ENTRIES = 2048;
        const int HEAP_RESIZE_LONG_ENTRIES = HEAP_RESIZE_THRESHOLD - HEAP_RESIZE_SHORT_ENTRIES;

        var config = new NameValueCollection();
        config["cacheMemoryLimitMegabytes"] = 0.ToString();
        config["physicalMemoryLimitPercentage"] = 100.ToString();
        config["pollingInterval"] = new TimeSpan(0, 0, 1).ToString();

        using (var mc = new MemoryCache("TestCacheShrink", config))
        {
            Assert.Equal(0, mc.GetCount());

            // add some short duration entries
            for (int i = 0; i < HEAP_RESIZE_SHORT_ENTRIES; i++)
            {
                var expireAt = DateTimeOffset.Now.AddSeconds(3);
                mc.Add("short-" + i, i.ToString(), expireAt);
            }
            Assert.Equal(HEAP_RESIZE_SHORT_ENTRIES, mc.GetCount());

            // add some long duration entries
            for (int i = 0; i < HEAP_RESIZE_LONG_ENTRIES; i++)
            {
                var expireAt = DateTimeOffset.Now.AddSeconds(42);
                mc.Add("long-" + i, i.ToString(), expireAt);
            }
            Assert.Equal(HEAP_RESIZE_LONG_ENTRIES + HEAP_RESIZE_SHORT_ENTRIES, mc.GetCount());

            // wait past the short duration items expiration time
            await Task.Delay(4000);

            // the following will also shrink the size of the cache
            for (int i = 0; i < HEAP_RESIZE_SHORT_ENTRIES; i++)
            {
                Assert.Null(mc.Get("short-" + i));
            }
            Assert.Equal(HEAP_RESIZE_LONG_ENTRIES, mc.GetCount());

            // add some new items into the cache, this will grow the cache again
            for (int i = 0; i < HEAP_RESIZE_LONG_ENTRIES; i++)
            {
                mc.Add("final-" + i, i.ToString(), DateTimeOffset.Now.AddSeconds(4));
            }
            Assert.Equal(HEAP_RESIZE_LONG_ENTRIES + HEAP_RESIZE_LONG_ENTRIES, mc.GetCount());
        }
    }
}
// Verifies that entries with a shorter sliding expiration expire before
// entries with a longer one, regardless of insertion order.
public class MemoryCacheTestExpires5
{
    [Fact]
    [PlatformSpecific(TestPlatforms.Windows)] // Uses "physicalMemoryLimitPercentage" not supported on other platforms
    public async Task TestCacheExpiryOrdering()
    {
        var config = new NameValueCollection();
        config["cacheMemoryLimitMegabytes"] = 0.ToString();
        config["physicalMemoryLimitPercentage"] = 100.ToString();
        config["pollingInterval"] = new TimeSpan(0, 0, 1).ToString();

        using (var mc = new MemoryCache("TestCacheExpiryOrdering", config))
        {
            Assert.Equal(0, mc.GetCount());

            // add long lived items into the cache first
            for (int i = 0; i < 100; i++)
            {
                var cip = new CacheItemPolicy();
                cip.SlidingExpiration = new TimeSpan(0, 0, 4);
                mc.Add("long-" + i, i, cip);
            }
            Assert.Equal(100, mc.GetCount());

            // add shorter lived items into the cache, these should expire first
            for (int i = 0; i < 100; i++)
            {
                var cip = new CacheItemPolicy();
                cip.SlidingExpiration = new TimeSpan(0, 0, 1);
                mc.Add("short-" + i, i, cip);
            }
            Assert.Equal(200, mc.GetCount());

            // After 2 seconds the 1-second sliding entries are gone;
            // the 4-second ones remain.
            await Task.Delay(2000);
            for (int i = 0; i < 100; i++)
            {
                Assert.Null(mc.Get("short-" + i));
            }
            Assert.Equal(100, mc.GetCount());
        }
    }
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Text;
using log4net;
using Mono.Addins;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Framework.Console;
using OpenSim.Framework.Monitoring;
using OpenSim.Region.ClientStack.LindenUDP;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
namespace OpenSim.Region.OptionalModules.UDP.Linden
{
/// <summary>
/// A module that just holds commands for inspecting the current state of the Linden UDP stack.
/// </summary>
/// <remarks>
/// All actual client stack functionality remains in OpenSim.Region.ClientStack.LindenUDP
/// </remarks>
[Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "LindenUDPInfoModule")]
public class LindenUDPInfoModule : ISharedRegionModule
{
// private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

// Scenes this module has been added to, keyed by region ID.
// Mutated under lock(m_scenes) in AddRegion.
protected Dictionary<UUID, Scene> m_scenes = new Dictionary<UUID, Scene>();

public string Name { get { return "Linden UDP Module"; } }

// Null: this module does not declare a replaceable interface.
public Type ReplaceableInterface { get { return null; } }
// ISharedRegionModule.Initialise — this module reads no configuration.
public void Initialise(IConfigSource source)
{
    // m_log.DebugFormat("[LINDEN UDP INFO MODULE]: INITIALIZED MODULE");
}
// ISharedRegionModule.PostInitialise — nothing to do for this module.
public void PostInitialise()
{
    // m_log.DebugFormat("[LINDEN UDP INFO MODULE]: POST INITIALIZED MODULE");
}
// ISharedRegionModule.Close — no resources to release.
public void Close()
{
    // m_log.DebugFormat("[LINDEN UDP INFO MODULE]: CLOSED MODULE");
}
/// <summary>
/// Caches the scene for later report generation and registers this module's
/// console commands against it.
/// </summary>
/// <param name="scene">The scene being added to this shared module.</param>
public void AddRegion(Scene scene)
{
    // m_log.DebugFormat("[LINDEN UDP INFO MODULE]: REGION {0} ADDED", scene.RegionInfo.RegionName);
    lock (m_scenes)
        m_scenes[scene.RegionInfo.RegionID] = scene;

    // Each handler receives the raw tokenized console command and writes a
    // formatted report to the console.
    scene.AddCommand(
        "Comms", this, "show pqueues",
        "show pqueues [full]",
        "Show priority queue data for each client",
        "Without the 'full' option, only root agents are shown."
        + " With the 'full' option child agents are also shown.",
        (mod, cmd) => MainConsole.Instance.Output(GetPQueuesReport(cmd)));

    scene.AddCommand(
        "Comms", this, "show queues",
        "show queues [full]",
        "Show queue data for each client",
        "Without the 'full' option, only root agents are shown.\n"
        + "With the 'full' option child agents are also shown.\n\n"
        + "Type - Rt is a root (avatar) client whilst cd is a child (neighbour interacting) client.\n"
        + "Since Last In - Time in milliseconds since last packet received.\n"
        + "Pkts In - Number of packets processed from the client.\n"
        + "Pkts Out - Number of packets sent to the client.\n"
        + "Pkts Resent - Number of packets resent to the client.\n"
        + "Bytes Unacked - Number of bytes transferred to the client that are awaiting acknowledgement.\n"
        + "Q Pkts * - Number of packets of various types (land, wind, etc.) to be sent to the client that are waiting for available bandwidth.\n",
        (mod, cmd) => MainConsole.Instance.Output(GetQueuesReport(cmd)));

    scene.AddCommand(
        "Comms", this, "show image queues",
        "show image queues <first-name> <last-name>",
        "Show the image queues (textures downloaded via UDP) for a particular client.",
        (mod, cmd) => MainConsole.Instance.Output(GetImageQueuesReport(cmd)));

    scene.AddCommand(
        "Comms", this, "clear image queues",
        "clear image queues <first-name> <last-name>",
        "Clear the image queues (textures downloaded via UDP) for a particular client.",
        (mod, cmd) => MainConsole.Instance.Output(HandleImageQueuesClear(cmd)));

    scene.AddCommand(
        "Comms", this, "show throttles",
        "show throttles [full]",
        "Show throttle settings for each client and for the server overall",
        "Without the 'full' option, only root agents are shown."
        + " With the 'full' option child agents are also shown.",
        (mod, cmd) => MainConsole.Instance.Output(GetThrottlesReport(cmd)));

    scene.AddCommand(
        "Comms", this, "emergency-monitoring",
        "emergency-monitoring",
        "Go on/off emergency monitoring mode",
        "Go on/off emergency monitoring mode",
        HandleEmergencyMonitoring);
}
/// <summary>
/// Drops the scene from the module's cache so reports no longer include it.
/// </summary>
/// <param name="scene">The scene being removed from this shared module.</param>
public void RemoveRegion(Scene scene)
{
    // m_log.DebugFormat("[LINDEN UDP INFO MODULE]: REGION {0} REMOVED", scene.RegionInfo.RegionName);
    lock (m_scenes)
        m_scenes.Remove(scene.RegionInfo.RegionID);
}
/// <summary>
/// No-op: all setup for this module happens in <see cref="AddRegion"/>.
/// </summary>
public void RegionLoaded(Scene scene)
{
    // m_log.DebugFormat("[LINDEN UDP INFO MODULE]: REGION {0} LOADED", scene.RegionInfo.RegionName);
}
/// <summary>
/// Handles the "clear image queues &lt;first-name&gt; &lt;last-name&gt;" console command by
/// clearing the queued UDP texture download requests for every matching agent.
/// </summary>
/// <param name="cmd">
/// Tokenized console command: ["clear", "image", "queues", first-name, last-name].
/// </param>
/// <returns>A per-region report of cleared requests, or a usage/error message.</returns>
protected string HandleImageQueuesClear(string[] cmd)
{
    if (cmd.Length != 5)
        // Fixed: usage text previously said "image queues clear", which does not
        // match the command word order registered in AddRegion ("clear image queues").
        return "Usage: clear image queues <first-name> <last-name>";

    string firstName = cmd[3];
    string lastName = cmd[4];

    // The same avatar may be present (root or child) in several scenes.
    List<ScenePresence> foundAgents = new List<ScenePresence>();

    lock (m_scenes)
    {
        foreach (Scene scene in m_scenes.Values)
        {
            ScenePresence sp = scene.GetScenePresence(firstName, lastName);
            if (sp != null)
                foundAgents.Add(sp);
        }
    }

    if (foundAgents.Count == 0)
        return string.Format("No agents found for {0} {1}", firstName, lastName);

    StringBuilder report = new StringBuilder();

    foreach (ScenePresence agent in foundAgents)
    {
        // Image queues are a feature of the Linden UDP client stack only.
        LLClientView client = agent.ControllingClient as LLClientView;

        if (client == null)
            return "This command is only supported for LLClientView";

        int requestsDeleted = client.ImageManager.ClearImageQueue();

        report.AppendFormat(
            "In region {0} ({1} agent) cleared {2} requests\n",
            agent.Scene.RegionInfo.RegionName, agent.IsChildAgent ? "child" : "root", requestsDeleted);
    }

    return report.ToString();
}
/// <summary>
/// Handles the "emergency-monitoring [on|off]" console command, toggling
/// emergency monitoring on every cached scene.
/// </summary>
/// <param name="module">Module name supplied by the console dispatcher (unused).</param>
/// <param name="cmd">Tokenized command; no argument or "on" enables, anything else disables.</param>
protected void HandleEmergencyMonitoring(string module, string[] cmd)
{
    // Bare "emergency-monitoring" and explicit "emergency-monitoring on" both enable.
    bool mode = cmd.Length == 1 || (cmd.Length > 1 && cmd[1] == "on");

    MainConsole.Instance.Output(mode ? "Emergency Monitoring ON" : "Emergency Monitoring OFF");

    // Fixed: every other accessor of m_scenes in this module takes the lock;
    // this loop previously enumerated the dictionary unlocked and could race
    // with AddRegion/RemoveRegion.
    lock (m_scenes)
    {
        foreach (Scene s in m_scenes.Values)
            s.EmergencyMonitoring = mode;
    }
}
/// <summary>
/// Renders a single table cell: the entry truncated to <paramref name="maxLength"/>
/// characters, left-justified in a field of <paramref name="maxLength"/> plus
/// <paramref name="columnPadding"/> trailing spaces.
/// </summary>
/// <param name="entry">Cell text; truncated if longer than <paramref name="maxLength"/>.</param>
/// <param name="maxLength">Width of the data portion of the column.</param>
/// <param name="columnPadding">Number of separator spaces appended after the column.</param>
/// <returns>The padded (and possibly truncated) cell text.</returns>
protected string GetColumnEntry(string entry, int maxLength, int columnPadding)
{
    string cell = entry.Length > maxLength ? entry.Substring(0, maxLength) : entry;

    // Negative alignment in the composite format left-justifies the cell and
    // emits columnPadding spaces via the empty second argument.
    return string.Format("{0,-" + maxLength + "}{1,-" + columnPadding + "}", cell, "");
}
/// <summary>
/// Generate UDP Queue data report for each client
/// </summary>
/// <param name="showParams">
/// Tokenized console command; token 2 may be "full" (include child agents) or
/// tokens 2-3 a "First Last" agent name to filter by.
/// </param>
/// <returns>A table of entity update priority queue counts (Pri 0..Pri 11) per client.</returns>
protected string GetPQueuesReport(string[] showParams)
{
    bool showChildren = false;
    string pname = "";

    // "show pqueues full" -> include child agents; "show pqueues First Last" -> single agent.
    if (showParams.Length > 2 && showParams[2] == "full")
        showChildren = true;
    else if (showParams.Length > 3)
        pname = showParams[2] + " " + showParams[3];

    StringBuilder report = new StringBuilder();

    // Fixed column widths keep the header aligned with the per-client rows.
    int columnPadding = 2;
    int maxNameLength = 18;
    int maxRegionNameLength = 14;
    int maxTypeLength = 4;
    // int totalInfoFieldsLength = maxNameLength + columnPadding + maxRegionNameLength + columnPadding + maxTypeLength + columnPadding;

    report.Append(GetColumnEntry("User", maxNameLength, columnPadding));
    report.Append(GetColumnEntry("Region", maxRegionNameLength, columnPadding));
    report.Append(GetColumnEntry("Type", maxTypeLength, columnPadding));

    report.AppendFormat(
        "{0,7} {1,7} {2,7} {3,7} {4,7} {5,7} {6,7} {7,7} {8,7} {9,7} {10,7} {11,7}\n",
        "Pri 0",
        "Pri 1",
        "Pri 2",
        "Pri 3",
        "Pri 4",
        "Pri 5",
        "Pri 6",
        "Pri 7",
        "Pri 8",
        "Pri 9",
        "Pri 10",
        "Pri 11");

    lock (m_scenes)
    {
        foreach (Scene scene in m_scenes.Values)
        {
            scene.ForEachClient(
                delegate(IClientAPI client)
                {
                    // Priority queues only exist on the Linden UDP client stack.
                    if (client is LLClientView)
                    {
                        bool isChild = client.SceneAgent.IsChildAgent;
                        if (isChild && !showChildren)
                            return;

                        string name = client.Name;
                        if (pname != "" && name != pname)
                            return;

                        string regionName = scene.RegionInfo.RegionName;

                        report.Append(GetColumnEntry(name, maxNameLength, columnPadding));
                        report.Append(GetColumnEntry(regionName, maxRegionNameLength, columnPadding));
                        report.Append(GetColumnEntry(isChild ? "Cd" : "Rt", maxTypeLength, columnPadding));
                        report.AppendLine(((LLClientView)client).EntityUpdateQueue.ToString());
                    }
                });
        }
    }

    return report.ToString();
}
/// <summary>
/// Generate an image queue report
/// </summary>
/// <param name="showParams">
/// Tokenized console command: ["show", "image", "queues", first-name, last-name]
/// with an optional sixth token to include child agents.
/// </param>
/// <returns>A per-region listing of queued texture downloads, or a usage/error message.</returns>
private string GetImageQueuesReport(string[] showParams)
{
    if (showParams.Length < 5 || showParams.Length > 6)
        // Fixed: usage text previously said "image queues show", which does not
        // match the command word order registered in AddRegion ("show image queues").
        return "Usage: show image queues <first-name> <last-name> [full]";

    string firstName = showParams[3];
    string lastName = showParams[4];

    // NOTE(review): any sixth token enables child agents, not just "full" — this
    // preserves the original behaviour; confirm whether strict matching is wanted.
    bool showChildAgents = showParams.Length == 6;

    List<ScenePresence> foundAgents = new List<ScenePresence>();

    lock (m_scenes)
    {
        foreach (Scene scene in m_scenes.Values)
        {
            ScenePresence sp = scene.GetScenePresence(firstName, lastName);
            if (sp != null && (showChildAgents || !sp.IsChildAgent))
                foundAgents.Add(sp);
        }
    }

    if (foundAgents.Count == 0)
        return string.Format("No agents found for {0} {1}", firstName, lastName);

    StringBuilder report = new StringBuilder();

    foreach (ScenePresence agent in foundAgents)
    {
        // Image queues are a feature of the Linden UDP client stack only.
        LLClientView client = agent.ControllingClient as LLClientView;

        if (client == null)
            return "This command is only supported for LLClientView";

        J2KImage[] images = client.ImageManager.GetImages();

        report.AppendFormat(
            "In region {0} ({1} agent)\n",
            agent.Scene.RegionInfo.RegionName, agent.IsChildAgent ? "child" : "root");
        report.AppendFormat("Images in queue: {0}\n", images.Length);

        if (images.Length > 0)
        {
            report.AppendFormat(
                "{0,-36} {1,-8} {2,-10} {3,-9} {4,-9} {5,-7}\n",
                "Texture ID",
                "Last Seq",
                "Priority",
                "Start Pkt",
                "Has Asset",
                "Decoded");

            foreach (J2KImage image in images)
                report.AppendFormat(
                    "{0,36} {1,8} {2,10} {3,10} {4,9} {5,7}\n",
                    image.TextureID, image.LastSequence, image.Priority, image.StartPacket, image.HasAsset, image.IsDecoded);
        }
    }

    return report.ToString();
}
/// <summary>
/// Generate UDP Queue data report for each client
/// </summary>
/// <param name="showParams">
/// Tokenized console command; token 2 may be "full" (include child agents) or
/// tokens 2-3 a "First Last" agent name to filter by.
/// </param>
/// <returns>A table of packet/byte/queue statistics per client.</returns>
protected string GetQueuesReport(string[] showParams)
{
    bool showChildren = false;
    string pname = "";

    // "show queues full" -> include child agents; "show queues First Last" -> single agent.
    if (showParams.Length > 2 && showParams[2] == "full")
        showChildren = true;
    else if (showParams.Length > 3)
        pname = showParams[2] + " " + showParams[3];

    StringBuilder report = new StringBuilder();

    // Fixed column widths keep the two header rows aligned with the data rows.
    int columnPadding = 2;
    int maxNameLength = 18;
    int maxRegionNameLength = 14;
    int maxTypeLength = 4;

    int totalInfoFieldsLength
        = maxNameLength + columnPadding
        + maxRegionNameLength + columnPadding
        + maxTypeLength + columnPadding;

    report.Append(GetColumnEntry("User", maxNameLength, columnPadding));
    report.Append(GetColumnEntry("Region", maxRegionNameLength, columnPadding));
    report.Append(GetColumnEntry("Type", maxTypeLength, columnPadding));

    // First header row: statistic group names.
    report.AppendFormat(
        "{0,7} {1,7} {2,7} {3,7} {4,9} {5,7} {6,7} {7,7} {8,7} {9,7} {10,8} {11,7} {12,7}\n",
        "Since",
        "Pkts",
        "Pkts",
        "Pkts",
        "Bytes",
        "Q Pkts",
        "Q Pkts",
        "Q Pkts",
        "Q Pkts",
        "Q Pkts",
        "Q Pkts",
        "Q Pkts",
        "Q Pkts");

    // Second header row: specific statistic names, indented past the info columns.
    report.AppendFormat("{0,-" + totalInfoFieldsLength + "}", "");
    report.AppendFormat(
        "{0,7} {1,7} {2,7} {3,7} {4,9} {5,7} {6,7} {7,7} {8,7} {9,7} {10,8} {11,7} {12,7}\n",
        "Last In",
        "In",
        "Out",
        "Resent",
        "Unacked",
        "Resend",
        "Land",
        "Wind",
        "Cloud",
        "Task",
        "Texture",
        "Asset",
        "State");

    lock (m_scenes)
    {
        foreach (Scene scene in m_scenes.Values)
        {
            scene.ForEachClient(
                delegate(IClientAPI client)
                {
                    bool isChild = client.SceneAgent.IsChildAgent;
                    if (isChild && !showChildren)
                        return;

                    string name = client.Name;
                    if (pname != "" && name != pname)
                        return;

                    string regionName = scene.RegionInfo.RegionName;

                    report.Append(GetColumnEntry(name, maxNameLength, columnPadding));
                    report.Append(GetColumnEntry(regionName, maxRegionNameLength, columnPadding));
                    report.Append(GetColumnEntry(isChild ? "Cd" : "Rt", maxTypeLength, columnPadding));

                    // Only clients exposing IStatsCollector contribute a stats row;
                    // others leave the row with just the info columns.
                    if (client is IStatsCollector)
                    {
                        IStatsCollector stats = (IStatsCollector)client;
                        report.AppendLine(stats.Report());
                    }
                });
        }
    }

    return report.ToString();
}
/// <summary>
/// Show throttle data
/// </summary>
/// <param name="showParams">
/// Tokenized console command; token 2 may be "full" (include child agents) or
/// tokens 2-3 a "First Last" agent name to filter by.
/// </param>
/// <returns>A table of per-category throttle rates (kb/s) per client, preceded by server defaults.</returns>
protected string GetThrottlesReport(string[] showParams)
{
    bool showChildren = false;
    string pname = "";

    // "show throttles full" -> include child agents; "show throttles First Last" -> single agent.
    if (showParams.Length > 2 && showParams[2] == "full")
        showChildren = true;
    else if (showParams.Length > 3)
        pname = showParams[2] + " " + showParams[3];

    StringBuilder report = new StringBuilder();

    // Fixed column widths keep the two header rows aligned with the data rows.
    int columnPadding = 2;
    int maxNameLength = 18;
    int maxRegionNameLength = 14;
    int maxTypeLength = 4;
    int totalInfoFieldsLength = maxNameLength + columnPadding + maxRegionNameLength + columnPadding + maxTypeLength + columnPadding;

    report.Append(GetColumnEntry("User", maxNameLength, columnPadding));
    report.Append(GetColumnEntry("Region", maxRegionNameLength, columnPadding));
    report.Append(GetColumnEntry("Type", maxTypeLength, columnPadding));

    // First header row: throttle category names.
    report.AppendFormat(
        "{0,7} {1,8} {2,7} {3,7} {4,7} {5,7} {6,9} {7,7}\n",
        "Total",
        "Resend",
        "Land",
        "Wind",
        "Cloud",
        "Task",
        "Texture",
        "Asset");

    // Second header row: units, indented past the info columns.
    report.AppendFormat("{0,-" + totalInfoFieldsLength + "}", "");
    report.AppendFormat(
        "{0,7} {1,8} {2,7} {3,7} {4,7} {5,7} {6,9} {7,7}",
        "kb/s",
        "kb/s",
        "kb/s",
        "kb/s",
        "kb/s",
        "kb/s",
        "kb/s",
        "kb/s");

    report.AppendLine();

    // The server-wide rates row is emitted lazily, once, when the first
    // LLClientView is encountered (its UDPServer supplies the defaults).
    bool firstClient = true;

    lock (m_scenes)
    {
        foreach (Scene scene in m_scenes.Values)
        {
            scene.ForEachClient(
                delegate(IClientAPI client)
                {
                    if (client is LLClientView)
                    {
                        LLClientView llClient = client as LLClientView;

                        if (firstClient)
                        {
                            report.AppendLine(GetServerThrottlesReport(llClient.UDPServer));
                            firstClient = false;
                        }

                        bool isChild = client.SceneAgent.IsChildAgent;
                        if (isChild && !showChildren)
                            return;

                        string name = client.Name;
                        if (pname != "" && name != pname)
                            return;

                        string regionName = scene.RegionInfo.RegionName;

                        LLUDPClient llUdpClient = llClient.UDPClient;
                        ClientInfo ci = llUdpClient.GetClientInfo();

                        report.Append(GetColumnEntry(name, maxNameLength, columnPadding));
                        report.Append(GetColumnEntry(regionName, maxRegionNameLength, columnPadding));
                        report.Append(GetColumnEntry(isChild ? "Cd" : "Rt", maxTypeLength, columnPadding));

                        // Rates are stored in bytes/s; * 8 / 1000 converts to kb/s.
                        report.AppendFormat(
                            "{0,7} {1,8} {2,7} {3,7} {4,7} {5,7} {6,9} {7,7}",
                            (ci.totalThrottle * 8) / 1000,
                            (ci.resendThrottle * 8) / 1000,
                            (ci.landThrottle * 8) / 1000,
                            (ci.windThrottle * 8) / 1000,
                            (ci.cloudThrottle * 8) / 1000,
                            (ci.taskThrottle * 8) / 1000,
                            (ci.textureThrottle * 8) / 1000,
                            (ci.assetThrottle * 8) / 1000);

                        report.AppendLine();
                    }
                });
        }
    }

    return report.ToString();
}
/// <summary>
/// Builds the single "SERVER AGENT RATES" row showing the server's default
/// per-category throttle rates, formatted to line up with the per-client rows
/// produced by <see cref="GetThrottlesReport"/>.
/// </summary>
/// <param name="udpServer">The UDP server whose <c>ThrottleRates</c> are reported.</param>
/// <returns>One formatted row (no trailing newline).</returns>
protected string GetServerThrottlesReport(LLUDPServer udpServer)
{
    // Column widths must match those used by GetThrottlesReport.
    int columnPadding = 2;
    int maxNameLength = 18;
    int maxRegionNameLength = 14;
    int maxTypeLength = 4;

    StringBuilder row = new StringBuilder();
    row.Append(GetColumnEntry("SERVER AGENT RATES", maxNameLength, columnPadding));
    row.Append(GetColumnEntry("-", maxRegionNameLength, columnPadding));
    row.Append(GetColumnEntry("-", maxTypeLength, columnPadding));

    // Rates are stored in bytes/s; * 8 / 1000 converts to kb/s.
    ThrottleRates rates = udpServer.ThrottleRates;
    row.AppendFormat(
        "{0,7} {1,8} {2,7} {3,7} {4,7} {5,7} {6,9} {7,7}",
        (rates.Total * 8) / 1000,
        (rates.Resend * 8) / 1000,
        (rates.Land * 8) / 1000,
        (rates.Wind * 8) / 1000,
        (rates.Cloud * 8) / 1000,
        (rates.Task * 8) / 1000,
        (rates.Texture * 8) / 1000,
        (rates.Asset * 8) / 1000);

    return row.ToString();
}
}
}
| |
using System;
using System.Data;
using System.Data.SqlClient;
using Csla;
using Csla.Data;
namespace SelfLoadSoftDelete.Business.ERCLevel
{
/// <summary>
/// H04_SubContinent (editable child object).<br/>
/// This is a generated base class of <see cref="H04_SubContinent"/> business object.
/// </summary>
/// <remarks>
/// This class contains one child collection:<br/>
/// - <see cref="H05_CountryObjects"/> of type <see cref="H05_CountryColl"/> (1:M relation to <see cref="H06_Country"/>)<br/>
/// This class is an item of <see cref="H03_SubContinentColl"/> collection.
/// </remarks>
[Serializable]
public partial class H04_SubContinent : BusinessBase<H04_SubContinent>
{
#region Static Fields

// Seed for the temporary negative IDs handed to newly created child objects
// before the database assigns a real identity value on insert.
private static int _lastID;

#endregion
#region Business Properties

/// <summary>
/// Maintains metadata about <see cref="SubContinent_ID"/> property.
/// </summary>
public static readonly PropertyInfo<int> SubContinent_IDProperty = RegisterProperty<int>(p => p.SubContinent_ID, "SubContinents ID");
/// <summary>
/// Gets the SubContinents ID.
/// </summary>
/// <value>The SubContinents ID.</value>
/// <remarks>
/// Read-only to consumers: loaded from the database on fetch, or seeded with a
/// temporary negative value (see <c>_lastID</c>) until the insert completes.
/// </remarks>
public int SubContinent_ID
{
    get { return GetProperty(SubContinent_IDProperty); }
}

/// <summary>
/// Maintains metadata about <see cref="SubContinent_Name"/> property.
/// </summary>
public static readonly PropertyInfo<string> SubContinent_NameProperty = RegisterProperty<string>(p => p.SubContinent_Name, "SubContinents Name");
/// <summary>
/// Gets or sets the SubContinents Name.
/// </summary>
/// <value>The SubContinents Name.</value>
public string SubContinent_Name
{
    get { return GetProperty(SubContinent_NameProperty); }
    set { SetProperty(SubContinent_NameProperty, value); }
}

/// <summary>
/// Maintains metadata about child <see cref="H05_SubContinent_SingleObject"/> property.
/// </summary>
public static readonly PropertyInfo<H05_SubContinent_Child> H05_SubContinent_SingleObjectProperty = RegisterProperty<H05_SubContinent_Child>(p => p.H05_SubContinent_SingleObject, "H05 SubContinent Single Object", RelationshipTypes.Child);
/// <summary>
/// Gets the H05 Sub Continent Single Object ("self load" child property).
/// </summary>
/// <value>The H05 Sub Continent Single Object.</value>
public H05_SubContinent_Child H05_SubContinent_SingleObject
{
    get { return GetProperty(H05_SubContinent_SingleObjectProperty); }
    // LoadProperty bypasses authorization/dirty tracking; only the data portal
    // plumbing in this class assigns the child.
    private set { LoadProperty(H05_SubContinent_SingleObjectProperty, value); }
}

/// <summary>
/// Maintains metadata about child <see cref="H05_SubContinent_ASingleObject"/> property.
/// </summary>
public static readonly PropertyInfo<H05_SubContinent_ReChild> H05_SubContinent_ASingleObjectProperty = RegisterProperty<H05_SubContinent_ReChild>(p => p.H05_SubContinent_ASingleObject, "H05 SubContinent ASingle Object", RelationshipTypes.Child);
/// <summary>
/// Gets the H05 Sub Continent ASingle Object ("self load" child property).
/// </summary>
/// <value>The H05 Sub Continent ASingle Object.</value>
public H05_SubContinent_ReChild H05_SubContinent_ASingleObject
{
    get { return GetProperty(H05_SubContinent_ASingleObjectProperty); }
    private set { LoadProperty(H05_SubContinent_ASingleObjectProperty, value); }
}

/// <summary>
/// Maintains metadata about child <see cref="H05_CountryObjects"/> property.
/// </summary>
public static readonly PropertyInfo<H05_CountryColl> H05_CountryObjectsProperty = RegisterProperty<H05_CountryColl>(p => p.H05_CountryObjects, "H05 Country Objects", RelationshipTypes.Child);
/// <summary>
/// Gets the H05 Country Objects ("self load" child property).
/// </summary>
/// <value>The H05 Country Objects.</value>
public H05_CountryColl H05_CountryObjects
{
    get { return GetProperty(H05_CountryObjectsProperty); }
    private set { LoadProperty(H05_CountryObjectsProperty, value); }
}

#endregion
#region Factory Methods

/// <summary>
/// Factory method. Creates a new <see cref="H04_SubContinent"/> object.
/// </summary>
/// <returns>A reference to the created <see cref="H04_SubContinent"/> object.</returns>
internal static H04_SubContinent NewH04_SubContinent()
{
    // Routes through the data portal so Child_Create runs and defaults are loaded.
    return DataPortal.CreateChild<H04_SubContinent>();
}

/// <summary>
/// Factory method. Loads a <see cref="H04_SubContinent"/> object from the given SafeDataReader.
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
/// <returns>A reference to the fetched <see cref="H04_SubContinent"/> object.</returns>
internal static H04_SubContinent GetH04_SubContinent(SafeDataReader dr)
{
    H04_SubContinent obj = new H04_SubContinent();
    // show the framework that this is a child object
    obj.MarkAsChild();
    obj.Fetch(dr);
    // MarkOld: the object now mirrors the database row, i.e. it is not new/dirty.
    obj.MarkOld();
    return obj;
}

#endregion
#region Constructor

/// <summary>
/// Initializes a new instance of the <see cref="H04_SubContinent"/> class.
/// </summary>
/// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
[System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
public H04_SubContinent()
{
    // Use factory methods and do not use direct creation.
    // show the framework that this is a child object
    MarkAsChild();
}

#endregion
#region Data Access
/// <summary>
/// Loads default values for the <see cref="H04_SubContinent"/> object properties.
/// </summary>
[Csla.RunLocal]
protected override void Child_Create()
{
    // Temporary negative ID marks the object as not yet persisted; the real ID
    // comes back from the stored procedure on insert.
    LoadProperty(SubContinent_IDProperty, System.Threading.Interlocked.Decrement(ref _lastID));
    // Child objects are created eagerly so the object graph is complete from the start.
    LoadProperty(H05_SubContinent_SingleObjectProperty, DataPortal.CreateChild<H05_SubContinent_Child>());
    LoadProperty(H05_SubContinent_ASingleObjectProperty, DataPortal.CreateChild<H05_SubContinent_ReChild>());
    LoadProperty(H05_CountryObjectsProperty, DataPortal.CreateChild<H05_CountryColl>());
    var args = new DataPortalHookArgs();
    OnCreate(args);
    base.Child_Create();
}
/// <summary>
/// Loads a <see cref="H04_SubContinent"/> object from the given SafeDataReader.
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
/// <remarks>Child objects are loaded separately via <see cref="FetchChildren"/>.</remarks>
private void Fetch(SafeDataReader dr)
{
    // Value properties
    LoadProperty(SubContinent_IDProperty, dr.GetInt32("SubContinent_ID"));
    LoadProperty(SubContinent_NameProperty, dr.GetString("SubContinent_Name"));
    var args = new DataPortalHookArgs(dr);
    OnFetchRead(args);
}
/// <summary>
/// Loads child objects.
/// </summary>
/// <remarks>
/// "Self load" pattern: each child issues its own query keyed on this object's
/// <see cref="SubContinent_ID"/>, so call this only after <c>Fetch</c> has run.
/// </remarks>
internal void FetchChildren()
{
    LoadProperty(H05_SubContinent_SingleObjectProperty, H05_SubContinent_Child.GetH05_SubContinent_Child(SubContinent_ID));
    LoadProperty(H05_SubContinent_ASingleObjectProperty, H05_SubContinent_ReChild.GetH05_SubContinent_ReChild(SubContinent_ID));
    LoadProperty(H05_CountryObjectsProperty, H05_CountryColl.GetH05_CountryColl(SubContinent_ID));
}
/// <summary>
/// Inserts a new <see cref="H04_SubContinent"/> object in the database.
/// </summary>
/// <param name="parent">The parent object.</param>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Insert(H02_Continent parent)
{
    using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
    {
        using (var cmd = new SqlCommand("AddH04_SubContinent", ctx.Connection))
        {
            cmd.CommandType = CommandType.StoredProcedure;
            cmd.Parameters.AddWithValue("@Parent_Continent_ID", parent.Continent_ID).DbType = DbType.Int32;
            // Output parameter: the stored procedure returns the database-assigned ID,
            // which replaces the temporary negative ID after execution.
            cmd.Parameters.AddWithValue("@SubContinent_ID", ReadProperty(SubContinent_IDProperty)).Direction = ParameterDirection.Output;
            cmd.Parameters.AddWithValue("@SubContinent_Name", ReadProperty(SubContinent_NameProperty)).DbType = DbType.String;
            var args = new DataPortalHookArgs(cmd);
            OnInsertPre(args);
            cmd.ExecuteNonQuery();
            OnInsertPost(args);
            LoadProperty(SubContinent_IDProperty, (int) cmd.Parameters["@SubContinent_ID"].Value);
        }
        // flushes all pending data operations
        FieldManager.UpdateChildren(this);
    }
}
/// <summary>
/// Updates in the database all changes made to the <see cref="H04_SubContinent"/> object.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Update()
{
    // Nothing changed on this object or its children; skip the round trip.
    if (!IsDirty)
        return;

    using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
    {
        using (var cmd = new SqlCommand("UpdateH04_SubContinent", ctx.Connection))
        {
            cmd.CommandType = CommandType.StoredProcedure;
            cmd.Parameters.AddWithValue("@SubContinent_ID", ReadProperty(SubContinent_IDProperty)).DbType = DbType.Int32;
            cmd.Parameters.AddWithValue("@SubContinent_Name", ReadProperty(SubContinent_NameProperty)).DbType = DbType.String;
            var args = new DataPortalHookArgs(cmd);
            OnUpdatePre(args);
            cmd.ExecuteNonQuery();
            OnUpdatePost(args);
        }
        // flushes all pending data operations
        FieldManager.UpdateChildren(this);
    }
}
/// <summary>
/// Self deletes the <see cref="H04_SubContinent"/> object from database.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_DeleteSelf()
{
    using (var ctx = ConnectionManager<SqlConnection>.GetManager("DeepLoad"))
    {
        // flushes all pending data operations
        // (children are updated/deleted first so foreign keys are honoured)
        FieldManager.UpdateChildren(this);
        using (var cmd = new SqlCommand("DeleteH04_SubContinent", ctx.Connection))
        {
            cmd.CommandType = CommandType.StoredProcedure;
            cmd.Parameters.AddWithValue("@SubContinent_ID", ReadProperty(SubContinent_IDProperty)).DbType = DbType.Int32;
            var args = new DataPortalHookArgs(cmd);
            OnDeletePre(args);
            cmd.ExecuteNonQuery();
            OnDeletePost(args);
        }
    }
    // removes all previous references to children
    LoadProperty(H05_SubContinent_SingleObjectProperty, DataPortal.CreateChild<H05_SubContinent_Child>());
    LoadProperty(H05_SubContinent_ASingleObjectProperty, DataPortal.CreateChild<H05_SubContinent_ReChild>());
    LoadProperty(H05_CountryObjectsProperty, DataPortal.CreateChild<H05_CountryColl>());
}
#endregion
#region DataPortal Hooks

// Extension points for the non-generated partial class: implement any of these
// partial methods to observe or customize the data portal operations above.

/// <summary>
/// Occurs after setting all defaults for object creation.
/// </summary>
partial void OnCreate(DataPortalHookArgs args);

/// <summary>
/// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
/// </summary>
partial void OnDeletePre(DataPortalHookArgs args);

/// <summary>
/// Occurs in DataPortal_Delete, after the delete operation, before Commit().
/// </summary>
partial void OnDeletePost(DataPortalHookArgs args);

/// <summary>
/// Occurs after setting query parameters and before the fetch operation.
/// </summary>
partial void OnFetchPre(DataPortalHookArgs args);

/// <summary>
/// Occurs after the fetch operation (object or collection is fully loaded and set up).
/// </summary>
partial void OnFetchPost(DataPortalHookArgs args);

/// <summary>
/// Occurs after the low level fetch operation, before the data reader is destroyed.
/// </summary>
partial void OnFetchRead(DataPortalHookArgs args);

/// <summary>
/// Occurs after setting query parameters and before the update operation.
/// </summary>
partial void OnUpdatePre(DataPortalHookArgs args);

/// <summary>
/// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
/// </summary>
partial void OnUpdatePost(DataPortalHookArgs args);

/// <summary>
/// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
/// </summary>
partial void OnInsertPre(DataPortalHookArgs args);

/// <summary>
/// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
/// </summary>
partial void OnInsertPost(DataPortalHookArgs args);

#endregion
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using Microsoft.Protocols.TestTools.StackSdk.Messages.Marshaling;
namespace Microsoft.Protocols.TestTools.StackSdk.FileAccessService.Smb2
{
/// <summary>
/// File information classes are numerical values (specified by the Level column in the following table)
/// that specify what information for a file is to be queried or set
/// </summary>
/// <remarks>
/// NOTE(review): the numeric values appear to follow the FileInformationClass
/// level numbers from [MS-FSCC] — confirm against that specification before
/// adding members.
/// </remarks>
public enum FileInformationClasses
{
    /// <summary>
    /// This information class is used to query the access rights of a file.
    /// </summary>
    FileAccessInformation = 8,

    /// <summary>
    /// The buffer alignment required by the underlying device.
    /// </summary>
    FileAlignmentInformation = 17,

    /// <summary>
    /// This information class is used to query a collection of file information structures.
    /// </summary>
    FileAllInformation = 18,

    /// <summary>
    /// This information class is used to query alternate name information for a file.
    /// </summary>
    FileAlternateNameInformation = 21,

    /// <summary>
    /// This information class is used to query for attribute and reparse tag information for a file.
    /// </summary>
    FileAttributeTagInformation = 35,

    /// <summary>
    /// This information class is used to query or set file information.
    /// </summary>
    FileBasicInformation = 4,

    /// <summary>
    /// This information class is used to query compression information for a file
    /// </summary>
    FileCompressionInformation = 28,

    /// <summary>
    /// This information class is used to mark a file for deletion.
    /// </summary>
    FileDispositionInformation = 13,

    /// <summary>
    /// This information class is used to query for the size of the extended attributes (EA) for a file.
    /// </summary>
    FileEaInformation = 7,

    /// <summary>
    /// This information class is used to query or set extended attribute (EA) information for a file.
    /// </summary>
    FileFullEaInformation = 15,

    /// <summary>
    /// This information class is used to query NTFS hard links to an existing file.
    /// </summary>
    FileHardLinkInformation = 46,

    /// <summary>
    /// This information class is used to query transactional visibility information for the files in a directory
    /// </summary>
    FileIdGlobalTxDirectoryInformation = 50,

    /// <summary>
    /// This information class is used to query for the file system's 8-byte file reference number for a file.
    /// </summary>
    FileInternalInformation = 6,

    /// <summary>
    /// This information class is used to query or set the mode of the file.
    /// </summary>
    FileModeInformation = 16,

    /// <summary>
    /// This information class is used to query for information on a network file open.
    /// </summary>
    FileNetworkOpenInformation = 34,

    /// <summary>
    /// Windows file systems do not implement this file information class;
    /// the server will fail it with STATUS_NOT_SUPPORTED.
    /// </summary>
    FileNormalizedNameInformation = 48,

    /// <summary>
    /// This information class is used to query or set information on a named pipe that is not
    /// specific to one end of the pipe or another.
    /// </summary>
    FilePipeInformation = 23,

    /// <summary>
    /// This information class is used to query information on a named pipe
    /// that is associated with the end of the pipe that is being queried.
    /// </summary>
    FilePipeLocalInformation = 24,

    /// <summary>
    /// This information class is used to query or set information on a named pipe
    /// that is associated with the client end of the pipe that is being queried.
    /// </summary>
    FilePipeRemoteInformation = 25,

    /// <summary>
    /// This information class is used to query or set the position of the file pointer within a file.
    /// </summary>
    FilePositionInformation = 14,

    /// <summary>
    /// The information class is used to query quota information.
    /// </summary>
    FileQuotaInformation = 32,

    /// <summary>
    /// This information class is used to rename a file
    /// </summary>
    FileRenameInformation = 10,

    /// <summary>
    /// This information class is used to query or set reserved bandwidth for a file handle.
    /// </summary>
    FileSfioReserveInformation = 44,

    /// <summary>
    /// This information class is used to query file information
    /// </summary>
    FileStandardInformation = 5,

    /// <summary>
    /// This information class is used to query file link information
    /// </summary>
    FileStandardLinkInformation = 54,

    /// <summary>
    /// This information class is used to enumerate the data streams for a file.
    /// </summary>
    FileStreamInformation = 22,

    /// <summary>
    /// This information class is used to set end-of-file information for a file.
    /// </summary>
    FileEndOfFileInformation = 20,
}
/// <summary>
/// File system information classes are numerical values
/// (specified by the Level column in the following table) that specify what information
/// on a particular instance of a file system on a volume is to be queried.
/// </summary>
/// <remarks>
/// NOTE(review): the numeric values appear to follow the FsInformationClass
/// level numbers from [MS-FSCC] — confirm against that specification before
/// adding members.
/// </remarks>
public enum FileSystemInformationClasses
{
    /// <summary>
    /// This information class is used to query attribute information for a file system.
    /// </summary>
    FileFsAttributeInformation = 5,

    /// <summary>
    /// This information class is used to query device information associated with a file system volume.
    /// </summary>
    FileFsDeviceInformation = 4,

    /// <summary>
    /// This information class is used to query sector size information for a file system volume.
    /// </summary>
    FileFsFullSizeInformation = 7,

    /// <summary>
    /// This information class is used to query or set the object ID for a file system data element.
    /// </summary>
    FileFsObjectIdInformation = 8,

    /// <summary>
    /// This information class is used to query sector size information for a file system volume.
    /// </summary>
    FileFsSizeInformation = 3,

    /// <summary>
    /// This information class is used to query information on a volume on which a file system is mounted.
    /// </summary>
    FileFsVolumeInformation = 1
}
/// <summary>
/// This information class is used to query or set information on a named pipe
/// that is associated with the client end of the pipe that is being queried
/// </summary>
/// <remarks>
/// Fields are declared in on-the-wire order; the struct is marshaled directly
/// to/from the information buffer.
/// </remarks>
public struct FilePipeRemoteInformation
{
    /// <summary>
    /// A LARGE_INTEGER that MUST contain the maximum amount of time counted
    /// in 100-nanosecond intervals that will elapse before transmission of
    /// data from the client machine to the server.
    /// </summary>
    public ulong CollectDataTime;

    /// <summary>
    /// A ULONG that MUST contain the maximum size in bytes of data that will
    /// be collected on the client machine before transmission to the server.
    /// </summary>
    public uint MaximumCollectionCount;
}
/// <summary>
/// A 32-bit unsigned integer referring to the current state of the pipe
/// </summary>
public enum Named_Pipe_State_Value
{
    /// <summary>
    /// The specified named pipe is in the disconnected state
    /// </summary>
    FILE_PIPE_DISCONNECTED_STATE = 0x01,

    /// <summary>
    /// The specified named pipe is in the listening state
    /// </summary>
    FILE_PIPE_LISTENING_STATE = 0x02,

    /// <summary>
    /// The specified named pipe is in the connected state.
    /// </summary>
    FILE_PIPE_CONNECTED_STATE = 0x03,

    /// <summary>
    /// The specified named pipe is in the closing state.
    /// </summary>
    FILE_PIPE_CLOSING_STATE = 0x04
}
/// <summary>
/// The FSCTL_PIPE_PEEK response returns data from the pipe server's output buffer in the FSCTL output buffer
/// </summary>
/// <remarks>
/// Fields are declared in on-the-wire order; <see cref="Data"/> is variable
/// length and sized by <see cref="ReadDataAvailable"/>.
/// </remarks>
public struct FSCTL_PIPE_PEEK_Reply
{
    /// <summary>
    /// A 32-bit unsigned integer referring to the current state of the pipe
    /// </summary>
    public Named_Pipe_State_Value NamedPipeState;

    /// <summary>
    /// A 32-bit unsigned integer that specifies the size, in bytes, of the data available to read from the pipe
    /// </summary>
    public uint ReadDataAvailable;

    /// <summary>
    /// A 32-bit unsigned integer that specifies the number of messages available
    /// in the pipe if the pipe has been created as a message-type pipe
    /// </summary>
    public uint NumberOfMessages;

    /// <summary>
    /// A 32-bit unsigned integer that specifies the length of the first message
    /// available in the pipe if the pipe has been created as a message-type pipe.
    /// Otherwise, this field is 0
    /// </summary>
    public uint MessageLength;

    /// <summary>
    /// A byte buffer of preview data from the pipe.
    /// The length of the buffer is indicated by the value of the ReadDataAvailable field
    /// </summary>
    public byte[] Data;
}
/// <summary>
/// The FSCTL_FILE_LEVEL_TRIM operation informs the underlying storage medium that the contents
/// of the given range of the file no longer needs to be maintained. This message allows the storage
/// medium to manage its space more efficiently.
/// </summary>
public struct FSCTL_FILE_LEVEL_TRIM_INPUT
{
/// <summary>
/// This field is used for byte range locks to uniquely identify different consumers of byte range
/// locks on the same thread. Typically, this field is used only by remote protocols such as SMB or SMB2.
/// </summary>
public uint Key;
/// <summary>
/// A count of how many Offset, Length pairs follow in the data item.
/// Must equal the number of elements marshaled in Ranges.
/// </summary>
public uint NumRanges;
/// <summary>
/// An array of zero or more FILE_LEVEL_TRIM_RANGE (section 2.3.69.1) data elements.
/// The NumRanges field contains the number of FILE_LEVEL_TRIM_RANGE data elements in the array.
/// </summary>
[Size("NumRanges")]
public FSCTL_FILE_LEVEL_TRIM_RANGE[] Ranges;
}
/// <summary>
/// A single (Offset, Length) byte range of a file to be trimmed;
/// carried in the Ranges array of FSCTL_FILE_LEVEL_TRIM_INPUT.
/// </summary>
public struct FSCTL_FILE_LEVEL_TRIM_RANGE
{
/// <summary>
/// A 64-bit unsigned integer that contains a byte offset
/// into the given file at which to start the trim request.
/// </summary>
public ulong Offset;
/// <summary>
/// A 64-bit unsigned integer that contains the length,
/// in bytes, of how much of the file to trim, starting at Offset.
/// </summary>
public ulong Length;
}
/// <summary>
/// Reply for the FSCTL_FILE_LEVEL_TRIM operation.
/// </summary>
public struct FSCTL_FILE_LEVEL_TRIM_OUTPUT
{
/// <summary>
/// A 32-bit unsigned integer identifying the number of input ranges that were processed.
/// </summary>
public uint NumRangesProcessed;
}
/// <summary>
/// FSCTL_GET_INTEGRITY_INFORMATION reply: describes the current integrity
/// (checksum) configuration of the file or directory the handle refers to.
/// </summary>
public struct FSCTL_GET_INTEGRITY_INFO_OUTPUT
{
/// <summary>
/// The checksum algorithm currently in force (see enum values below).
/// </summary>
public FSCTL_GET_INTEGRITY_INFO_OUTPUT_CHECKSUMALGORITHM ChecksumAlgorithm;
/// <summary>
/// Reserved field; only the value V1 = 0 is defined.
/// </summary>
public FSCTL_GET_INTEGRITY_INFO_OUTPUT_RESERVED Reserved;
/// <summary>
/// Flags describing the current integrity enforcement state.
/// </summary>
public FSCTL_GET_INTEGRITY_INFO_OUTPUT_FLAGS Flags;
/// <summary>
/// Size, in bytes, of each chunk covered by one checksum
/// (per field name; see [MS-FSCC] for the authoritative definition).
/// </summary>
public uint ChecksumChunkSizeInBytes;
/// <summary>
/// Cluster size, in bytes, of the volume holding the file.
/// </summary>
public uint ClusterSizeInBytes;
}
/// <summary>
/// Checksum algorithms reported by FSCTL_GET_INTEGRITY_INFORMATION.
/// </summary>
public enum FSCTL_GET_INTEGRITY_INFO_OUTPUT_CHECKSUMALGORITHM : ushort
{
/// <summary>
/// The file or directory is not configured to use integrity.
/// </summary>
CHECKSUM_TYPE_NONE = 0,
/// <summary>
/// The file or directory is configured to use a CRC64 checksum to provide integrity.
/// </summary>
CHECKSUM_TYPE_CRC64 = 0x0002,
}
/// <summary>
/// Reserved field values for FSCTL_GET_INTEGRITY_INFO_OUTPUT; only 0 is defined.
/// </summary>
public enum FSCTL_GET_INTEGRITY_INFO_OUTPUT_RESERVED : ushort
{
V1 = 0
}
/// <summary>
/// Flag bits for FSCTL_GET_INTEGRITY_INFO_OUTPUT.Flags.
/// </summary>
[Flags]
public enum FSCTL_GET_INTEGRITY_INFO_OUTPUT_FLAGS : uint
{
/// <summary>
/// Indicates that checksum enforcement is not currently enabled on the target file.
/// </summary>
FSCTL_INTEGRITY_FLAG_CHECKSUM_ENFORCEMENT_OFF = 0x00000001
}
/// <summary>
/// The FSCTL_SET_INTEGRITY_INFORMATION Request message requests that the server
/// set the integrity state of the file or directory associated with the handle on which this FSCTL was invoked.
/// </summary>
/// <remarks>
/// NOTE(review): "INTEGRIY" in the type name is a typo for "INTEGRITY", but the
/// name is public API — renaming it would break existing callers.
/// </remarks>
public struct FSCTL_SET_INTEGRIY_INFO_INPUT
{
/// <summary>
/// The checksum algorithm to apply (or CHECKSUM_TYPE_UNCHANGED to leave it as-is).
/// </summary>
public FSCTL_SET_INTEGRITY_INFO_INPUT_CHECKSUMALGORITHM ChecksumAlgorithm;
/// <summary>
/// Reserved field; only the value V1 = 0 is defined.
/// </summary>
public FSCTL_SET_INTEGRITY_INFO_INPUT_RESERVED Reserved;
/// <summary>
/// Flags controlling checksum enforcement behavior.
/// </summary>
public FSCTL_SET_INTEGRITY_INFO_INPUT_FLAGS Flags;
}
/// <summary>
/// Checksum algorithms accepted by FSCTL_SET_INTEGRITY_INFORMATION.
/// </summary>
public enum FSCTL_SET_INTEGRITY_INFO_INPUT_CHECKSUMALGORITHM : ushort
{
/// <summary>
/// The file or directory should be set to not use integrity.
/// </summary>
CHECKSUM_TYPE_NONE = 0,
/// <summary>
/// The file or directory should be set to provide integrity using a CRC64 checksum.
/// </summary>
CHECKSUM_TYPE_CRC64 = 0x0002,
/// <summary>
/// The integrity status of the file or directory should be unchanged.
/// </summary>
CHECKSUM_TYPE_UNCHANGED = 0xFFFF,
}
/// <summary>
/// Reserved field values for FSCTL_SET_INTEGRIY_INFO_INPUT; only 0 is defined.
/// </summary>
public enum FSCTL_SET_INTEGRITY_INFO_INPUT_RESERVED : ushort
{
V1 = 0
}
/// <summary>
/// Flag bits for FSCTL_SET_INTEGRIY_INFO_INPUT.Flags.
/// </summary>
[Flags]
public enum FSCTL_SET_INTEGRITY_INFO_INPUT_FLAGS : uint
{
/// <summary>
/// When set, if a checksum does not match, the associated I/O operation will not be failed.
/// </summary>
FSCTL_INTEGRITY_FLAG_CHECKSUM_ENFORCEMENT_OFF = 0x00000001
}
/// <summary>
/// FSCTL_OFFLOAD_READ request: asks the server to generate a storage offload
/// token representing the given byte range of the file.
/// </summary>
public struct FSCTL_OFFLOAD_READ_INPUT
{
/// <summary>Size of this structure, in bytes.</summary>
public uint Size;
/// <summary>Flags; no values are currently defined (NONE = 0).</summary>
public FSCTL_OFFLOAD_READ_INPUT_FLAGS Flags;
/// <summary>
/// Requested time-to-live of the generated token. Units are not evident
/// from this code — presumably milliseconds per [MS-FSCC]; confirm.
/// </summary>
public uint TokenTimeToLive;
/// <summary>Reserved.</summary>
public uint Reserved;
/// <summary>Byte offset into the file at which the range to read starts.</summary>
public ulong FileOffset;
/// <summary>Length, in bytes, of the range to read.</summary>
public ulong CopyLength;
}
/// <summary>
/// Flag bits for FSCTL_OFFLOAD_READ_INPUT.Flags; no flags are currently defined.
/// </summary>
[Flags]
public enum FSCTL_OFFLOAD_READ_INPUT_FLAGS : uint
{
NONE = 0,
}
/// <summary>
/// FSCTL_OFFLOAD_READ reply carrying the generated storage offload token.
/// </summary>
public struct FSCTL_OFFLOAD_READ_OUTPUT
{
/// <summary>Size of this structure, in bytes.</summary>
public uint Size;
/// <summary>
/// NOTE(review): this field is typed with the *input* flags enum and named
/// "Flag" (singular), while FSCTL_OFFLOAD_READ_OUTPUT_FLAGS (declared below)
/// is never used. Per [MS-FSCC] the reply's flags should use the output
/// flags enum — but changing the field's type/name is a breaking public
/// API change, so it is only flagged here; confirm before fixing.
/// </summary>
public FSCTL_OFFLOAD_READ_INPUT_FLAGS Flag;
/// <summary>
/// Length, in bytes, covered by the returned token — presumably starting at
/// the requested FileOffset; confirm against [MS-FSCC].
/// </summary>
public ulong TransferLength;
/// <summary>The generated storage offload token.</summary>
public STORAGE_OFFLOAD_TOKEN Token;
}
/// <summary>
/// Flag bits for the FSCTL_OFFLOAD_READ reply (currently unused; see the
/// review note on FSCTL_OFFLOAD_READ_OUTPUT.Flag).
/// </summary>
[Flags]
public enum FSCTL_OFFLOAD_READ_OUTPUT_FLAGS : uint
{
NONE = 0,
OFFLOAD_READ_FLAG_ALL_ZERO_BEYOND_CURRENT_RANGE = 0x00000001,
}
/// <summary>
/// STORAGE_OFFLOAD_TOKEN: opaque token exchanged between FSCTL_OFFLOAD_READ
/// and FSCTL_OFFLOAD_WRITE. Header fields are big-endian on the wire (per
/// the ByteOrder attributes); TokenId is a fixed 504-byte payload.
/// </summary>
public struct STORAGE_OFFLOAD_TOKEN
{
/// <summary>Token type discriminator (big-endian on the wire).</summary>
[ByteOrder(EndianType.BigEndian)]
public FSCTL_OFFLOAD_WRITE_INPUT_TOKEN_TYPE TokenType;
/// <summary>Reserved (big-endian on the wire).</summary>
[ByteOrder(EndianType.BigEndian)]
public ushort Reserved;
/// <summary>Length, in bytes, of the meaningful portion of TokenId.</summary>
[ByteOrder(EndianType.BigEndian)]
public ushort TokenIdLength;
/// <summary>Opaque token payload; always marshaled as 504 bytes.</summary>
[StaticSize(504)]
public byte[] TokenId;
}
/// <summary>
/// Token types for STORAGE_OFFLOAD_TOKEN.TokenType.
/// </summary>
public enum FSCTL_OFFLOAD_WRITE_INPUT_TOKEN_TYPE : uint
{
/// <summary>Token representing a range of all-zero data.</summary>
STORAGE_OFFLOAD_TOKEN_TYPE_ZERO_DATA = 0xFFFF0001,
}
/// <summary>
/// FSCTL_OFFLOAD_WRITE request: asks the server to write data represented by a
/// previously obtained storage offload token into the given file range.
/// </summary>
public struct FSCTL_OFFLOAD_WRITE_INPUT
{
/// <summary>Size of this structure, in bytes.</summary>
public uint Size;
/// <summary>Flags; no values are currently defined (NONE = 0).</summary>
public FSCTL_OFFLOAD_WRITE_INPUT_FLAGS Flags;
/// <summary>Byte offset into the destination file at which to start writing.</summary>
public ulong FileOffset;
/// <summary>Length, in bytes, of the data to write.</summary>
public ulong CopyLength;
/// <summary>
/// Byte offset within the data represented by the token at which the
/// transfer starts.
/// </summary>
public ulong TransferOffset;
/// <summary>The storage offload token identifying the source data.</summary>
public STORAGE_OFFLOAD_TOKEN Token;
}
/// <summary>
/// Flag bits for FSCTL_OFFLOAD_WRITE_INPUT.Flags; no flags are currently defined.
/// </summary>
[Flags]
public enum FSCTL_OFFLOAD_WRITE_INPUT_FLAGS : uint
{
NONE = 0,
}
/// <summary>
/// FSCTL_OFFLOAD_WRITE reply.
/// </summary>
public struct FSCTL_OFFLOAD_WRITE_OUTPUT
{
/// <summary>Size of this structure, in bytes.</summary>
public uint Size;
/// <summary>Flags; no values are currently defined (NONE = 0).</summary>
public FSCTL_OFFLOAD_WRITE_OUTPUT_FLAGS Flags;
/// <summary>Number of bytes actually written.</summary>
public ulong LengthWritten;
}
/// <summary>
/// Flag bits for FSCTL_OFFLOAD_WRITE_OUTPUT.Flags; no flags are currently defined.
/// </summary>
[Flags]
public enum FSCTL_OFFLOAD_WRITE_OUTPUT_FLAGS : uint
{
NONE = 0,
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Runtime.Remoting.Messaging;
using Microsoft.ApplicationInsights;
using Microsoft.ApplicationInsights.Extensibility;
using Microsoft.WindowsAzure.ServiceRuntime;
using Microsoft.WindowsAzure.Storage.Queue;
using Microsoft.WindowsAzure.Storage.Table;
using MvcWebRole.Models;
using WorkerRoleA.Telemetry;
namespace WorkerRoleA
{
/// <summary>
/// Worker role A: periodically scans the "message" table for email messages
/// scheduled for tomorrow or earlier, fans each one out into per-subscriber
/// SendEmail rows and queue messages (consumed by worker role B), and archives
/// messages once every email has been sent.
/// </summary>
public class WorkerRoleA : RoleEntryPoint
{
    private CloudQueue sendEmailQueue;
    private CloudTable mailingListTable;
    private CloudTable messageTable;
    private CloudTable messagearchiveTable;
    // volatile: written by the OnStop thread, read by the Run loop thread.
    private volatile bool onStopCalled = false;
    private volatile bool returnedFromRunMethod = false;
    private TelemetryClient aiClient = new TelemetryClient();
    private static string CORRELATION_SLOT = "CORRELATION-ID";
    /// <summary>
    /// Main processing loop. Polls the message table roughly every 30 seconds;
    /// returns only after <see cref="OnStop"/> has been called, allowing a
    /// graceful shutdown. Exceptions are logged and the loop continues after a
    /// 60-second back-off so a recurring bug cannot flood Trace storage.
    /// </summary>
    public override void Run()
    {
        Trace.TraceInformation("WorkerRoleA entering Run()");
        while (true)
        {
            Stopwatch requestTimer = Stopwatch.StartNew();
            var request = RequestTelemetryHelper.StartNewRequest("ProcessMessageWorkflow", DateTimeOffset.UtcNow);
            // Flow the request id to downstream telemetry via the logical call context.
            CallContext.LogicalSetData(CORRELATION_SLOT, request.Id);
            //Thread.SetData(Thread.GetNamedDataSlot(CORRELATION_SLOT), request.Id);
            try
            {
                // NOTE: DateTime.Today is the role instance's local date; the
                // PartitionKey comparison below assumes scheduled dates use the
                // same convention.
                var tomorrow = DateTime.Today.AddDays(1.0).ToString("yyyy-MM-dd");
                // If OnStop has been called, return to do a graceful shutdown.
                if (onStopCalled)
                {
                    // BUGFIX: this trace message previously said "WorkerRoleB";
                    // this is worker role A.
                    Trace.TraceInformation("onStopCalled WorkerRoleA");
                    returnedFromRunMethod = true;
                    return;
                }
                // Retrieve all messages that are scheduled for tomorrow or earlier
                // and are in Pending or Queuing status.
                string typeAndDateFilter = TableQuery.CombineFilters(
                    TableQuery.GenerateFilterCondition("RowKey", QueryComparisons.GreaterThan, "message"),
                    TableOperators.And,
                    TableQuery.GenerateFilterCondition("PartitionKey", QueryComparisons.LessThan, tomorrow));
                var query = (new TableQuery<Message>().Where(typeAndDateFilter));
                var messagesToProcess = messageTable.ExecuteQuery(query).ToList();
                TableOperation replaceOperation;
                request.Metrics.Add(new KeyValuePair<string, double>("NumberOfMessages", messagesToProcess.Count));
                // Process each message (queue emails to be sent).
                foreach (Message messageToProcess in messagesToProcess)
                {
                    string restartFlag = "0";
                    // If the message is already in Queuing status,
                    // set flag to indicate this is a restart.
                    if (messageToProcess.Status == "Queuing")
                    {
                        restartFlag = "1";
                    }
                    // If the message is in Pending status, change
                    // it to Queuing.
                    if (messageToProcess.Status == "Pending")
                    {
                        messageToProcess.Status = "Queuing";
                        replaceOperation = TableOperation.Replace(messageToProcess);
                        messageTable.Execute(replaceOperation);
                    }
                    // If the message is in Queuing status,
                    // process it and change it to Processing status;
                    // otherwise it's already in processing status, and
                    // in that case check if processing is complete.
                    if (messageToProcess.Status == "Queuing")
                    {
                        ProcessMessage(messageToProcess, restartFlag);
                        messageToProcess.Status = "Processing";
                        replaceOperation = TableOperation.Replace(messageToProcess);
                        messageTable.Execute(replaceOperation);
                    }
                    else
                    {
                        CheckAndArchiveIfComplete(messageToProcess);
                    }
                }
                RequestTelemetryHelper.DispatchRequest(request, requestTimer.Elapsed, true);
                // Sleep to minimize query costs.
                System.Threading.Thread.Sleep(1000 * 30);
            }
            catch (Exception ex)
            {
                string err = ex.Message;
                if (ex.InnerException != null)
                {
                    err += " Inner Exception: " + ex.InnerException.Message;
                }
                Trace.TraceError(err, ex);
                RequestTelemetryHelper.DispatchRequest(request, requestTimer.Elapsed, false);
                // Don't fill up Trace storage if we have a bug in queue process loop.
                System.Threading.Thread.Sleep(1000 * 60);
            }
        }
    }
    /// <summary>
    /// Fans a message out to every verified subscriber of its mailing list:
    /// creates one SendEmail row and one queue message per subscriber.
    /// </summary>
    /// <param name="messageToProcess">The message row being queued.</param>
    /// <param name="restartFlag">"1" when re-queuing after a restart, else "0";
    /// forwarded to worker role B in the queue message.</param>
    private void ProcessMessage(Message messageToProcess, string restartFlag)
    {
        // Get Mailing List info to get the "From" email address.
        var retrieveOperation = TableOperation.Retrieve<MailingList>(messageToProcess.ListName, "mailinglist");
        var retrievedResult = mailingListTable.Execute(retrieveOperation);
        var mailingList = retrievedResult.Result as MailingList;
        if (mailingList == null)
        {
            Trace.TraceError("Mailing list not found: " + messageToProcess.ListName + " for message: " + messageToProcess.MessageRef);
            return;
        }
        // Get email addresses for this Mailing List.
        string filter = TableQuery.CombineFilters(
            TableQuery.GenerateFilterCondition("PartitionKey", QueryComparisons.Equal, messageToProcess.ListName),
            TableOperators.And,
            TableQuery.GenerateFilterCondition("RowKey", QueryComparisons.NotEqual, "mailinglist"));
        var query = new TableQuery<Subscriber>().Where(filter);
        var subscribers = mailingListTable.ExecuteQuery(query).ToList();
        aiClient.TrackMetric("SubscriberCount", subscribers.Count);
        foreach (Subscriber subscriber in subscribers)
        {
            // Verify that the subscriber email address has been verified.
            if (subscriber.Verified == false)
            {
                Trace.TraceInformation("Subscriber " + subscriber.EmailAddress + " not Verified, so not queuing ");
                continue;
            }
            // Create a SendEmail entity for this email.
            var sendEmailRow = new SendEmail
            {
                PartitionKey = messageToProcess.PartitionKey,
                RowKey = messageToProcess.MessageRef.ToString() + subscriber.EmailAddress,
                EmailAddress = subscriber.EmailAddress,
                EmailSent = false,
                MessageRef = messageToProcess.MessageRef,
                ScheduledDate = messageToProcess.ScheduledDate,
                FromEmailAddress = mailingList.FromEmailAddress,
                SubjectLine = messageToProcess.SubjectLine,
                SubscriberGUID = subscriber.SubscriberGUID,
                ListName = mailingList.ListName
            };
            // When we try to add the entity to the SendEmail table,
            // an exception might happen if this worker role went
            // down after processing some of the email addresses and then restarted.
            // In that case the row might already be present, so we do an Upsert operation.
            try
            {
                var upsertOperation = TableOperation.InsertOrReplace(sendEmailRow);
                messageTable.Execute(upsertOperation);
            }
            catch (Exception ex)
            {
                // Best-effort: log and continue with the remaining subscribers.
                string err = "Error creating SendEmail row: " + ex.Message;
                if (ex.InnerException != null)
                {
                    err += " Inner Exception: " + ex.InnerException;
                }
                Trace.TraceError(err, ex);
            }
            // Create the queue message.
            string queueMessageString =
                sendEmailRow.PartitionKey + "," +
                sendEmailRow.RowKey + "," +
                restartFlag;
            var queueMessage = new CloudQueueMessage(queueMessageString);
            sendEmailQueue.AddMessage(queueMessage);
        }
        Trace.TraceInformation("ProcessMessage end PK: "
            + messageToProcess.PartitionKey);
    }
    /// <summary>
    /// If every email for the given message has been sent (no SendEmail rows
    /// remain), copies the message row to the archive table with Status
    /// "Complete" and deletes it from the message table.
    /// </summary>
    private void CheckAndArchiveIfComplete(Message messageToCheck)
    {
        // Get the list of emails to be sent for this message: all SendEmail rows
        // for this message.
        string pkrkFilter = TableQuery.CombineFilters(
            TableQuery.GenerateFilterCondition("PartitionKey", QueryComparisons.Equal, messageToCheck.PartitionKey),
            TableOperators.And,
            TableQuery.GenerateFilterCondition("RowKey", QueryComparisons.LessThan, "message"));
        var query = new TableQuery<SendEmail>().Where(pkrkFilter);
        // FirstOrDefault: we only need to know whether any row remains.
        var emailToBeSent = messageTable.ExecuteQuery(query).FirstOrDefault();
        if (emailToBeSent != null)
        {
            return;
        }
        // All emails have been sent; copy the message row to the archive table.
        // Insert the message row in the messagearchive table.
        // ETag "*" forces an unconditional delete below.
        var messageToDelete = new Message { PartitionKey = messageToCheck.PartitionKey, RowKey = messageToCheck.RowKey, ETag = "*" };
        messageToCheck.Status = "Complete";
        var insertOrReplaceOperation = TableOperation.InsertOrReplace(messageToCheck);
        messagearchiveTable.Execute(insertOrReplaceOperation);
        // Delete the message row from the message table.
        var deleteOperation = TableOperation.Delete(messageToDelete);
        messageTable.Execute(deleteOperation);
    }
    /// <summary>
    /// Signals the Run loop to exit and blocks until it has returned, giving
    /// in-flight table/queue work a chance to finish before shutdown.
    /// </summary>
    public override void OnStop()
    {
        onStopCalled = true;
        while (!returnedFromRunMethod)
        {
            System.Threading.Thread.Sleep(1000);
        }
    }
    /// <summary>
    /// One-time initialization: wires up Application Insights, raises the
    /// outbound connection limit, and creates the queue/tables if missing.
    /// </summary>
    public override bool OnStart()
    {
        TelemetryConfiguration.Active.InstrumentationKey = RoleEnvironment.GetConfigurationSettingValue("APPINSIGHTS_INSTRUMENTATIONKEY");
        TelemetryConfiguration.Active.TelemetryInitializers.Add(new ItemCorrelationTelemetryInitializer());
        ServicePointManager.DefaultConnectionLimit = Environment.ProcessorCount * 12;
        Trace.TraceInformation("Initializing storage account in WorkerA");
        var storageAccount = Microsoft.WindowsAzure.Storage.CloudStorageAccount.Parse(RoleEnvironment.GetConfigurationSettingValue("StorageConnectionString"));
        CloudQueueClient queueClient = storageAccount.CreateCloudQueueClient();
        sendEmailQueue = queueClient.GetQueueReference("azuremailqueue");
        var tableClient = storageAccount.CreateCloudTableClient();
        mailingListTable = tableClient.GetTableReference("mailinglist");
        messageTable = tableClient.GetTableReference("message");
        messagearchiveTable = tableClient.GetTableReference("messagearchive");
        // Create if not exists for queue, blob container, SentEmail table.
        sendEmailQueue.CreateIfNotExists();
        messageTable.CreateIfNotExists();
        mailingListTable.CreateIfNotExists();
        messagearchiveTable.CreateIfNotExists();
        return base.OnStart();
    }
}
}
| |
// IAddChild, ContentPropertyAttribute
namespace Microsoft._3DTools
{
using System;
using System.Collections;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Markup;
using System.Windows.Media;
/// <summary>
/// This class enables a Viewport3D to be enhanced by allowing UIElements to be placed
/// behind and in front of the Viewport3D. These can then be used for various enhancements.
/// For examples see the Trackball, or InteractiveViewport3D.
/// </summary>
[ContentProperty("Content")]
public abstract class Viewport3DDecorator : FrameworkElement, IAddChild
{
    // ---------------------------------------------------------
    // Private data
    // ---------------------------------------------------------
    // The single child: either a Viewport3D or another Viewport3DDecorator.
    private UIElement _content;
    /// <summary>
    /// Creates the Viewport3DDecorator
    /// </summary>
    public Viewport3DDecorator()
    {
        // create the two lists of children
        this.PreViewportChildren = new UIElementCollection(this, this);
        this.PostViewportChildren = new UIElementCollection(this, this);
        // no content yet
        this._content = null;
    }
    /// <summary>
    /// The content/child of the Viewport3DDecorator. A Viewport3DDecorator only has one
    /// child and this child must be either another Viewport3DDecorator or a Viewport3D.
    /// </summary>
    public UIElement Content
    {
        get
        {
            return this._content;
        }
        set
        {
            // check to make sure it is a Viewport3D or a Viewport3DDecorator
            if (!(value is Viewport3D || value is Viewport3DDecorator))
            {
                throw new ArgumentException("Not a valid child type", "value");
            }
            // check to make sure we're attempting to set something new
            if (this._content != value)
            {
                UIElement oldContent = this._content;
                UIElement newContent = value;
                // remove the previous child from both trees before attaching
                // the new one (order matters for WPF tree bookkeeping)
                this.RemoveVisualChild(oldContent);
                this.RemoveLogicalChild(oldContent);
                // set the private variable
                this._content = value;
                // link in the new child
                this.AddLogicalChild(newContent);
                this.AddVisualChild(newContent);
                // let anyone know that derives from us that there was a change
                this.OnViewport3DDecoratorContentChange(oldContent, newContent);
                // data bind to what is below us so that we have the same width/height
                // as the Viewport3D being enhanced
                // create the bindings now for use later
                this.BindToContentsWidthHeight(newContent);
                // Invalidate measure to indicate a layout update may be necessary
                this.InvalidateMeasure();
            }
        }
    }
    /// <summary>
    /// Property to get the Viewport3D that is being enhanced.
    /// Walks down the decorator chain; returns null if the chain ends
    /// without reaching a Viewport3D.
    /// </summary>
    public Viewport3D Viewport3D
    {
        get
        {
            Viewport3D viewport3D = null;
            Viewport3DDecorator currEnhancer = this;
            // we follow the enhancers down until we get the
            // Viewport3D they are enhancing
            while (true)
            {
                UIElement currContent = currEnhancer.Content;
                if (currContent == null)
                {
                    break;
                }
                if (currContent is Viewport3D)
                {
                    viewport3D = (Viewport3D)currContent;
                    break;
                }
                currEnhancer = (Viewport3DDecorator)currContent;
            }
            return viewport3D;
        }
    }
    /// <summary>
    /// The UIElements that occur before the Viewport3D
    /// </summary>
    protected UIElementCollection PreViewportChildren { get; private set; }
    /// <summary>
    /// The UIElements that occur after the Viewport3D
    /// </summary>
    protected UIElementCollection PostViewportChildren { get; private set; }
    /// <summary>
    /// Returns the number of Visual children this element has
    /// (pre children + optional content + post children).
    /// </summary>
    protected override int VisualChildrenCount
    {
        get
        {
            int contentCount = this.Content == null ? 0 : 1;
            return this.PreViewportChildren.Count + this.PostViewportChildren.Count + contentCount;
        }
    }
    /// <summary>
    /// Returns an enumerator to this element's logical children.
    /// Logical children are reported identically to visual children here.
    /// </summary>
    protected override IEnumerator LogicalChildren
    {
        get
        {
            Visual[] logicalChildren = new Visual[this.VisualChildrenCount];
            for (int i = 0; i < this.VisualChildrenCount; i++)
            {
                logicalChildren[i] = this.GetVisualChild(i);
            }
            // return an enumerator over the snapshot array
            return logicalChildren.GetEnumerator();
        }
    }
    // ------------------------------------------------------
    // IAddChild implementation
    // ------------------------------------------------------
    void IAddChild.AddChild(object value)
    {
        // check against null
        if (value == null)
        {
            throw new ArgumentNullException("value");
        }
        // we only can have one child
        if (this.Content != null)
        {
            throw new ArgumentException("Viewport3DDecorator can only have one child");
        }
        // now we can actually set the content
        this.Content = (UIElement)value;
    }
    void IAddChild.AddText(string text)
    {
        // The only text we accept is whitespace, which we ignore.
        for (int i = 0; i < text.Length; i++)
        {
            if (!char.IsWhiteSpace(text[i]))
            {
                throw new ArgumentException("Non whitespace in add text", text);
            }
        }
    }
    /// <summary>
    /// Data binds the (Max/Min)Width and (Max/Min)Height properties to the same
    /// ones as the content. This will make it so we end up being sized to be
    /// exactly the same ActualWidth and ActualHeight as what is below us.
    /// </summary>
    /// <param name="newContent">What to bind to</param>
    private void BindToContentsWidthHeight(UIElement newContent)
    {
        // bind to width height
        Binding _widthBinding = new Binding("Width");
        _widthBinding.Mode = BindingMode.OneWay;
        Binding _heightBinding = new Binding("Height");
        _heightBinding.Mode = BindingMode.OneWay;
        _widthBinding.Source = newContent;
        _heightBinding.Source = newContent;
        BindingOperations.SetBinding(this, WidthProperty, _widthBinding);
        BindingOperations.SetBinding(this, HeightProperty, _heightBinding);
        // bind to max width and max height
        Binding _maxWidthBinding = new Binding("MaxWidth");
        _maxWidthBinding.Mode = BindingMode.OneWay;
        Binding _maxHeightBinding = new Binding("MaxHeight");
        _maxHeightBinding.Mode = BindingMode.OneWay;
        _maxWidthBinding.Source = newContent;
        _maxHeightBinding.Source = newContent;
        BindingOperations.SetBinding(this, MaxWidthProperty, _maxWidthBinding);
        BindingOperations.SetBinding(this, MaxHeightProperty, _maxHeightBinding);
        // bind to min width and min height
        Binding _minWidthBinding = new Binding("MinWidth");
        _minWidthBinding.Mode = BindingMode.OneWay;
        Binding _minHeightBinding = new Binding("MinHeight");
        _minHeightBinding.Mode = BindingMode.OneWay;
        _minWidthBinding.Source = newContent;
        _minHeightBinding.Source = newContent;
        BindingOperations.SetBinding(this, MinWidthProperty, _minWidthBinding);
        BindingOperations.SetBinding(this, MinHeightProperty, _minHeightBinding);
    }
    /// <summary>
    /// Extenders of Viewport3DDecorator can override this function to be notified
    /// when the Content property changes
    /// </summary>
    /// <param name="oldContent">The old value of the Content property</param>
    /// <param name="newContent">The new value of the Content property</param>
    protected virtual void OnViewport3DDecoratorContentChange(UIElement oldContent, UIElement newContent)
    {
    }
    /// <summary>
    /// Returns the child at the specified index.
    /// Index order: pre viewport children, then Content, then post children.
    /// </summary>
    protected override Visual GetVisualChild(int index)
    {
        int orginalIndex = index;
        // see if index is in the pre viewport children
        if (index < this.PreViewportChildren.Count)
        {
            return this.PreViewportChildren[index];
        }
        index -= this.PreViewportChildren.Count;
        // see if it's the content
        if (this.Content != null && index == 0)
        {
            return this.Content;
        }
        index -= this.Content == null ? 0 : 1;
        // see if it's the post viewport children
        if (index < this.PostViewportChildren.Count)
        {
            return this.PostViewportChildren[index];
        }
        // if we didn't return then the index is out of range - throw an error
        throw new ArgumentOutOfRangeException("index", orginalIndex, "Out of range visual requested");
    }
    /// <summary>
    /// Updates the DesiredSize of the Viewport3DDecorator.
    /// The decorator's desired size is that of its Content alone; the pre/post
    /// children are measured but do not contribute to the returned size.
    /// </summary>
    /// <param name="constraint">The "upper limit" that the return value should not exceed</param>
    /// <returns>The desired size of the Viewport3DDecorator</returns>
    protected override Size MeasureOverride(Size constraint)
    {
        Size finalSize = new Size();
        this.MeasurePreViewportChildren(constraint);
        // measure our Viewport3D(Enhancer)
        if (this.Content != null)
        {
            this.Content.Measure(constraint);
            finalSize = this.Content.DesiredSize;
        }
        this.MeasurePostViewportChildren(constraint);
        return finalSize;
    }
    /// <summary>
    /// Measures the size of all the PreViewportChildren. If special measuring behavior is needed, this
    /// method should be overridden.
    /// </summary>
    /// <param name="constraint">The "upper limit" on the size of an element</param>
    protected virtual void MeasurePreViewportChildren(Size constraint)
    {
        // measure the pre viewport children
        this.MeasureUIElementCollection(this.PreViewportChildren, constraint);
    }
    /// <summary>
    /// Measures the size of all the PostViewportChildren. If special measuring behavior is needed, this
    /// method should be overridden.
    /// </summary>
    /// <param name="constraint">The "upper limit" on the size of an element</param>
    protected virtual void MeasurePostViewportChildren(Size constraint)
    {
        // measure the post viewport children
        this.MeasureUIElementCollection(this.PostViewportChildren, constraint);
    }
    /// <summary>
    /// Measures all of the UIElements in a UIElementCollection
    /// </summary>
    /// <param name="collection">The collection to measure</param>
    /// <param name="constraint">The "upper limit" on the size of an element</param>
    private void MeasureUIElementCollection(UIElementCollection collection, Size constraint)
    {
        // measure each element in the collection
        foreach (UIElement uiElem in collection)
        {
            uiElem.Measure(constraint);
        }
    }
    /// <summary>
    /// Arranges the Pre and Post Viewport children, and arranges itself
    /// </summary>
    /// <param name="arrangeSize">The final size to use to arrange itself and its children</param>
    protected override Size ArrangeOverride(Size arrangeSize)
    {
        this.ArrangePreViewportChildren(arrangeSize);
        // arrange our Viewport3D(Enhancer)
        if (this.Content != null)
        {
            this.Content.Arrange(new Rect(arrangeSize));
        }
        this.ArrangePostViewportChildren(arrangeSize);
        return arrangeSize;
    }
    /// <summary>
    /// Arranges all the PreViewportChildren. If special measuring behavior is needed, this
    /// method should be overridden.
    /// </summary>
    /// <param name="arrangeSize">The final size to use to arrange each child</param>
    protected virtual void ArrangePreViewportChildren(Size arrangeSize)
    {
        this.ArrangeUIElementCollection(this.PreViewportChildren, arrangeSize);
    }
    /// <summary>
    /// Arranges all the PostViewportChildren. If special measuring behavior is needed, this
    /// method should be overridden.
    /// </summary>
    /// <param name="arrangeSize">The final size to use to arrange each child</param>
    protected virtual void ArrangePostViewportChildren(Size arrangeSize)
    {
        this.ArrangeUIElementCollection(this.PostViewportChildren, arrangeSize);
    }
    /// <summary>
    /// Arranges all the UIElements in the passed in UIElementCollection
    /// </summary>
    /// <param name="collection">The collection that should be arranged</param>
    /// <param name="constraint">The final size that element should use to arrange itself and its children</param>
    private void ArrangeUIElementCollection(UIElementCollection collection, Size constraint)
    {
        // arrange each element in the collection to the full given rect
        foreach (UIElement uiElem in collection)
        {
            uiElem.Arrange(new Rect(constraint));
        }
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Net.Sockets;
using System.Net.Test.Common;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
using Xunit.Abstractions;
namespace System.Net.WebSockets.Client.Tests
{
/// <summary>
/// Runs the shared send/receive test suite against the ArraySegment-based
/// ClientWebSocket SendAsync/ReceiveAsync overloads.
/// </summary>
public sealed class ArraySegmentSendReceiveTest : SendReceiveTest
{
    public ArraySegmentSendReceiveTest(ITestOutputHelper output) : base(output)
    {
    }

    protected override Task<WebSocketReceiveResult> ReceiveAsync(WebSocket ws, ArraySegment<byte> arraySegment, CancellationToken cancellationToken)
    {
        // Delegate straight to the ArraySegment overload under test.
        return ws.ReceiveAsync(arraySegment, cancellationToken);
    }

    protected override Task SendAsync(WebSocket ws, ArraySegment<byte> arraySegment, WebSocketMessageType messageType, bool endOfMessage, CancellationToken cancellationToken)
    {
        // Delegate straight to the ArraySegment overload under test.
        return ws.SendAsync(arraySegment, messageType, endOfMessage, cancellationToken);
    }
}
public abstract class SendReceiveTest : ClientWebSocketTestBase
{
// Template hooks: each concrete subclass exercises one send/receive API
// surface (e.g. the ArraySegment-based overloads) through these methods.
protected abstract Task SendAsync(WebSocket ws, ArraySegment<byte> arraySegment, WebSocketMessageType messageType, bool endOfMessage, CancellationToken cancellationToken);
protected abstract Task<WebSocketReceiveResult> ReceiveAsync(WebSocket ws, ArraySegment<byte> arraySegment, CancellationToken cancellationToken);
// Conditional-fact gate: some platforms cannot send/receive partial messages.
public static bool PartialMessagesSupported => PlatformDetection.ClientWebSocketPartialMessagesSupported;
public SendReceiveTest(ITestOutputHelper output) : base(output) { }
/// <summary>
/// Sends a multi-fragment binary message and verifies that a receive buffer
/// half the size of a fragment yields partial reads (EndOfMessage == false)
/// until the full echoed payload has been consumed.
/// </summary>
[OuterLoop("Uses external servers")]
[ConditionalTheory(nameof(WebSocketsSupported)), MemberData(nameof(EchoServers))]
public async Task SendReceive_PartialMessageDueToSmallReceiveBuffer_Success(Uri server)
{
    const int SendBufferSize = 10;
    var sendBuffer = new byte[SendBufferSize];
    var sendSegment = new ArraySegment<byte>(sendBuffer);
    var receiveBuffer = new byte[SendBufferSize / 2];
    var receiveSegment = new ArraySegment<byte>(receiveBuffer);
    using (ClientWebSocket cws = await WebSocketHelper.GetConnectedWebSocket(server, TimeOutMilliseconds, _output))
    {
        var ctsDefault = new CancellationTokenSource(TimeOutMilliseconds);
        // The server will read buffers and aggregate it before echoing back a complete message.
        // But since this test uses a receive buffer that is smaller than the complete message, we will get
        // back partial message fragments as we read them until we read the complete message payload.
        for (int i = 0; i < SendBufferSize * 5; i++)
        {
            await SendAsync(cws, sendSegment, WebSocketMessageType.Binary, false, ctsDefault.Token);
        }
        await SendAsync(cws, sendSegment, WebSocketMessageType.Binary, true, ctsDefault.Token);
        WebSocketReceiveResult recvResult = await ReceiveAsync(cws, receiveSegment, ctsDefault.Token);
        // FIX: Assert.False instead of Assert.Equal(false, ...) (xUnit2004).
        Assert.False(recvResult.EndOfMessage);
        while (!recvResult.EndOfMessage)
        {
            recvResult = await ReceiveAsync(cws, receiveSegment, ctsDefault.Token);
        }
        await cws.CloseAsync(WebSocketCloseStatus.NormalClosure, "PartialMessageDueToSmallReceiveBufferTest", ctsDefault.Token);
    }
}
/// <summary>
/// Verifies that partial-message fragments can be consumed as they arrive:
/// the server echoes without ever signaling "end of message", and every read
/// must therefore report EndOfMessage == false while the bytes accumulate to
/// exactly the payload that was sent.
/// </summary>
[OuterLoop("Uses external servers")]
[ConditionalTheory(nameof(WebSocketsSupported), nameof(PartialMessagesSupported)), MemberData(nameof(EchoServers))]
public async Task SendReceive_PartialMessageBeforeCompleteMessageArrives_Success(Uri server)
{
    var rand = new Random();
    var sendBuffer = new byte[ushort.MaxValue + 1];
    rand.NextBytes(sendBuffer);
    var sendSegment = new ArraySegment<byte>(sendBuffer);
    // Ask the remote server to echo back received messages without ever signaling "end of message".
    var ub = new UriBuilder(server);
    ub.Query = "replyWithPartialMessages";
    using (ClientWebSocket cws = await WebSocketHelper.GetConnectedWebSocket(ub.Uri, TimeOutMilliseconds, _output))
    {
        var ctsDefault = new CancellationTokenSource(TimeOutMilliseconds);
        // Send data to the server; the server will reply back with one or more partial messages. We should be
        // able to consume that data as it arrives, without having to wait for "end of message" to be signaled.
        await SendAsync(cws, sendSegment, WebSocketMessageType.Binary, true, ctsDefault.Token);
        int totalBytesReceived = 0;
        var receiveBuffer = new byte[sendBuffer.Length];
        while (totalBytesReceived < receiveBuffer.Length)
        {
            WebSocketReceiveResult recvResult = await ReceiveAsync(
                cws,
                new ArraySegment<byte>(receiveBuffer, totalBytesReceived, receiveBuffer.Length - totalBytesReceived),
                ctsDefault.Token);
            // FIX: Assert.False instead of Assert.Equal(false, ...) (xUnit2004).
            Assert.False(recvResult.EndOfMessage);
            Assert.InRange(recvResult.Count, 0, receiveBuffer.Length - totalBytesReceived);
            totalBytesReceived += recvResult.Count;
        }
        Assert.Equal(receiveBuffer.Length, totalBytesReceived);
        Assert.Equal<byte>(sendBuffer, receiveBuffer);
        await cws.CloseAsync(WebSocketCloseStatus.NormalClosure, "PartialMessageBeforeCompleteMessageArrives", ctsDefault.Token);
    }
}
/// <summary>
/// Verifies that calling SendAsync with WebSocketMessageType.Close throws an
/// ArgumentException synchronously (inside the lambda, before the returned
/// Task is observed) and that the socket remains in the Open state.
/// </summary>
[OuterLoop("Uses external servers")]
[ConditionalTheory(nameof(WebSocketsSupported)), MemberData(nameof(EchoServers))]
public async Task SendAsync_SendCloseMessageType_ThrowsArgumentExceptionWithMessage(Uri server)
{
    using (ClientWebSocket cws = await WebSocketHelper.GetConnectedWebSocket(server, TimeOutMilliseconds, _output))
    {
        var cts = new CancellationTokenSource(TimeOutMilliseconds);
        string expectedInnerMessage = ResourceHelper.GetExceptionMessage(
            "net_WebSockets_Argument_InvalidMessageType",
            "Close",
            "SendAsync",
            "Binary",
            "Text",
            "CloseOutputAsync");
        // NOTE(review): expectedException/expectedMessage are built but never
        // asserted against — presumably AssertExtensions.Throws once compared
        // the message; confirm whether the message check should be restored.
        var expectedException = new ArgumentException(expectedInnerMessage, "messageType");
        string expectedMessage = expectedException.Message;
        AssertExtensions.Throws<ArgumentException>("messageType", () =>
        {
            // The exception is expected synchronously; t is never awaited.
            Task t = SendAsync(cws, new ArraySegment<byte>(), WebSocketMessageType.Close, true, cts.Token);
        });
        Assert.Equal(WebSocketState.Open, cws.State);
    }
}
/// <summary>
/// Starts ten concurrent SendAsync operations on one socket and verifies the
/// failure mode: either all complete (socket stays Open), or the failure is an
/// InvalidOperationException ("already one outstanding operation"), a
/// WebSocketException (InvalidState/Success), or an OperationCanceledException —
/// in the exception cases the socket must be Aborted.
/// </summary>
[OuterLoop("Uses external servers")]
[ConditionalTheory(nameof(WebSocketsSupported)), MemberData(nameof(EchoServers))]
public async Task SendAsync_MultipleOutstandingSendOperations_Throws(Uri server)
{
    using (ClientWebSocket cws = await WebSocketHelper.GetConnectedWebSocket(server, TimeOutMilliseconds, _output))
    {
        var cts = new CancellationTokenSource(TimeOutMilliseconds);
        Task[] tasks = new Task[10];
        try
        {
            // Kick off all sends without awaiting so they overlap.
            for (int i = 0; i < tasks.Length; i++)
            {
                tasks[i] = SendAsync(
                    cws,
                    WebSocketData.GetBufferFromText("hello"),
                    WebSocketMessageType.Text,
                    true,
                    cts.Token);
            }
            await Task.WhenAll(tasks);
            Assert.Equal(WebSocketState.Open, cws.State);
        }
        catch (AggregateException ag)
        {
            // Task.WhenAll may surface several failures; triage each one.
            foreach (var ex in ag.InnerExceptions)
            {
                if (ex is InvalidOperationException)
                {
                    Assert.Equal(
                        ResourceHelper.GetExceptionMessage(
                            "net_Websockets_AlreadyOneOutstandingOperation",
                            "SendAsync"),
                        ex.Message);
                    Assert.Equal(WebSocketState.Aborted, cws.State);
                }
                else if (ex is WebSocketException)
                {
                    // Multiple cases.
                    Assert.Equal(WebSocketState.Aborted, cws.State);
                    WebSocketError errCode = (ex as WebSocketException).WebSocketErrorCode;
                    Assert.True(
                        (errCode == WebSocketError.InvalidState) || (errCode == WebSocketError.Success),
                        "WebSocketErrorCode");
                }
                else
                {
                    // Anything else must at least be a cancellation.
                    Assert.IsAssignableFrom<OperationCanceledException>(ex);
                }
            }
        }
    }
}
[OuterLoop("Uses external servers")]
[ConditionalTheory(nameof(WebSocketsSupported)), MemberData(nameof(EchoServers))]
public async Task ReceiveAsync_MultipleOutstandingReceiveOperations_Throws(Uri server)
{
    using (ClientWebSocket cws = await WebSocketHelper.GetConnectedWebSocket(server, TimeOutMilliseconds, _output))
    {
        var cts = new CancellationTokenSource(TimeOutMilliseconds);

        Task[] tasks = new Task[2];

        // ".delay5sec" presumably instructs the echo server to delay its reply, keeping the
        // first ReceiveAsync pending long enough for the second to overlap it — TODO confirm
        // against the echo server implementation.
        await SendAsync(
            cws,
            WebSocketData.GetBufferFromText(".delay5sec"),
            WebSocketMessageType.Text,
            true,
            cts.Token);

        var recvBuffer = new byte[100];
        var recvSegment = new ArraySegment<byte>(recvBuffer);

        try
        {
            // Post two receives without awaiting in between; only one outstanding receive is
            // allowed at a time, so the overlap should be rejected.
            for (int i = 0; i < tasks.Length; i++)
            {
                tasks[i] = ReceiveAsync(cws, recvSegment, cts.Token);
            }

            await Task.WhenAll(tasks);

            Assert.Equal(WebSocketState.Open, cws.State);
        }
        catch (Exception ex)
        {
            if (ex is InvalidOperationException)
            {
                // Overlap detected: "already one outstanding operation" and the socket aborts.
                Assert.Equal(
                    ResourceHelper.GetExceptionMessage(
                        "net_Websockets_AlreadyOneOutstandingOperation",
                        "ReceiveAsync"),
                    ex.Message);

                Assert.Equal(WebSocketState.Aborted, cws.State);
            }
            else if (ex is WebSocketException)
            {
                // Multiple cases.
                Assert.Equal(WebSocketState.Aborted, cws.State);

                WebSocketError errCode = (ex as WebSocketException).WebSocketErrorCode;
                Assert.True(
                    (errCode == WebSocketError.InvalidState) || (errCode == WebSocketError.Success),
                    "WebSocketErrorCode");
            }
            else if (ex is OperationCanceledException)
            {
                // The pending receive may instead be torn down via cancellation.
                Assert.Equal(WebSocketState.Aborted, cws.State);
            }
            else
            {
                Assert.True(false, "Unexpected exception: " + ex.Message);
            }
        }
    }
}
[OuterLoop("Uses external servers")]
[ConditionalTheory(nameof(WebSocketsSupported)), MemberData(nameof(EchoServers))]
public async Task SendAsync_SendZeroLengthPayloadAsEndOfMessage_Success(Uri server)
{
    using (ClientWebSocket cws = await WebSocketHelper.GetConnectedWebSocket(server, TimeOutMilliseconds, _output))
    {
        var cts = new CancellationTokenSource(TimeOutMilliseconds);

        // Send the payload as a non-final fragment, then terminate the message with an
        // empty end-of-message frame; the echoed message must contain only the payload.
        string message = "hello";
        await SendAsync(
            cws,
            WebSocketData.GetBufferFromText(message),
            WebSocketMessageType.Text,
            false,
            cts.Token);
        Assert.Equal(WebSocketState.Open, cws.State);

        await SendAsync(
            cws,
            new ArraySegment<byte>(new byte[0]),
            WebSocketMessageType.Text,
            true,
            cts.Token);
        Assert.Equal(WebSocketState.Open, cws.State);

        var recvBuffer = new byte[100];
        var receiveSegment = new ArraySegment<byte>(recvBuffer);
        WebSocketReceiveResult recvRet = await ReceiveAsync(cws, receiveSegment, cts.Token);

        Assert.Equal(WebSocketState.Open, cws.State);
        Assert.Equal(message.Length, recvRet.Count);
        Assert.Equal(WebSocketMessageType.Text, recvRet.MessageType);
        // Fix: use Assert.True / Assert.Null instead of Assert.Equal(true, …) /
        // Assert.Equal(null, …) (xUnit analyzer rules xUnit2004 / xUnit2003).
        Assert.True(recvRet.EndOfMessage);
        Assert.Null(recvRet.CloseStatus);
        Assert.Null(recvRet.CloseStatusDescription);

        var recvSegment = new ArraySegment<byte>(receiveSegment.Array, receiveSegment.Offset, recvRet.Count);
        Assert.Equal(message, WebSocketData.GetTextFromBuffer(recvSegment));
    }
}
[OuterLoop("Uses external servers")]
[ConditionalTheory(nameof(WebSocketsSupported)), MemberData(nameof(EchoServers))]
public async Task SendReceive_VaryingLengthBuffers_Success(Uri server)
{
    using (ClientWebSocket cws = await WebSocketHelper.GetConnectedWebSocket(server, TimeOutMilliseconds, _output))
    {
        var rng = new Random();
        var cancellation = new CancellationTokenSource(TimeOutMilliseconds);

        // Payload sizes straddle the boundaries of websocket message-length encoding as well
        // as the vector widths used when applying the payload mask.
        int[] payloadSizes = { 1, 3, 4, 5, 31, 32, 33, 125, 126, 127, 128, ushort.MaxValue - 1, ushort.MaxValue, ushort.MaxValue + 1, ushort.MaxValue * 2 };
        foreach (int payloadSize in payloadSizes)
        {
            // Echo a random payload of the given size.
            byte[] outgoing = new byte[payloadSize];
            rng.NextBytes(outgoing);
            await SendAsync(cws, new ArraySegment<byte>(outgoing), WebSocketMessageType.Binary, true, cancellation.Token);

            // Drain the echoed message, which may arrive fragmented across several receives.
            byte[] incoming = new byte[payloadSize];
            int received = 0;
            WebSocketReceiveResult result;
            do
            {
                result = await ReceiveAsync(
                    cws,
                    new ArraySegment<byte>(incoming, received, incoming.Length - received),
                    cancellation.Token);
                Assert.InRange(result.Count, 0, incoming.Length - received);
                received += result.Count;
            }
            while (!result.EndOfMessage);

            Assert.Equal(incoming.Length, received);
            Assert.Equal<byte>(outgoing, incoming);
        }

        await cws.CloseAsync(WebSocketCloseStatus.NormalClosure, "SendReceive_VaryingLengthBuffers_Success", cancellation.Token);
    }
}
[OuterLoop("Uses external servers")]
[ConditionalTheory(nameof(WebSocketsSupported)), MemberData(nameof(EchoServers))]
public async Task SendReceive_Concurrent_Success(Uri server)
{
    using (ClientWebSocket cws = await WebSocketHelper.GetConnectedWebSocket(server, TimeOutMilliseconds, _output))
    {
        var ctsDefault = new CancellationTokenSource(TimeOutMilliseconds);

        byte[] receiveBuffer = new byte[10];
        byte[] sendBuffer = new byte[10];
        for (int i = 0; i < sendBuffer.Length; i++)
        {
            sendBuffer[i] = (byte)i;
        }

        // Issue each one-byte receive concurrently with the matching one-byte send to verify
        // that a send and a receive may be outstanding simultaneously. Received bytes are
        // written back-to-front, so Array.Reverse below restores the original order.
        for (int i = 0; i < sendBuffer.Length; i++)
        {
            Task<WebSocketReceiveResult> receive = ReceiveAsync(cws, new ArraySegment<byte>(receiveBuffer, receiveBuffer.Length - i - 1, 1), ctsDefault.Token);
            Task send = SendAsync(cws, new ArraySegment<byte>(sendBuffer, i, 1), WebSocketMessageType.Binary, true, ctsDefault.Token);
            await Task.WhenAll(receive, send);
            Assert.Equal(1, receive.Result.Count);
        }

        // Fix: the close-status description previously said
        // "SendReceive_VaryingLengthBuffers_Success" — a copy/paste from the neighboring
        // test; use this test's own name.
        await cws.CloseAsync(WebSocketCloseStatus.NormalClosure, nameof(SendReceive_Concurrent_Success), ctsDefault.Token);

        Array.Reverse(receiveBuffer);
        Assert.Equal<byte>(sendBuffer, receiveBuffer);
    }
}
[OuterLoop("Uses external servers")]
[ConditionalFact(nameof(WebSocketsSupported))]
public async Task SendReceive_ConnectionClosedPrematurely_ReceiveAsyncFailsAndWebSocketStateUpdated()
{
    var options = new LoopbackServer.Options { WebSocketEndpoint = true };

    // Drives a handshake against a loopback server, posts a pending ReceiveAsync, then has the
    // server kill the raw TCP connection without sending a WebSocket Close frame, and verifies
    // the client observes ConnectionClosedPrematurely and ends up Aborted.
    Func<ClientWebSocket, LoopbackServer, Uri, Task> connectToServerThatAbortsConnection = async (clientSocket, server, url) =>
    {
        // Signals the server side once the client has a ReceiveAsync in flight, so the
        // connection is only torn down while a receive is pending.
        var pendingReceiveAsyncPosted = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);

        // Start listening for incoming connections on the server side.
        Task acceptTask = server.AcceptConnectionAsync(async connection =>
        {
            // Complete the WebSocket upgrade. After this is done, the client-side ConnectAsync should complete.
            Assert.NotNull(await LoopbackHelper.WebSocketHandshakeAsync(connection));

            // Wait for client-side ConnectAsync to complete and for a pending ReceiveAsync to be posted.
            await pendingReceiveAsyncPosted.Task.TimeoutAfter(TimeOutMilliseconds);

            // Close the underlying connection prematurely (without sending a WebSocket Close frame).
            connection.Socket.Shutdown(SocketShutdown.Both);
            connection.Socket.Close();
        });

        // Initiate a connection attempt.
        var cts = new CancellationTokenSource(TimeOutMilliseconds);
        await clientSocket.ConnectAsync(url, cts.Token);

        // Post a pending ReceiveAsync before the TCP connection is torn down.
        var recvBuffer = new byte[100];
        var recvSegment = new ArraySegment<byte>(recvBuffer);
        Task pendingReceiveAsync = ReceiveAsync(clientSocket, recvSegment, cts.Token);
        pendingReceiveAsyncPosted.SetResult(true);

        // Wait for the server to close the underlying connection.
        await acceptTask.WithCancellation(cts.Token);

        // The in-flight receive must fail with ConnectionClosedPrematurely.
        WebSocketException pendingReceiveException = await Assert.ThrowsAsync<WebSocketException>(() => pendingReceiveAsync);

        Assert.Equal(WebSocketError.ConnectionClosedPrematurely, pendingReceiveException.WebSocketErrorCode);

        if (PlatformDetection.IsUap)
        {
            // UAP surfaces the underlying WinINet failure as the inner exception.
            const uint WININET_E_CONNECTION_ABORTED = 0x80072EFE;

            Assert.NotNull(pendingReceiveException.InnerException);
            Assert.Equal(WININET_E_CONNECTION_ABORTED, (uint)pendingReceiveException.InnerException.HResult);
        }

        // Any subsequent receive fails because the socket is now in the Aborted state.
        WebSocketException newReceiveException =
                await Assert.ThrowsAsync<WebSocketException>(() => ReceiveAsync(clientSocket, recvSegment, cts.Token));

        Assert.Equal(
            ResourceHelper.GetExceptionMessage("net_WebSockets_InvalidState", "Aborted", "Open, CloseSent"),
            newReceiveException.Message);

        Assert.Equal(WebSocketState.Aborted, clientSocket.State);
        Assert.Null(clientSocket.CloseStatus);
    };

    await LoopbackServer.CreateServerAsync(async (server, url) =>
    {
        using (ClientWebSocket clientSocket = new ClientWebSocket())
        {
            await connectToServerThatAbortsConnection(clientSocket, server, url);
        }
    }, options);
}
[OuterLoop("Uses external servers")]
[ConditionalTheory(nameof(WebSocketsSupported)), MemberData(nameof(EchoServers))]
public async Task ZeroByteReceive_CompletesWhenDataAvailable(Uri server)
{
    using (ClientWebSocket cws = await WebSocketHelper.GetConnectedWebSocket(server, TimeOutMilliseconds, _output))
    {
        // Fix: removed unused local `var rand = new Random();` — nothing in this test
        // consumed it.
        var ctsDefault = new CancellationTokenSource(TimeOutMilliseconds);

        // Do a 0-byte receive. It shouldn't complete yet.
        Task<WebSocketReceiveResult> t = ReceiveAsync(cws, new ArraySegment<byte>(Array.Empty<byte>()), ctsDefault.Token);
        Assert.False(t.IsCompleted);

        // Send a packet to the echo server.
        await SendAsync(cws, new ArraySegment<byte>(new byte[1] { 42 }), WebSocketMessageType.Binary, true, ctsDefault.Token);

        // Now the 0-byte receive should complete, but without reading any data.
        WebSocketReceiveResult r = await t;
        Assert.Equal(WebSocketMessageType.Binary, r.MessageType);
        Assert.Equal(0, r.Count);
        Assert.False(r.EndOfMessage);

        // Now do a receive to get the payload.
        var receiveBuffer = new byte[1];
        t = ReceiveAsync(cws, new ArraySegment<byte>(receiveBuffer), ctsDefault.Token);

        // Skip synchronous completion check on UAP since it uses WinRT APIs underneath.
        if (!PlatformDetection.IsUap)
        {
            // Data is already buffered, so this receive should complete synchronously.
            Assert.Equal(TaskStatus.RanToCompletion, t.Status);
        }

        r = await t;
        Assert.Equal(WebSocketMessageType.Binary, r.MessageType);
        Assert.Equal(1, r.Count);
        Assert.True(r.EndOfMessage);
        Assert.Equal(42, receiveBuffer[0]);

        // Clean up.
        await cws.CloseAsync(WebSocketCloseStatus.NormalClosure, nameof(ZeroByteReceive_CompletesWhenDataAvailable), ctsDefault.Token);
    }
}
}
}
| |
using DragonSpark.Extensions;
using DragonSpark.Sources.Parameterized;
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity;
using System.Data.Entity.Core.Metadata.Edm;
using System.Data.Entity.Core.Objects;
using System.Data.Entity.Infrastructure;
using System.Data.Entity.Migrations;
using System.Data.Entity.Validation;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using Type = System.Type;
namespace DragonSpark.Windows.Entity
{
public static class DbContextExtensions
{
    // Cached reflection handles used to close the generic Get<TItem>/ApplyChanges<TEntity>
    // methods over entity types that are only known at runtime:
    //  - GetMethod: matched by the (DbContext, object, int) parameter list of Get<TItem>.
    //  - ApplyChangesMethod: the first generic method named ApplyChanges.
    readonly static MethodInfo
        GetMethod = typeof(DbContextExtensions).GetMethod( nameof(DbContextExtensions.Get), new[] { typeof(DbContext), typeof(object), typeof(int) } ),
        ApplyChangesMethod = typeof(DbContextExtensions).GetMethods().FirstOrDefault( x => x.IsGenericMethod && x.Name == nameof(ApplyChanges) );

    // Saves pending changes, translating EF validation failures into an
    // EntityValidationException that also carries the offending context.
    public static int Save( this DbContext target )
    {
        try
        {
            return target.SaveChanges();
        }
        catch ( DbEntityValidationException error )
        {
            throw new EntityValidationException( target, error );
        }
    }

    // Non-generic ApplyChanges: gathers the values of the entity's navigation properties
    // (recursing into collection navigation properties and flattening them), then invokes the
    // generic ApplyChanges<TEntity> for the entity itself plus each distinct related object.
    // NOTE(review): depends on DragonSpark helpers (With/AsTo/Prepend/ToItem) whose exact
    // null-handling semantics are not visible in this file.
    public static object ApplyChanges( this DbContext target, object entity )
    {
        var type = entity.GetType();
        var items = target.GetEntityProperties( type ).SelectMany( x => type.GetProperty( x.Name ).GetValue( entity ).With( o => o.AsTo<IEnumerable, IEnumerable<object>>( enumerable => enumerable.Cast<object>().Select( target.ApplyChanges ), o.ToItem ) ) );
        foreach ( var o in entity.Prepend( items ).Distinct() )
        {
            ApplyChangesMethod.MakeGenericMethod( o.GetType() ).Invoke( null, new[] { target, o } );
        }
        return entity;
    }

    // Generic ApplyChanges: only acts on entities the context is not yet tracking (Detached);
    // attaches the entity to its set and marks it for add-or-update.
    public static TEntity ApplyChanges<TEntity>( this DbContext target, TEntity entity ) where TEntity : class
    {
        var entityState = target.Entry( entity ).State;
        switch ( entityState )
        {
            case EntityState.Detached:
                var dbSet = target.Set<TEntity>();
                dbSet.AddOrUpdate( dbSet.Attach( entity ) );
                break;
        }
        return entity;
    }

    // Non-generic lookup: closes Get<TItem> over entityType (or the entity's runtime type)
    // and invokes it with the default include depth of 1.
    public static object Get( this DbContext target, object entity, Type entityType = null ) => GetMethod.MakeGenericMethod( entityType ?? entity.GetType() ).Invoke( null, new[] { target, entity, 1 } );

    // Finds an entity by the key values extracted from `container` (a key value or an object
    // carrying key properties), then eagerly loads its associations up to `levels` deep.
    public static TItem Get<TItem>( this DbContext target, object container, int levels = 1 ) where TItem : class
    {
        var key = new KeyFactory<TItem>( target ).Get( container );
        var current = target.Set<TItem>().Find( key.Values.ToArray() );
        var result = current.With( x => target.Include( x, levels ) );
        return result;
    }

    // Produces the set of association property names to load by default for a type:
    // properties decorated with [DefaultInclude] plus single-valued (non-collection)
    // navigation properties from the EF metadata.
    sealed class DefaultAssociationPropertyFactory : ParameterizedSourceBase<Type, string[]>
    {
        readonly IObjectContextAdapter adapter;

        public DefaultAssociationPropertyFactory( IObjectContextAdapter adapter )
        {
            this.adapter = adapter;
        }

        public override string[] Get( Type parameter )
        {
            var names = GetAssociationPropertyNames( adapter, parameter );
            var decorated = parameter.GetProperties().Where( x => AttributeProviderExtensions.Has<DefaultIncludeAttribute>( x ) ).Select( x => x.Name );
            var result = decorated.Union( names ).ToArray();
            return result;
        }

        // GetInnerType() == null appears to select non-collection navigation properties —
        // TODO confirm against the Adapt()/GetInnerType() helper semantics.
        static IEnumerable<string> GetAssociationPropertyNames( IObjectContextAdapter target, Type type ) => target.GetEntityProperties( type ).Select( x => type.GetProperty( x.Name ) ).Where( x => x.PropertyType.Adapt().GetInnerType() == null ).Select( x => x.Name );
    }

    // Creates a new TItem, applies the optional initializer, and adds it to the context's set.
    public static TItem Create<TItem>( this DbContext target, Action<TItem> with = null ) where TItem : class, new()
    {
        var item = new TItem().With( with );
        var result = target.Set<TItem>().Add( item );
        return result;
    }

    // Removes an entity; when clearProperties is true, first loads and removes all related
    // objects reachable through cascade-delete navigation properties, saving after each
    // property is cleared.
    public static void Remove<T>( this DbContext context, T entity, bool clearProperties = true ) where T : class
    {
        if ( clearProperties )
        {
            var type = entity.GetType();
            var properties = context.GetEntityProperties( type ).Where( x => x.FromEndMember.DeleteBehavior == OperationAction.Cascade ).Select( x => x.Name ).ToArray();
            Load( context, entity, properties );
            properties.Each( x =>
            {
                var property = type.GetProperty( x );
                var raw = property.GetValue( entity );
                // Collection navigation properties are flattened; scalar ones are wrapped.
                var items = property.PropertyType.Adapt().GetInnerType() != null ? raw.To<IEnumerable>().Cast<object>().ToArray() : new[] { raw };
                items.Each( y => context.Set( y.GetType() ).Remove( y ) );
                context.Save();
            } );
        }
        context.Set<T>().Remove( entity );
    }

    // Derives the primary-key name/value dictionary for TEntity from a "container" object:
    // either a primitive key value (mapped to the first key member) or an object whose
    // properties mirror the key members.
    class KeyFactory<TEntity> : ParameterizedSourceBase<IDictionary<string, object>>
    {
        readonly IObjectContextAdapter context;

        public KeyFactory( IObjectContextAdapter context )
        {
            this.context = context;
        }

        public override IDictionary<string, object> Get( object parameter )
        {
            var names = context.ObjectContext.DetermineEntitySet( typeof(TEntity) ).With( x => x.ElementType.KeyMembers.Select( y => y.Name ).ToArray() );
            return parameter.GetType().IsPrimitive ? new Dictionary<string, object> { { names.First(), parameter } } : DetermineKeyComplex( context, parameter, names );
        }

        // For each key member, prefers the value referenced via a [ForeignKey] navigation
        // (resolved through the ObjectStateManager's entity key); otherwise reads the
        // same-named property directly off the container.
        static IDictionary<string, object> DetermineKeyComplex( IObjectContextAdapter target, object container, string[] names )
        {
            var result = names.Select( name =>
            {
                var info = container.GetType().GetProperty( name, BindingFlags.IgnoreCase | BindingFlags.Public | BindingFlags.Default | BindingFlags.FlattenHierarchy );
                var value = AttributeProviderExtensions.From<ForeignKeyAttribute, object>( typeof(TEntity).GetProperty( name ), y =>
                {
                    var propertyInfo = container.GetType().GetProperty( y.Name, BindingFlags.IgnoreCase | BindingFlags.Public | BindingFlags.Default | BindingFlags.FlattenHierarchy );
                    var o = propertyInfo.GetValue( container );
                    return o.With( z =>
                    {
                        var objectStateEntry = target.AsTo<IObjectContextAdapter, ObjectContext>( x => x.ObjectContext ).ObjectStateManager.GetObjectStateEntry( z );
                        return objectStateEntry.EntityKey.EntityKeyValues.First().Value;
                    } );
                } ) ?? info.GetValue( container );
                return new { name, value };
            } ).ToDictionary( x => x.name, x => x.value );
            return result;
        }
    }

    // Returns the EF navigation-property metadata for the given CLR type.
    public static IEnumerable<NavigationProperty> GetEntityProperties( this IObjectContextAdapter target, Type type ) => target.ObjectContext.MetadataWorkspace.GetEntityMetadata( type ).NavigationProperties;

    // Enumerates the entity types declared as DbSet<T> properties on the concrete context.
    public static Type[] GetDeclaredEntityTypes( this DbContext context ) => EnumerableExtensions.WhereAssigned( context.GetType().GetProperties().Where( x => x.PropertyType.IsGenericType && typeof( DbSet<> ).IsAssignableFrom( x.PropertyType.GetGenericTypeDefinition() ) ).Select( x => x.PropertyType.GetGenericArguments().FirstOrDefault() ) ).ToArray();

    // Expression-based Include: converts member expressions into property names.
    public static TEntity Include<TEntity>( this DbContext target, TEntity entity, int levels, params Expression<Func<TEntity, object>>[] expressions ) where TEntity : class => target.Include( entity, expressions.Select( x => x.GetMemberInfo().Name ).ToArray(), levels );

    // Name-based Include: merges explicit association names with the type's default
    // associations, then loads them to the requested depth.
    public static TEntity Include<TEntity>( this DbContext target, TEntity entity, string[] associationNames, int levels = 1 ) where TEntity : class
    {
        var associations = associationNames ?? Enumerable.Empty<string>();
        var names = associations.Union( new DefaultAssociationPropertyFactory( target ).Get( typeof(TEntity) ) ).ToArray();
        var result = Load( target, entity, names, levels );
        return result;
    }

    // Entry point for eager loading: when levels == 1 and loadAllProperties is unspecified,
    // all navigation properties are loaded; otherwise only the defaults/specified ones.
    public static TItem Load<TItem>( this DbContext target, TItem entity, string[] properties = null, int? levels = 1, bool? loadAllProperties = null )
    {
        LoadAll( target, entity, new ArrayList(), properties, loadAllProperties.GetValueOrDefault( levels == 1 ), levels, 0 );
        return entity;
    }

    // Depth-first load of the association graph. `list` tracks visited entities to break
    // cycles; `levels` (null = unlimited) bounds recursion depth via `count`.
    static void LoadAll( DbContext target, object entity, IList list, IEnumerable<string> properties, bool loadAllProperties, int? levels, int count )
    {
        if ( !list.Contains( entity ) )
        {
            list.Add( entity );

            var type = entity.GetType();
            var names = properties ?? ( loadAllProperties ? target.GetEntityProperties( type ).Select( x => x.Name ) : new DefaultAssociationPropertyFactory( target ).Get( type ) );
            var associationNames = names.ToArray();

            LoadEntity( target, entity, associationNames );

            if ( !levels.HasValue || ++count < levels.Value )
            {
                foreach ( var z in associationNames.Select( y => type.GetProperty( y ).GetValue( entity ) ).WhereAssigned() )
                {
                    // Collection associations are expanded; scalar ones wrapped as one item.
                    var items = z.Adapt().GetInnerType() != null ? z.AsTo<IEnumerable, object[]>( a => a.Cast<object>().ToArray() ) : z.ToItem();
                    foreach ( var item in items )
                    {
                        LoadAll( target, item, list, null, loadAllProperties, levels, count );
                    }
                }
            }
        }
    }

    // Forces EF lazy/explicit loading for each named association of a tracked entity.
    // Added entities are skipped (nothing to load from the store yet).
    static void LoadEntity( DbContext target, object entity, IEnumerable<string> associationNames )
    {
        var entry = target.Entry( entity );
        if ( entry.State != EntityState.Added )
        {
            foreach ( var name in associationNames )
            {
                if ( entity.GetType().GetProperty( name ).PropertyType.Adapt().GetInnerType() != null )
                {
                    var collection = entry.Collection( name );
                    var current = collection.CurrentValue.AsTo<IEnumerable, IEnumerable<object>>( x => x.Cast<object>() );
                    // Only load collections that aren't loaded and contain no Added entries.
                    var canLoad = !collection.IsLoaded && current.All( x => target.Entry( x ).State != EntityState.Added );
                    try
                    {
                        canLoad.IsTrue( collection.Load );
                    }
                    // EF can throw here (e.g. detached/disposed scenarios); loading is
                    // best-effort, so the failure is deliberately swallowed.
                    catch ( InvalidOperationException )
                    {}
                }
                else
                {
                    var reference = entry.Reference( name );
                    reference.IsLoaded.IsFalse( reference.Load );
                }
            }
        }
    }
}
}
| |
// This code is part of the Fungus library (http://fungusgames.com) maintained by Chris Gregan (http://twitter.com/gofungus).
// It is released for free under the MIT open source license (https://github.com/snozbot/fungus/blob/master/LICENSE)
using UnityEngine;
using UnityEngine.UI;
using System;
using System.Collections;
using System.Collections.Generic;
namespace Fungus
{
/// <summary>
/// Display story text in a visual novel style dialog box.
/// </summary>
public class SayDialog : MonoBehaviour
{
    [Tooltip("Duration to fade dialogue in/out")]
    [SerializeField] protected float fadeDuration = 0.25f;

    [Tooltip("The continue button UI object")]
    [SerializeField] protected Button continueButton;

    [Tooltip("The canvas UI object")]
    [SerializeField] protected Canvas dialogCanvas;

    [Tooltip("The name text UI object")]
    [SerializeField] protected Text nameText;

    [Tooltip("The story text UI object")]
    [SerializeField] protected Text storyText;
    public virtual Text StoryText { get { return storyText; } }

    [Tooltip("The character UI object")]
    [SerializeField] protected Image characterImage;
    public virtual Image CharacterImage { get { return characterImage; } }

    [Tooltip("Adjust width of story text when Character Image is displayed (to avoid overlapping)")]
    [SerializeField] protected bool fitTextWithImage = true;

    // Original story-text rect width/inset, captured before shrinking to fit the
    // character image so they can be restored when the image is hidden.
    protected float startStoryTextWidth;
    protected float startStoryTextInset;

    // Lazily created sibling components (see GetWriter/GetWriterAudio/GetCanvasGroup).
    protected WriterAudio writerAudio;

    protected Writer writer;

    protected CanvasGroup canvasGroup;

    protected bool fadeWhenDone = true;

    protected float targetAlpha = 0f;

    protected float fadeCoolDownTimer = 0f;

    protected Sprite currentCharacterImage;

    // Most recent speaking character
    protected static Character speakingCharacter;

    protected StringSubstituter stringSubstituter = new StringSubstituter();

    // Cache active Say Dialogs to avoid expensive scene search
    protected static List<SayDialog> activeSayDialogs = new List<SayDialog>();

    // Register this dialog in the static active list (used by GetSayDialog).
    protected void Awake()
    {
        if (!activeSayDialogs.Contains(this))
        {
            activeSayDialogs.Add(this);
        }
    }

    // Unregister from the static active list when destroyed.
    protected void OnDestroy()
    {
        activeSayDialogs.Remove(this);
    }

    // Lazily gets (or adds) the Writer component used to type out story text.
    protected Writer GetWriter()
    {
        if (writer != null)
        {
            return writer;
        }

        writer = GetComponent<Writer>();
        if (writer == null)
        {
            writer = gameObject.AddComponent<Writer>();
        }

        return writer;
    }

    // Lazily gets (or adds) the CanvasGroup used for fading the dialog in/out.
    protected CanvasGroup GetCanvasGroup()
    {
        if (canvasGroup != null)
        {
            return canvasGroup;
        }

        canvasGroup = GetComponent<CanvasGroup>();
        if (canvasGroup == null)
        {
            canvasGroup = gameObject.AddComponent<CanvasGroup>();
        }

        return canvasGroup;
    }

    // Lazily gets (or adds) the WriterAudio component used for voiceover / typing sounds.
    protected WriterAudio GetWriterAudio()
    {
        if (writerAudio != null)
        {
            return writerAudio;
        }

        writerAudio = GetComponent<WriterAudio>();
        if (writerAudio == null)
        {
            writerAudio = gameObject.AddComponent<WriterAudio>();
        }

        return writerAudio;
    }

    protected void Start()
    {
        // Dialog always starts invisible, will be faded in when writing starts
        GetCanvasGroup().alpha = 0f;

        // Add a raycaster if none already exists so we can handle dialog input
        GraphicRaycaster raycaster = GetComponent<GraphicRaycaster>();
        if (raycaster == null)
        {
            gameObject.AddComponent<GraphicRaycaster>();
        }

        // It's possible that SetCharacterImage() has already been called from the
        // Start method of another component, so check that no image has been set yet.
        // Same for nameText.

        if (nameText != null && nameText.text == "")
        {
            SetCharacterName("", Color.white);
        }
        if (currentCharacterImage == null)
        {
            // Character image is hidden by default.
            SetCharacterImage(null);
        }

        stringSubstituter.CacheSubstitutionHandlers();
    }

    // Per-frame: drive the fade animation and show the continue button only while the
    // writer is waiting for player input.
    protected virtual void LateUpdate()
    {
        UpdateAlpha();

        if (continueButton != null)
        {
            continueButton.gameObject.SetActive( GetWriter().IsWaitingForInput );
        }
    }

    // Moves the canvas-group alpha toward the target: fully visible while writing,
    // fading out (after a short cool-down) once writing is finished and fadeWhenDone is set.
    protected virtual void UpdateAlpha()
    {
        if (GetWriter().IsWriting)
        {
            targetAlpha = 1f;
            fadeCoolDownTimer = 0.1f;
        }
        else if (fadeWhenDone && Mathf.Approximately(fadeCoolDownTimer, 0f))
        {
            targetAlpha = 0f;
        }
        else
        {
            // Add a short delay before we start fading in case there's another Say command in the next frame or two.
            // This avoids a noticeable flicker between consecutive Say commands.
            fadeCoolDownTimer = Mathf.Max(0f, fadeCoolDownTimer - Time.deltaTime);
        }

        CanvasGroup canvasGroup = GetCanvasGroup();
        if (fadeDuration <= 0f)
        {
            canvasGroup.alpha = targetAlpha;
        }
        else
        {
            float delta = (1f / fadeDuration) * Time.deltaTime;
            float alpha = Mathf.MoveTowards(canvasGroup.alpha, targetAlpha, delta);
            canvasGroup.alpha = alpha;

            if (alpha <= 0f)
            {
                // Deactivate dialog object once invisible
                gameObject.SetActive(false);
            }
        }
    }

    // Clears the story text UI (no-op when no storyText is assigned).
    protected virtual void ClearStoryText()
    {
        if (storyText != null)
        {
            storyText.text = "";
        }
    }

    #region Public members

    /// <summary>
    /// Currently active Say Dialog used to display Say text
    /// </summary>
    public static SayDialog ActiveSayDialog { get; set; }

    /// <summary>
    /// Returns a SayDialog by searching for one in the scene or creating one if none exists.
    /// </summary>
    public static SayDialog GetSayDialog()
    {
        if (ActiveSayDialog == null)
        {
            SayDialog sd = null;

            // Use first active Say Dialog in the scene (if any)
            if (activeSayDialogs.Count > 0)
            {
                sd = activeSayDialogs[0];
            }

            if (sd != null)
            {
                ActiveSayDialog = sd;
            }

            if (ActiveSayDialog == null)
            {
                // Auto spawn a say dialog object from the prefab
                GameObject prefab = Resources.Load<GameObject>("Prefabs/SayDialog");
                if (prefab != null)
                {
                    GameObject go = Instantiate(prefab) as GameObject;
                    go.SetActive(false);
                    go.name = "SayDialog";
                    ActiveSayDialog = go.GetComponent<SayDialog>();
                }
            }
        }

        return ActiveSayDialog;
    }

    /// <summary>
    /// Stops all active portrait tweens.
    /// </summary>
    public static void StopPortraitTweens()
    {
        // Stop all tweening portraits
        var activeCharacters = Character.ActiveCharacters;
        for (int i = 0; i < activeCharacters.Count; i++)
        {
            var c = activeCharacters[i];
            if (c.State.portraitImage != null)
            {
                if (LeanTween.isTweening(c.State.portraitImage.gameObject))
                {
                    LeanTween.cancel(c.State.portraitImage.gameObject, true);

                    // Snap the portrait to its final position/tint since the tween was cut short.
                    PortraitController.SetRectTransform(c.State.portraitImage.rectTransform, c.State.position);
                    if (c.State.dimmed == true)
                    {
                        c.State.portraitImage.color = new Color(0.5f, 0.5f, 0.5f, 1f);
                    }
                    else
                    {
                        c.State.portraitImage.color = Color.white;
                    }
                }
            }
        }
    }

    /// <summary>
    /// Sets the active state of the Say Dialog gameobject.
    /// </summary>
    public virtual void SetActive(bool state)
    {
        gameObject.SetActive(state);
    }

    /// <summary>
    /// Sets the active speaking character.
    /// </summary>
    /// <param name="character">The active speaking character.</param>
    public virtual void SetCharacter(Character character)
    {
        if (character == null)
        {
            // No speaker: hide the portrait and clear the name label.
            if (characterImage != null)
            {
                characterImage.gameObject.SetActive(false);
            }
            if (nameText != null)
            {
                nameText.text = "";
            }
            speakingCharacter = null;
        }
        else
        {
            var prevSpeakingCharacter = speakingCharacter;
            speakingCharacter = character;

            // Dim portraits of non-speaking characters
            var activeStages = Stage.ActiveStages;
            for (int i = 0; i < activeStages.Count; i++)
            {
                var stage = activeStages[i];
                if (stage.DimPortraits)
                {
                    var charactersOnStage = stage.CharactersOnStage;
                    for (int j = 0; j < charactersOnStage.Count; j++)
                    {
                        var c = charactersOnStage[j];
                        // Only re-dim when the speaker actually changed, to avoid
                        // redundant SetDimmed calls every Say.
                        if (prevSpeakingCharacter != speakingCharacter)
                        {
                            if (c != null && !c.Equals(speakingCharacter))
                            {
                                stage.SetDimmed(c, true);
                            }
                            else
                            {
                                stage.SetDimmed(c, false);
                            }
                        }
                    }
                }
            }

            string characterName = character.NameText;

            if (characterName == "")
            {
                // Use game object name as default
                characterName = character.GetObjectName();
            }

            SetCharacterName(characterName, character.NameColor);
        }
    }

    /// <summary>
    /// Sets the character image to display on the Say Dialog.
    /// </summary>
    public virtual void SetCharacterImage(Sprite image)
    {
        if (characterImage == null)
        {
            return;
        }

        if (image != null)
        {
            characterImage.sprite = image;
            characterImage.gameObject.SetActive(true);
            currentCharacterImage = image;
        }
        else
        {
            characterImage.gameObject.SetActive(false);

            // Restore the story text rect that was shrunk to make room for the image.
            if (startStoryTextWidth != 0)
            {
                storyText.rectTransform.SetInsetAndSizeFromParentEdge(RectTransform.Edge.Left,
                                                                      startStoryTextInset,
                                                                      startStoryTextWidth);
            }
        }

        // Adjust story text box to not overlap image rect
        if (fitTextWithImage &&
            storyText != null &&
            characterImage.gameObject.activeSelf)
        {
            // Capture the unshrunk width/inset once, on first use.
            if (Mathf.Approximately(startStoryTextWidth, 0f))
            {
                startStoryTextWidth = storyText.rectTransform.rect.width;
                startStoryTextInset = storyText.rectTransform.offsetMin.x;
            }

            // Clamp story text to left or right depending on relative position of the character image
            if (storyText.rectTransform.position.x < characterImage.rectTransform.position.x)
            {
                storyText.rectTransform.SetInsetAndSizeFromParentEdge(RectTransform.Edge.Left,
                                                                      startStoryTextInset,
                                                                      startStoryTextWidth - characterImage.rectTransform.rect.width);
            }
            else
            {
                storyText.rectTransform.SetInsetAndSizeFromParentEdge(RectTransform.Edge.Right,
                                                                      startStoryTextInset,
                                                                      startStoryTextWidth - characterImage.rectTransform.rect.width);
            }
        }
    }

    /// <summary>
    /// Sets the character name to display on the Say Dialog.
    /// Supports variable substitution e.g. John {$surname}
    /// </summary>
    public virtual void SetCharacterName(string name, Color color)
    {
        if (nameText != null)
        {
            var subbedName = stringSubstituter.SubstituteStrings(name);
            nameText.text = subbedName;
            nameText.color = color;
        }
    }

    /// <summary>
    /// Write a line of story text to the Say Dialog. Starts coroutine automatically.
    /// </summary>
    /// <param name="text">The text to display.</param>
    /// <param name="clearPrevious">Clear any previous text in the Say Dialog.</param>
    /// <param name="waitForInput">Wait for player input before continuing once text is written.</param>
    /// <param name="fadeWhenDone">Fade out the Say Dialog when writing and player input has finished.</param>
    /// <param name="stopVoiceover">Stop any existing voiceover audio before writing starts.</param>
    /// <param name="voiceOverClip">Voice over audio clip to play.</param>
    /// <param name="onComplete">Callback to execute when writing and player input have finished.</param>
    public virtual void Say(string text, bool clearPrevious, bool waitForInput, bool fadeWhenDone, bool stopVoiceover, AudioClip voiceOverClip, Action onComplete)
    {
        StartCoroutine(DoSay(text, clearPrevious, waitForInput, fadeWhenDone, stopVoiceover, voiceOverClip, onComplete));
    }

    /// <summary>
    /// Write a line of story text to the Say Dialog. Must be started as a coroutine.
    /// </summary>
    /// <param name="text">The text to display.</param>
    /// <param name="clearPrevious">Clear any previous text in the Say Dialog.</param>
    /// <param name="waitForInput">Wait for player input before continuing once text is written.</param>
    /// <param name="fadeWhenDone">Fade out the Say Dialog when writing and player input has finished.</param>
    /// <param name="stopVoiceover">Stop any existing voiceover audio before writing starts.</param>
    /// <param name="voiceOverClip">Voice over audio clip to play.</param>
    /// <param name="onComplete">Callback to execute when writing and player input have finished.</param>
    public virtual IEnumerator DoSay(string text, bool clearPrevious, bool waitForInput, bool fadeWhenDone, bool stopVoiceover, AudioClip voiceOverClip, Action onComplete)
    {
        var writer = GetWriter();

        // If a previous write is still in progress, stop it and wait for it to wind down
        // before starting the new one.
        if (writer.IsWriting || writer.IsWaitingForInput)
        {
            writer.Stop();
            while (writer.IsWriting || writer.IsWaitingForInput)
            {
                yield return null;
            }
        }

        gameObject.SetActive(true);

        this.fadeWhenDone = fadeWhenDone;

        // Voice over clip takes precedence over a character sound effect if provided

        AudioClip soundEffectClip = null;
        if (voiceOverClip != null)
        {
            WriterAudio writerAudio = GetWriterAudio();
            writerAudio.OnVoiceover(voiceOverClip);
        }
        else if (speakingCharacter != null)
        {
            soundEffectClip = speakingCharacter.SoundEffect;
        }

        yield return StartCoroutine(writer.Write(text, clearPrevious, waitForInput, stopVoiceover, soundEffectClip, onComplete));
    }

    /// <summary>
    /// Tell the Say Dialog to fade out once writing and player input have finished.
    /// </summary>
    public virtual bool FadeWhenDone { set { fadeWhenDone = value; } }

    /// <summary>
    /// Stop the Say Dialog while its writing text.
    /// </summary>
    public virtual void Stop()
    {
        fadeWhenDone = true;
        GetWriter().Stop();
    }

    /// <summary>
    /// Stops writing text and clears the Say Dialog.
    /// </summary>
    public virtual void Clear()
    {
        ClearStoryText();

        // Kill any active write coroutine
        StopAllCoroutines();
    }

    #endregion
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.ComponentModel.Composition;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.CodeFixes;
using Microsoft.CodeAnalysis.CodeFixes.Suppression;
using Microsoft.CodeAnalysis.CodeRefactorings;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Editor.Host;
using Microsoft.CodeAnalysis.Editor.Shared;
using Microsoft.CodeAnalysis.Editor.Shared.Extensions;
using Microsoft.CodeAnalysis.Editor.Shared.Options;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.Experiments;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.CodeAnalysis.Text;
using Microsoft.CodeAnalysis.Text.Shared.Extensions;
using Microsoft.VisualStudio.Language.Intellisense;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Editor;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Editor.Implementation.Suggestions
{
using CodeFixGroupKey = Tuple<DiagnosticData, CodeActionPriority>;
[Export(typeof(ISuggestedActionsSourceProvider))]
[Export(typeof(SuggestedActionsSourceProvider))]
[VisualStudio.Utilities.ContentType(ContentTypeNames.RoslynContentType)]
[VisualStudio.Utilities.ContentType(ContentTypeNames.XamlContentType)]
[VisualStudio.Utilities.Name("Roslyn Code Fix")]
[VisualStudio.Utilities.Order]
internal class SuggestedActionsSourceProvider : ISuggestedActionsSourceProvider
{
private static readonly Guid s_CSharpSourceGuid = new Guid("b967fea8-e2c3-4984-87d4-71a38f49e16a");
private static readonly Guid s_visualBasicSourceGuid = new Guid("4de30e93-3e0c-40c2-a4ba-1124da4539f6");
private static readonly Guid s_xamlSourceGuid = new Guid("a0572245-2eab-4c39-9f61-06a6d8c5ddda");
private const int InvalidSolutionVersion = -1;
private readonly ICodeRefactoringService _codeRefactoringService;
private readonly IDiagnosticAnalyzerService _diagnosticService;
private readonly ICodeFixService _codeFixService;
public readonly ICodeActionEditHandlerService EditHandler;
public readonly IAsynchronousOperationListener OperationListener;
public readonly IWaitIndicator WaitIndicator;
[ImportingConstructor]
public SuggestedActionsSourceProvider(
    ICodeRefactoringService codeRefactoringService,
    IDiagnosticAnalyzerService diagnosticService,
    ICodeFixService codeFixService,
    ICodeActionEditHandlerService editHandler,
    IWaitIndicator waitIndicator,
    [ImportMany] IEnumerable<Lazy<IAsynchronousOperationListener, FeatureMetadata>> asyncListeners)
{
    // MEF-composed: all dependencies are satisfied from the host's catalog.
    _codeRefactoringService = codeRefactoringService;
    _diagnosticService = diagnosticService;
    _codeFixService = codeFixService;
    EditHandler = editHandler;
    WaitIndicator = waitIndicator;
    // Aggregate all imported listeners under the LightBulb feature so test
    // hooks can await outstanding asynchronous work.
    OperationListener = new AggregateAsynchronousOperationListener(asyncListeners, FeatureAttribute.LightBulb);
}
// Creates a per-(view, buffer) suggested-actions source for the editor light bulb.
public ISuggestedActionsSource CreateSuggestedActionsSource(ITextView textView, ITextBuffer textBuffer)
{
    Contract.ThrowIfNull(textView);
    Contract.ThrowIfNull(textBuffer);

    return new Source(this, textView, textBuffer);
}
private class Source : ForegroundThreadAffinitizedObject, ISuggestedActionsSource
{
// state that will be only reset when source is disposed.
private SuggestedActionsSourceProvider _owner;
private ITextView _textView;
private ITextBuffer _subjectBuffer;
private WorkspaceRegistration _registration;
// mutable state
private Workspace _workspace;
private int _lastSolutionVersionReported;
public Source(SuggestedActionsSourceProvider owner, ITextView textView, ITextBuffer textBuffer)
{
    _owner = owner;
    _textView = textView;
    // The source's lifetime is tied to the view; Dispose() runs on close.
    _textView.Closed += OnTextViewClosed;
    _subjectBuffer = textBuffer;
    _registration = Workspace.GetWorkspaceRegistration(textBuffer.AsTextContainer());

    _lastSolutionVersionReported = InvalidSolutionVersion;
    var updateSource = (IDiagnosticUpdateSource)_owner._diagnosticService;
    updateSource.DiagnosticsUpdated += OnDiagnosticsUpdated;

    // The buffer may not be attached to a workspace yet; if it is, start
    // listening for linked-file active-context switches now.
    if (_registration.Workspace != null)
    {
        _workspace = _registration.Workspace;
        _workspace.DocumentActiveContextChanged += OnActiveContextChanged;
    }

    // Re-hook the context-changed event whenever the buffer moves workspaces.
    _registration.WorkspaceChanged += OnWorkspaceChanged;
}
public event EventHandler<EventArgs> SuggestedActionsChanged;
/// <summary>
/// Maps the buffer's project language to a fixed per-language telemetry guid.
/// Returns false (with an empty guid) if no document/project can be resolved
/// or the language is not one we report telemetry for.
/// </summary>
public bool TryGetTelemetryId(out Guid telemetryId)
{
    telemetryId = default(Guid);

    var workspace = _workspace;
    if (workspace == null || _subjectBuffer == null)
    {
        return false;
    }

    var documentId = workspace.GetDocumentIdInCurrentContext(_subjectBuffer.AsTextContainer());
    if (documentId == null)
    {
        return false;
    }

    var project = workspace.CurrentSolution.GetProject(documentId.ProjectId);
    if (project == null)
    {
        return false;
    }

    var language = project.Language;
    if (language == LanguageNames.CSharp)
    {
        telemetryId = s_CSharpSourceGuid;
        return true;
    }

    if (language == LanguageNames.VisualBasic)
    {
        telemetryId = s_visualBasicSourceGuid;
        return true;
    }

    if (language == "Xaml")
    {
        telemetryId = s_xamlSourceGuid;
        return true;
    }

    return false;
}
public IEnumerable<SuggestedActionSet> GetSuggestedActions(ISuggestedActionCategorySet requestedActionCategories, SnapshotSpan range, CancellationToken cancellationToken)
{
AssertIsForeground();
using (Logger.LogBlock(FunctionId.SuggestedActions_GetSuggestedActions, cancellationToken))
{
var documentAndSnapshot = GetMatchingDocumentAndSnapshotAsync(range.Snapshot, cancellationToken).WaitAndGetResult(cancellationToken);
if (!documentAndSnapshot.HasValue)
{
// this is here to fail test and see why it is failed.
Trace.WriteLine("given range is not current");
return null;
}
var document = documentAndSnapshot.Value.Item1;
var workspace = document.Project.Solution.Workspace;
var supportsFeatureService = workspace.Services.GetService<IDocumentSupportsFeatureService>();
var fixes = GetCodeFixes(supportsFeatureService, requestedActionCategories, workspace, document, range, cancellationToken);
var refactorings = GetRefactorings(supportsFeatureService, requestedActionCategories, workspace, document, range, cancellationToken);
var result = fixes.Concat(refactorings);
if (result.IsEmpty)
{
return null;
}
var allActionSets = InlineActionSetsIfDesirable(result);
return allActionSets;
}
}
// If we only have a small number of total suggestions (three or fewer), we
// consider inlining any nested actions into the top-level list; otherwise the
// flat list would get too long and we keep the sets as-is.
private ImmutableArray<SuggestedActionSet> InlineActionSetsIfDesirable(ImmutableArray<SuggestedActionSet> allActionSets)
{
    var totalActionCount = allActionSets.Sum(set => set.Actions.Count());
    return totalActionCount > 3
        ? allActionSets
        : allActionSets.SelectAsArray(InlineActions);
}
// Produces a copy of the action set in which every parent action that allows
// inlining is replaced by its nested actions; all other actions are kept.
private SuggestedActionSet InlineActions(SuggestedActionSet actionSet)
{
    var flattened = ArrayBuilder<ISuggestedAction>.GetInstance();
    foreach (var action in actionSet.Actions)
    {
        // Only inline if the underlying code action allows it.
        if (action is SuggestedActionWithNestedActions withNested &&
            withNested.CodeAction.IsInlinable)
        {
            flattened.AddRange(withNested.NestedActionSet.Actions);
        }
        else
        {
            flattened.Add(action);
        }
    }

    return new SuggestedActionSet(
        flattened.ToImmutableAndFree(), actionSet.Title, actionSet.Priority, actionSet.ApplicableToSpan);
}
private ImmutableArray<SuggestedActionSet> GetCodeFixes(
IDocumentSupportsFeatureService supportsFeatureService,
ISuggestedActionCategorySet requestedActionCategories,
Workspace workspace,
Document document,
SnapshotSpan range,
CancellationToken cancellationToken)
{
this.AssertIsForeground();
if (_owner._codeFixService != null &&
supportsFeatureService.SupportsCodeFixes(document) &&
requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.CodeFix))
{
// We only include suppressions if lightbulb is asking for everything.
// If the light bulb is only asking for code fixes, then we don't include suppressions.
var includeSuppressionFixes = requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Any);
var fixes = Task.Run(
() => _owner._codeFixService.GetFixesAsync(
document, range.Span.ToTextSpan(), includeSuppressionFixes, cancellationToken),
cancellationToken).WaitAndGetResult(cancellationToken);
var filteredFixes = FilterOnUIThread(fixes, workspace);
return OrganizeFixes(workspace, filteredFixes, includeSuppressionFixes);
}
return ImmutableArray<SuggestedActionSet>.Empty;
}
private ImmutableArray<CodeFixCollection> FilterOnUIThread(
ImmutableArray<CodeFixCollection> collections, Workspace workspace)
{
this.AssertIsForeground();
return collections.Select(c => FilterOnUIThread(c, workspace)).WhereNotNull().ToImmutableArray();
}
private CodeFixCollection FilterOnUIThread(
CodeFixCollection collection,
Workspace workspace)
{
this.AssertIsForeground();
var applicableFixes = collection.Fixes.WhereAsArray(f => IsApplicable(f.Action, workspace));
return applicableFixes.Length == 0
? null
: applicableFixes.Length == collection.Fixes.Length
? collection
: new CodeFixCollection(collection.Provider, collection.TextSpan, applicableFixes,
collection.FixAllState,
collection.SupportedScopes, collection.FirstDiagnostic);
}
// Final UI-thread applicability gate for a code action before it is shown.
private bool IsApplicable(CodeAction action, Workspace workspace)
{
    if (!action.PerformFinalApplicabilityCheck)
    {
        // If we don't even need to perform the final applicability check,
        // then the code action is applicable.
        return true;
    }

    // Otherwise, defer to the action to make the decision. This must happen
    // on the foreground thread.
    this.AssertIsForeground();
    return action.IsApplicable(workspace);
}
private ImmutableArray<CodeRefactoring> FilterOnUIThread(ImmutableArray<CodeRefactoring> refactorings, Workspace workspace)
{
return refactorings.Select(r => FilterOnUIThread(r, workspace)).WhereNotNull().ToImmutableArray();
}
private CodeRefactoring FilterOnUIThread(CodeRefactoring refactoring, Workspace workspace)
{
var actions = refactoring.Actions.Where(a => IsApplicable(a, workspace)).ToList();
return actions.Count == 0
? null
: actions.Count == refactoring.Actions.Count
? refactoring
: new CodeRefactoring(refactoring.Provider, actions);
}
/// <summary>
/// Arrange fixes into groups based on the issue (diagnostic being fixed) and prioritize these groups.
/// </summary>
private ImmutableArray<SuggestedActionSet> OrganizeFixes(
Workspace workspace, ImmutableArray<CodeFixCollection> fixCollections,
bool includeSuppressionFixes)
{
var map = ImmutableDictionary.CreateBuilder<CodeFixGroupKey, IList<SuggestedAction>>();
var order = ArrayBuilder<CodeFixGroupKey>.GetInstance();
// First group fixes by diagnostic and priority.
GroupFixes(workspace, fixCollections, map, order, includeSuppressionFixes);
// Then prioritize between the groups.
return PrioritizeFixGroups(map.ToImmutable(), order.ToImmutableAndFree());
}
/// <summary>
/// Groups fixes by the diagnostic being addressed by each fix.
/// </summary>
private void GroupFixes(
Workspace workspace,
ImmutableArray<CodeFixCollection> fixCollections,
IDictionary<CodeFixGroupKey, IList<SuggestedAction>> map,
ArrayBuilder<CodeFixGroupKey> order,
bool includeSuppressionFixes)
{
foreach (var fixCollection in fixCollections)
{
ProcessFixCollection(
workspace, map, order, includeSuppressionFixes, fixCollection);
}
}
private void ProcessFixCollection(
Workspace workspace,
IDictionary<CodeFixGroupKey, IList<SuggestedAction>> map,
ArrayBuilder<CodeFixGroupKey> order,
bool includeSuppressionFixes,
CodeFixCollection fixCollection)
{
var fixes = fixCollection.Fixes;
var fixCount = fixes.Length;
Func<CodeAction, SuggestedActionSet> getFixAllSuggestedActionSet =
codeAction => GetFixAllSuggestedActionSet(
codeAction, fixCount, fixCollection.FixAllState,
fixCollection.SupportedScopes, fixCollection.FirstDiagnostic,
workspace);
var nonSupressionCodeFixes = fixes.WhereAsArray(f => !(f.Action is TopLevelSuppressionCodeAction));
var supressionCodeFixes = fixes.WhereAsArray(f => f.Action is TopLevelSuppressionCodeAction);
AddCodeActions(workspace, map, order, fixCollection,
getFixAllSuggestedActionSet, nonSupressionCodeFixes);
// Add suppression fixes to the end of a given SuggestedActionSet so that they
// always show up last in a group.
if (includeSuppressionFixes)
{
AddCodeActions(workspace, map, order, fixCollection,
getFixAllSuggestedActionSet, supressionCodeFixes);
}
}
private void AddCodeActions(
Workspace workspace, IDictionary<CodeFixGroupKey, IList<SuggestedAction>> map,
ArrayBuilder<CodeFixGroupKey> order, CodeFixCollection fixCollection,
Func<CodeAction, SuggestedActionSet> getFixAllSuggestedActionSet,
ImmutableArray<CodeFix> codeFixes)
{
foreach (var fix in codeFixes)
{
SuggestedAction suggestedAction;
if (fix.Action.NestedCodeActions.Length > 0)
{
var nestedActions = fix.Action.NestedCodeActions.SelectAsArray(
nestedAction => new CodeFixSuggestedAction(
_owner, workspace, _subjectBuffer, fix, fixCollection.Provider,
nestedAction, getFixAllSuggestedActionSet(nestedAction)));
var set = new SuggestedActionSet(
nestedActions, SuggestedActionSetPriority.Medium,
fix.PrimaryDiagnostic.Location.SourceSpan.ToSpan());
suggestedAction = new SuggestedActionWithNestedActions(
_owner, workspace, _subjectBuffer,
fixCollection.Provider, fix.Action, set);
}
else
{
suggestedAction = new CodeFixSuggestedAction(
_owner, workspace, _subjectBuffer, fix, fixCollection.Provider,
fix.Action, getFixAllSuggestedActionSet(fix.Action));
}
AddFix(fix, suggestedAction, map, order);
}
}
/// <summary>
/// Buckets <paramref name="suggestedAction"/> under its (diagnostic, priority)
/// group. <paramref name="order"/> records the sequence in which groups were
/// first seen, so later prioritization is stable.
/// </summary>
private static void AddFix(
    CodeFix fix, SuggestedAction suggestedAction,
    IDictionary<CodeFixGroupKey, IList<SuggestedAction>> map,
    ArrayBuilder<CodeFixGroupKey> order)
{
    var diag = fix.GetPrimaryDiagnosticData();
    var groupKey = new CodeFixGroupKey(diag, fix.Action.Priority);

    // Single TryGetValue instead of ContainsKey + two indexer lookups.
    if (!map.TryGetValue(groupKey, out var actionsForGroup))
    {
        order.Add(groupKey);
        actionsForGroup = ImmutableArray.CreateBuilder<SuggestedAction>();
        map[groupKey] = actionsForGroup;
    }

    actionsForGroup.Add(suggestedAction);
}
/// <summary>
/// If the provided fix all context is non-null and the context's code action Id matches the given code action's Id then,
/// returns the set of fix all occurrences actions associated with the code action.
/// </summary>
internal SuggestedActionSet GetFixAllSuggestedActionSet(
CodeAction action,
int actionCount,
FixAllState fixAllState,
ImmutableArray<FixAllScope> supportedScopes,
Diagnostic firstDiagnostic,
Workspace workspace)
{
if (fixAllState == null)
{
return null;
}
if (actionCount > 1 && action.EquivalenceKey == null)
{
return null;
}
var fixAllSuggestedActions = ArrayBuilder<FixAllSuggestedAction>.GetInstance();
foreach (var scope in supportedScopes)
{
var fixAllStateForScope = fixAllState.WithScopeAndEquivalenceKey(scope, action.EquivalenceKey);
var fixAllSuggestedAction = new FixAllSuggestedAction(
_owner, workspace, _subjectBuffer, fixAllStateForScope,
firstDiagnostic, action);
fixAllSuggestedActions.Add(fixAllSuggestedAction);
}
return new SuggestedActionSet(
fixAllSuggestedActions.ToImmutableAndFree(),
title: EditorFeaturesResources.Fix_all_occurrences_in);
}
/// <summary>
/// Return prioritized set of fix groups such that fix group for suppression always show up at the bottom of the list.
/// </summary>
/// <remarks>
/// Fix groups are returned in priority order determined based on <see cref="ExtensionOrderAttribute"/>.
/// Priority for all <see cref="SuggestedActionSet"/>s containing fixes is set to <see cref="SuggestedActionSetPriority.Medium"/> by default.
/// The only exception is the case where a <see cref="SuggestedActionSet"/> only contains suppression fixes -
/// the priority of such <see cref="SuggestedActionSet"/>s is set to <see cref="SuggestedActionSetPriority.None"/> so that suppression fixes
/// always show up last after all other fixes (and refactorings) for the selected line of code.
/// </remarks>
private static ImmutableArray<SuggestedActionSet> PrioritizeFixGroups(
IDictionary<CodeFixGroupKey, IList<SuggestedAction>> map, IList<CodeFixGroupKey> order)
{
var sets = ArrayBuilder<SuggestedActionSet>.GetInstance();
foreach (var diag in order)
{
var actions = map[diag];
foreach (var group in actions.GroupBy(a => a.Priority))
{
var priority = GetSuggestedActionSetPriority(group.Key);
// diagnostic from things like build shouldn't reach here since we don't support LB for those diagnostics
Contract.Requires(diag.Item1.HasTextSpan);
sets.Add(new SuggestedActionSet(group, priority, diag.Item1.TextSpan.ToSpan()));
}
}
return sets.ToImmutableAndFree();
}
// Translates the CodeAnalysis-layer priority into the editor's
// SuggestedActionSetPriority. The two enums are kept in one-to-one
// correspondence; anything else is a programming error.
private static SuggestedActionSetPriority GetSuggestedActionSetPriority(CodeActionPriority key)
{
    if (key == CodeActionPriority.None)
    {
        return SuggestedActionSetPriority.None;
    }

    if (key == CodeActionPriority.Low)
    {
        return SuggestedActionSetPriority.Low;
    }

    if (key == CodeActionPriority.Medium)
    {
        return SuggestedActionSetPriority.Medium;
    }

    if (key == CodeActionPriority.High)
    {
        return SuggestedActionSetPriority.High;
    }

    throw new InvalidOperationException();
}
private ImmutableArray<SuggestedActionSet> GetRefactorings(
IDocumentSupportsFeatureService supportsFeatureService,
ISuggestedActionCategorySet requestedActionCategories,
Workspace workspace,
Document document,
SnapshotSpan range,
CancellationToken cancellationToken)
{
this.AssertIsForeground();
if (workspace.Options.GetOption(EditorComponentOnOffOptions.CodeRefactorings) &&
_owner._codeRefactoringService != null &&
supportsFeatureService.SupportsRefactorings(document) &&
requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Refactoring))
{
// Get the selection while on the UI thread.
var selection = TryGetCodeRefactoringSelection(_subjectBuffer, _textView, range);
if (!selection.HasValue)
{
// this is here to fail test and see why it is failed.
Trace.WriteLine("given range is not current");
return ImmutableArray<SuggestedActionSet>.Empty;
}
// It may seem strange that we kick off a task, but then immediately 'Wait' on
// it. However, it's deliberate. We want to make sure that the code runs on
// the background so that no one takes an accidently dependency on running on
// the UI thread.
var refactorings = Task.Run(
() => _owner._codeRefactoringService.GetRefactoringsAsync(
document, selection.Value, cancellationToken),
cancellationToken).WaitAndGetResult(cancellationToken);
var filteredRefactorings = FilterOnUIThread(refactorings, workspace);
return filteredRefactorings.SelectAsArray(r => OrganizeRefactorings(workspace, r));
}
return ImmutableArray<SuggestedActionSet>.Empty;
}
/// <summary>
/// Arrange refactorings into groups.
/// </summary>
/// <remarks>
/// Refactorings are returned in priority order determined based on <see cref="ExtensionOrderAttribute"/>.
/// Priority for all <see cref="SuggestedActionSet"/>s containing refactorings is set to <see cref="SuggestedActionSetPriority.Low"/>
/// and should show up after fixes but before suppression fixes in the light bulb menu.
/// </remarks>
private SuggestedActionSet OrganizeRefactorings(Workspace workspace, CodeRefactoring refactoring)
{
var refactoringSuggestedActions = ArrayBuilder<SuggestedAction>.GetInstance();
foreach (var action in refactoring.Actions)
{
refactoringSuggestedActions.Add(new CodeRefactoringSuggestedAction(
_owner, workspace, _subjectBuffer, refactoring.Provider, action));
}
return new SuggestedActionSet(
refactoringSuggestedActions.ToImmutableAndFree(), SuggestedActionSetPriority.Low);
}
public async Task<bool> HasSuggestedActionsAsync(ISuggestedActionCategorySet requestedActionCategories, SnapshotSpan range, CancellationToken cancellationToken)
{
// Explicitly hold onto below fields in locals and use these locals throughout this code path to avoid crashes
// if these fields happen to be cleared by Dispose() below. This is required since this code path involves
// code that can run asynchronously from background thread.
var view = _textView;
var buffer = _subjectBuffer;
var provider = _owner;
if (view == null || buffer == null || provider == null)
{
return false;
}
using (var asyncToken = provider.OperationListener.BeginAsyncOperation("HasSuggestedActionsAsync"))
{
var documentAndSnapshot = await GetMatchingDocumentAndSnapshotAsync(range.Snapshot, cancellationToken).ConfigureAwait(false);
if (!documentAndSnapshot.HasValue)
{
// this is here to fail test and see why it is failed.
Trace.WriteLine("given range is not current");
return false;
}
var document = documentAndSnapshot.Value.Item1;
var workspace = document.Project.Solution.Workspace;
var supportsFeatureService = workspace.Services.GetService<IDocumentSupportsFeatureService>();
return
await HasFixesAsync(
supportsFeatureService, requestedActionCategories, provider, document, range,
cancellationToken).ConfigureAwait(false) ||
await HasRefactoringsAsync(
supportsFeatureService, requestedActionCategories, provider, document, buffer, view, range,
cancellationToken).ConfigureAwait(false);
}
}
private async Task<bool> HasFixesAsync(
IDocumentSupportsFeatureService supportsFeatureService,
ISuggestedActionCategorySet requestedActionCategories,
SuggestedActionsSourceProvider provider,
Document document, SnapshotSpan range,
CancellationToken cancellationToken)
{
if (provider._codeFixService != null && supportsFeatureService.SupportsCodeFixes(document) &&
requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.CodeFix))
{
// We only consider suppressions if lightbulb is asking for everything.
// If the light bulb is only asking for code fixes, then we don't consider suppressions.
var considerSuppressionFixes = requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Any);
var result = await Task.Run(
() => provider._codeFixService.GetFirstDiagnosticWithFixAsync(
document, range.Span.ToTextSpan(), considerSuppressionFixes, cancellationToken),
cancellationToken).ConfigureAwait(false);
if (result.HasFix)
{
Logger.Log(FunctionId.SuggestedActions_HasSuggestedActionsAsync);
return true;
}
if (result.PartialResult)
{
// reset solution version number so that we can raise suggested action changed event
Volatile.Write(ref _lastSolutionVersionReported, InvalidSolutionVersion);
return false;
}
}
return false;
}
private async Task<bool> HasRefactoringsAsync(
IDocumentSupportsFeatureService supportsFeatureService,
ISuggestedActionCategorySet requestedActionCategories,
SuggestedActionsSourceProvider provider,
Document document,
ITextBuffer buffer,
ITextView view,
SnapshotSpan range,
CancellationToken cancellationToken)
{
if (!requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Refactoring))
{
// See if we should still show the light bulb, even if we weren't explicitly
// asked for refactorings. We'll show the lightbulb if we're currently
// flighting the "Refactoring" A/B test, or if a special option is set
// enabling this internally.
var workspace = document.Project.Solution.Workspace;
var experimentationService = workspace.Services.GetService<IExperimentationService>();
if (!experimentationService.IsExperimentEnabled("Refactoring") &&
!workspace.Options.GetOption(EditorComponentOnOffOptions.ShowCodeRefactoringsWhenQueriedForCodeFixes))
{
return false;
}
}
if (document.Project.Solution.Options.GetOption(EditorComponentOnOffOptions.CodeRefactorings) &&
provider._codeRefactoringService != null &&
supportsFeatureService.SupportsRefactorings(document))
{
TextSpan? selection = null;
if (IsForeground())
{
// This operation needs to happen on UI thread because it needs to access textView.Selection.
selection = TryGetCodeRefactoringSelection(buffer, view, range);
}
else
{
await InvokeBelowInputPriority(() =>
{
// This operation needs to happen on UI thread because it needs to access textView.Selection.
selection = TryGetCodeRefactoringSelection(buffer, view, range);
}).ConfigureAwait(false);
}
if (!selection.HasValue)
{
// this is here to fail test and see why it is failed.
Trace.WriteLine("given range is not current");
return false;
}
return await Task.Run(
() => provider._codeRefactoringService.HasRefactoringsAsync(
document, selection.Value, cancellationToken),
cancellationToken).ConfigureAwait(false);
}
return false;
}
// Computes the user's selection (as a TextSpan in the requested snapshot) if
// and only if refactorings can be offered for it; returns null otherwise.
// Must run on the UI thread: it reads view.Selection.
private static TextSpan? TryGetCodeRefactoringSelection(ITextBuffer buffer, ITextView view, SnapshotSpan range)
{
    // Map each selected span down from the view's surface buffer to the
    // subject buffer, discarding read-only regions.
    var selectedSpans = view.Selection.SelectedSpans
        .SelectMany(ss => view.BufferGraph.MapDownToBuffer(ss, SpanTrackingMode.EdgeExclusive, buffer))
        .Where(ss => !view.IsReadOnlyOnSurfaceBuffer(ss))
        .ToList();

    // We only support refactorings when there is a single selection in the document.
    if (selectedSpans.Count != 1)
    {
        return null;
    }

    // Translate the selection onto the snapshot the light bulb asked about.
    var translatedSpan = selectedSpans[0].TranslateTo(range.Snapshot, SpanTrackingMode.EdgeInclusive);

    // We only support refactorings when selected span intersects with the span that the light bulb is asking for.
    if (!translatedSpan.IntersectsWith(range))
    {
        return null;
    }

    return translatedSpan.Span.ToTextSpan();
}
// Resolves the Roslyn document that corresponds to the given editor snapshot.
// Returns null at any point where the editor and workspace views of the text
// no longer line up (detached buffer, no workspace, no document, or a stale
// snapshot version).
private static async Task<ValueTuple<Document, ITextSnapshot>?> GetMatchingDocumentAndSnapshotAsync(ITextSnapshot givenSnapshot, CancellationToken cancellationToken)
{
    var buffer = givenSnapshot.TextBuffer;
    if (buffer == null)
    {
        return null;
    }

    var workspace = buffer.GetWorkspace();
    if (workspace == null)
    {
        return null;
    }

    var documentId = workspace.GetDocumentIdInCurrentContext(buffer.AsTextContainer());
    if (documentId == null)
    {
        return null;
    }

    var document = workspace.CurrentSolution.GetDocument(documentId);
    if (document == null)
    {
        return null;
    }

    var sourceText = await document.GetTextAsync(cancellationToken).ConfigureAwait(false);
    cancellationToken.ThrowIfCancellationRequested();

    var snapshot = sourceText.FindCorrespondingEditorTextSnapshot();
    // Reject stale snapshots: the document text must correspond to the same
    // reiterated version as the snapshot the editor handed us.
    if (snapshot == null || snapshot.Version.ReiteratedVersionNumber != givenSnapshot.Version.ReiteratedVersionNumber)
    {
        return null;
    }

    return ValueTuple.Create(document, snapshot);
}
// The source's lifetime is tied to its text view; tear everything down when
// the view closes.
private void OnTextViewClosed(object sender, EventArgs e)
{
    Dispose();
}
private void OnWorkspaceChanged(object sender, EventArgs e)
{
// REVIEW: this event should give both old and new workspace as argument so that
// one doesn't need to hold onto workspace in field.
// remove existing event registration
if (_workspace != null)
{
_workspace.DocumentActiveContextChanged -= OnActiveContextChanged;
}
// REVIEW: why one need to get new workspace from registration? why not just pass in the new workspace?
// add new event registration
_workspace = _registration.Workspace;
if (_workspace != null)
{
_workspace.DocumentActiveContextChanged += OnActiveContextChanged;
}
}
// A linked file's active context switched documents; refresh the light bulb
// for the newly-active document.
private void OnActiveContextChanged(object sender, DocumentActiveContextChangedEventArgs e)
{
    // REVIEW: it would be nice for changed event to pass in both old and new document.
    OnSuggestedActionsChanged(e.Solution.Workspace, e.NewActiveContextDocumentId, e.Solution.WorkspaceVersion);
}
// New diagnostics arrived; possibly re-raise SuggestedActionsChanged so the
// editor re-queries the light bulb.
private void OnDiagnosticsUpdated(object sender, DiagnosticsUpdatedArgs e)
{
    // document removed case. no reason to raise event
    if (e.Solution == null)
    {
        return;
    }

    OnSuggestedActionsChanged(e.Workspace, e.DocumentId, e.Solution.WorkspaceVersion);
}
private void OnSuggestedActionsChanged(Workspace currentWorkspace, DocumentId currentDocumentId, int solutionVersion, DiagnosticsUpdatedArgs args = null)
{
// Explicitly hold onto the _subjectBuffer field in a local and use this local in this function to avoid crashes
// if this field happens to be cleared by Dispose() below. This is required since this code path involves code
// that can run on background thread.
var buffer = _subjectBuffer;
if (buffer == null)
{
return;
}
var workspace = buffer.GetWorkspace();
// workspace is not ready, nothing to do.
if (workspace == null || workspace != currentWorkspace)
{
return;
}
if (currentDocumentId != workspace.GetDocumentIdInCurrentContext(buffer.AsTextContainer()) ||
solutionVersion == Volatile.Read(ref _lastSolutionVersionReported))
{
return;
}
this.SuggestedActionsChanged?.Invoke(this, EventArgs.Empty);
Volatile.Write(ref _lastSolutionVersionReported, solutionVersion);
}
// Unhooks every event subscription and nulls out the fields. Background code
// paths copy these fields into locals first and tolerate nulls, so clearing
// them here is how in-flight async work discovers the source is gone.
public void Dispose()
{
    if (_owner != null)
    {
        ((IDiagnosticUpdateSource)_owner._diagnosticService).DiagnosticsUpdated -= OnDiagnosticsUpdated;
        _owner = null;
    }

    if (_workspace != null)
    {
        _workspace.DocumentActiveContextChanged -= OnActiveContextChanged;
        _workspace = null;
    }

    if (_registration != null)
    {
        _registration.WorkspaceChanged -= OnWorkspaceChanged;
        _registration = null;
    }

    if (_textView != null)
    {
        _textView.Closed -= OnTextViewClosed;
        _textView = null;
    }

    _subjectBuffer = null;
}
}
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections;
using System.Collections.Generic;
using HtcSharp.HttpModule.Http.Features;
using Microsoft.Extensions.Primitives;
using HtcSharp.HttpModule.Http.Headers;
namespace HtcSharp.HttpModule.Http {
// SourceTools-Start
// Remote-File C:\ASP\src\Http\Http\src\HeaderDictionary.cs
// Start-At-Remote-Line 11
// SourceTools-End
/// <summary>
/// Represents a wrapper for RequestHeaders and ResponseHeaders.
/// </summary>
public class HeaderDictionary : IHeaderDictionary {
private static readonly string[] EmptyKeys = Array.Empty<string>();
private static readonly StringValues[] EmptyValues = Array.Empty<StringValues>();
private static readonly Enumerator EmptyEnumerator = new Enumerator();
// Pre-box
private static readonly IEnumerator<KeyValuePair<string, StringValues>> EmptyIEnumeratorType = EmptyEnumerator;
private static readonly IEnumerator EmptyIEnumerator = EmptyEnumerator;
// Creates an empty header dictionary; the backing store is allocated lazily
// on first write.
public HeaderDictionary() {
}
// Wraps an existing store directly (no copy). NOTE(review): the store's
// comparer is taken as-is; it is presumably case-insensitive like the one
// EnsureStore creates — confirm at call sites.
public HeaderDictionary(Dictionary<string, StringValues> store) {
    Store = store;
}
// Pre-sizes the backing store to avoid rehashing when the header count is
// known up front.
public HeaderDictionary(int capacity) {
    EnsureStore(capacity);
}
private Dictionary<string, StringValues> Store { get; set; }
// Lazily allocates the backing dictionary, keyed case-insensitively as HTTP
// header names require.
private void EnsureStore(int capacity) {
    if (Store != null) {
        return;
    }

    Store = new Dictionary<string, StringValues>(capacity, StringComparer.OrdinalIgnoreCase);
}
/// <summary>
/// Get or sets the associated value from the collection as a single string.
/// </summary>
/// <param name="key">The header name.</param>
/// <returns>the associated value from the collection as a StringValues or StringValues.Empty if the key is not present.</returns>
public StringValues this[string key] {
    get {
        if (Store == null) {
            return StringValues.Empty;
        }

        StringValues value;
        if (TryGetValue(key, out value)) {
            return value;
        }
        return StringValues.Empty;
    }
    set {
        if (key == null) {
            throw new ArgumentNullException(nameof(key));
        }
        ThrowIfReadOnly();

        // Assigning an empty value removes the header entirely.
        if (value.Count == 0) {
            Store?.Remove(key);
        } else {
            EnsureStore(1);
            Store[key] = value;
        }
    }
}
/// <summary>
/// Strict dictionary-style access to a header.
/// </summary>
/// <param name="key">The header name.</param>
/// <returns>The header's values.</returns>
/// <exception cref="KeyNotFoundException">The key is not present.</exception>
StringValues IDictionary<string, StringValues>.this[string key] {
    get {
        // Honor the documented contract even before the lazy store exists:
        // previously a null Store produced a NullReferenceException here.
        if (Store == null) {
            throw new KeyNotFoundException(key);
        }
        return Store[key];
    }
    set {
        ThrowIfReadOnly();
        this[key] = value;
    }
}
public long? ContentLength {
get {
long value;
var rawValue = this[HeaderNames.ContentLength];
if (rawValue.Count == 1 &&
!string.IsNullOrEmpty(rawValue[0]) &&
HeaderUtilities.TryParseNonNegativeInt64(new StringSegment(rawValue[0]).Trim(), out value)) {
return value;
}
return null;
}
set {
ThrowIfReadOnly();
if (value.HasValue) {
this[HeaderNames.ContentLength] = HeaderUtilities.FormatNonNegativeInt64(value.GetValueOrDefault());
} else {
this.Remove(HeaderNames.ContentLength);
}
}
}
/// <summary>
/// Gets the number of elements contained in the <see cref="HeaderDictionary" />;.
/// </summary>
/// <returns>The number of elements contained in the <see cref="HeaderDictionary" />.</returns>
public int Count => Store?.Count ?? 0;
/// <summary>
/// Gets a value that indicates whether the <see cref="HeaderDictionary" /> is in read-only mode.
/// </summary>
/// <returns>true if the <see cref="HeaderDictionary" /> is in read-only mode; otherwise, false.</returns>
public bool IsReadOnly { get; set; }
// The header names currently present; a shared empty array backs the
// not-yet-allocated case so no garbage is produced.
public ICollection<string> Keys {
    get { return Store == null ? (ICollection<string>)EmptyKeys : Store.Keys; }
}
public ICollection<StringValues> Values {
get {
if (Store == null) {
return EmptyValues;
}
return Store.Values;
}
}
/// <summary>
/// Adds a new header entry to the collection.
/// </summary>
/// <param name="item">The key/value pair to add.</param>
/// <exception cref="ArgumentNullException">The item's key is null.</exception>
public void Add(KeyValuePair<string, StringValues> item) {
    if (item.Key == null) {
        // BUG FIX: the original called ArgumentNullException(string) with the
        // message text, which the single-argument overload treats as the
        // parameter *name*; use the (paramName, message) overload instead.
        throw new ArgumentNullException(nameof(item), "The key is null");
    }
    ThrowIfReadOnly();
    EnsureStore(1);
    Store.Add(item.Key, item.Value);
}
/// <summary>
/// Adds the given header and values to the collection.
/// </summary>
/// <param name="key">The header name.</param>
/// <param name="value">The header values.</param>
/// <exception cref="ArgumentNullException"><paramref name="key"/> is null.</exception>
public void Add(string key, StringValues value) {
    if (key is null) {
        throw new ArgumentNullException(nameof(key));
    }
    ThrowIfReadOnly();
    // Dictionary.Add throws on duplicate keys, matching IDictionary semantics.
    EnsureStore(1);
    Store.Add(key, value);
}
/// <summary>
/// Removes all headers from the collection.
/// </summary>
public void Clear() {
    ThrowIfReadOnly();
    if (Store != null) {
        Store.Clear();
    }
}
/// <summary>
/// Returns a value indicating whether the specified key/value pair occurs within this collection.
/// </summary>
/// <param name="item">The item.</param>
/// <returns>true if the key is present with an equal value; otherwise, false.</returns>
public bool Contains(KeyValuePair<string, StringValues> item) {
    StringValues value;
    return Store != null
        && Store.TryGetValue(item.Key, out value)
        && StringValues.Equals(value, item.Value);
}
/// <summary>
/// Determines whether the <see cref="HeaderDictionary" /> contains a specific key.
/// </summary>
/// <param name="key">The key.</param>
/// <returns>true if the <see cref="HeaderDictionary" /> contains the key; otherwise, false.</returns>
public bool ContainsKey(string key) {
    return Store != null && Store.ContainsKey(key);
}
/// <summary>
/// Copies the <see cref="HeaderDictionary" /> elements to a one-dimensional array starting at the specified index.
/// </summary>
/// <param name="array">The destination array for the copied key/value pairs.</param>
/// <param name="arrayIndex">The zero-based index in <paramref name="array" /> at which copying begins.</param>
public void CopyTo(KeyValuePair<string, StringValues>[] array, int arrayIndex) {
    if (Store == null) {
        return;
    }
    var target = arrayIndex;
    foreach (var kvp in Store) {
        array[target++] = kvp;
    }
}
/// <summary>
/// Removes the given item from the collection.
/// </summary>
/// <param name="item">The item.</param>
/// <returns>true if the specified pair was removed from the collection; otherwise, false.</returns>
public bool Remove(KeyValuePair<string, StringValues> item) {
    ThrowIfReadOnly();
    StringValues value;
    // Only remove when both the key exists and the stored value matches,
    // per ICollection<KeyValuePair<,>> semantics.
    if (Store != null &&
        Store.TryGetValue(item.Key, out value) &&
        StringValues.Equals(item.Value, value)) {
        return Store.Remove(item.Key);
    }
    return false;
}
/// <summary>
/// Removes the given header from the collection.
/// </summary>
/// <param name="key">The header name.</param>
/// <returns>true if the header was removed from the collection; otherwise, false.</returns>
public bool Remove(string key) {
    ThrowIfReadOnly();
    return Store != null && Store.Remove(key);
}
/// <summary>
/// Retrieves a value from the dictionary.
/// </summary>
/// <param name="key">The header name.</param>
/// <param name="value">The value, or default when the key is absent.</param>
/// <returns>true if the <see cref="HeaderDictionary" /> contains the key; otherwise, false.</returns>
public bool TryGetValue(string key, out StringValues value) {
    if (Store != null) {
        return Store.TryGetValue(key, out value);
    }
    value = default(StringValues);
    return false;
}
/// <summary>
/// Returns an enumerator that iterates through the headers.
/// </summary>
/// <returns>An <see cref="Enumerator" /> struct that can be used to iterate through the collection.</returns>
public Enumerator GetEnumerator() {
    if (Store != null && Store.Count > 0) {
        return new Enumerator(Store.GetEnumerator());
    }
    // Struct-typed empty enumerator; no boxing for the common empty case.
    return EmptyEnumerator;
}
/// <summary>
/// Returns an enumerator that iterates through the headers.
/// </summary>
/// <returns>An <see cref="IEnumerator" /> object that can be used to iterate through the collection.</returns>
IEnumerator<KeyValuePair<string, StringValues>> IEnumerable<KeyValuePair<string, StringValues>>.GetEnumerator() {
    if (Store != null && Store.Count > 0) {
        return Store.GetEnumerator();
    }
    // Shared empty-enumerator instance; avoids an allocation per call.
    return EmptyIEnumeratorType;
}
/// <summary>
/// Returns an enumerator that iterates through the headers.
/// </summary>
/// <returns>An <see cref="IEnumerator" /> object that can be used to iterate through the collection.</returns>
IEnumerator IEnumerable.GetEnumerator() {
    if (Store != null && Store.Count > 0) {
        return Store.GetEnumerator();
    }
    // Shared empty-enumerator instance; avoids an allocation per call.
    return EmptyIEnumerator;
}
// Guard shared by all mutating members; rejects writes once the dictionary
// has been switched to read-only mode.
private void ThrowIfReadOnly() {
    if (!IsReadOnly) {
        return;
    }
    throw new InvalidOperationException("The response headers cannot be modified because the response has already started.");
}
// Struct enumerator over the backing Dictionary. A default(Enumerator)
// (with _notEmpty == false) serves as the shared empty enumerator.
public struct Enumerator : IEnumerator<KeyValuePair<string, StringValues>> {
// Do NOT make this readonly, or MoveNext will not work
private Dictionary<string, StringValues>.Enumerator _dictionaryEnumerator;
// True only when constructed over a real dictionary enumerator; the
// default-constructed (empty) enumerator leaves this false.
private bool _notEmpty;
internal Enumerator(Dictionary<string, StringValues>.Enumerator dictionaryEnumerator) {
_dictionaryEnumerator = dictionaryEnumerator;
_notEmpty = true;
}
// Advances the wrapped enumerator; always false for the empty enumerator.
public bool MoveNext() {
if (_notEmpty) {
return _dictionaryEnumerator.MoveNext();
}
return false;
}
// Current key/value pair, or default for the empty enumerator.
public KeyValuePair<string, StringValues> Current {
get {
if (_notEmpty) {
return _dictionaryEnumerator.Current;
}
return default(KeyValuePair<string, StringValues>);
}
}
// Nothing to release; Dictionary's enumerator holds no resources here.
public void Dispose() {
}
object IEnumerator.Current {
get { return Current; }
}
void IEnumerator.Reset() {
if (_notEmpty) {
// NOTE(review): casting the struct enumerator to IEnumerator boxes a
// *copy*, so this Reset affects the boxed copy rather than
// _dictionaryEnumerator itself — confirm the effectively-no-op
// behavior is intended.
((IEnumerator) _dictionaryEnumerator).Reset();
}
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Runtime.CompilerServices;
// Generic interface implemented (with different type arguments) by J, K and
// L below; class F exercises `is` checks against its instantiations.
interface I<T>
{
// Maps a T to an int; each implementing class supplies its own mapping.
int E(T t);
}
// Sealed implementation of I<string>: maps a string to its length.
sealed class J : I<string>
{
    public int E(string s) => s.Length;
}
// Unsealed implementation of I<string>: maps a string to its hash code.
class K : I<string>
{
    public int E(string s) => s.GetHashCode();
}
// Derives from K (so it inherits I<string>) and additionally implements
// I<object>; instances therefore satisfy both interface instantiations.
sealed class L : K, I<object>
{
    public int E(object o) => o.GetHashCode();
}
// Exercises `is` (isinst) checks between the generic interface I<T> and the
// implementing classes J, K and L, through both interface-typed and
// class-typed references, with generic and fixed type arguments.
// Main returns 100 on success, 0 on failure.
class F
{
    // Every helper is NoInlining so the type check runs against the actual
    // runtime argument instead of being folded away at the call site.
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsIString<T>(I<T> i)
    {
        return i is I<string>;
    }
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsI<T,U>(I<U> i)
    {
        return i is I<T>;
    }
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsJI<T>(J j)
    {
        return j is I<T>;
    }
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsKI<T>(K k)
    {
        return k is I<T>;
    }
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsLI<T>(L l)
    {
        return l is I<T>;
    }
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsJIString(J j)
    {
        return j is I<string>;
    }
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsKIString(K k)
    {
        return k is I<string>;
    }
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsLIString(L l)
    {
        return l is I<string>;
    }
#pragma warning disable CS0184
    // warning CS0184: The given expression is never of the provided ('I<object>') type
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsJIObject(J j)
    {
        return j is I<object>;
    }
#pragma warning restore CS0184
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsKIObject(K k)
    {
        return k is I<object>;
    }
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsLIObject(L l)
    {
        return l is I<object>;
    }
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsIStringJ(I<string> i)
    {
        return i is J;
    }
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsIStringK(I<string> i)
    {
        return i is K;
    }
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsIStringL(I<string> i)
    {
        return i is L;
    }
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsIJ<T>(I<T> i)
    {
        return i is J;
    }
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsIK<T>(I<T> i)
    {
        return i is K;
    }
    [MethodImpl(MethodImplOptions.NoInlining)]
    static bool IsIL<T>(I<T> i)
    {
        // BUG FIX: previously tested `i is K` (copy/paste from IsIK); the
        // method name and the h6..h8 expectations in Main require `is L`.
        return i is L;
    }
    public static int Main()
    {
        var j = new J();
        var k = new K();
        var l = new L();
        bool b0 = IsIString(j);
        bool b1 = IsIString(k);
        bool b2 = IsIString<string>(l);
        bool b3 = IsIString<object>(l);
        bool c0 = IsI<string,string>(j);
        bool c1 = IsI<string,string>(k);
        bool c2 = IsI<string,string>(l);
        bool d0 = IsI<object,string>(j);
        bool d1 = IsI<object,string>(k);
        bool d2 = IsI<object,string>(l);
        bool e0 = IsJI<string>(j);
        bool e1 = IsKI<string>(k);
        bool e2 = IsKI<string>(l);
        bool e3 = IsLI<string>(l);
        bool f0 = IsJIString(j);
        bool f1 = IsKIString(k);
        bool f2 = IsKIString(l);
        bool f3 = IsLIString(l);
        bool g0 = IsIStringJ(j);
        bool g1 = IsIStringJ(k);
        bool g2 = IsIStringJ(l);
        bool g3 = IsIStringK(j);
        bool g4 = IsIStringK(k);
        bool g5 = IsIStringK(l);
        bool g6 = IsIStringL(j);
        bool g7 = IsIStringL(k);
        bool g8 = IsIStringL(l);
        bool h0 = IsIJ<string>(j);
        bool h1 = IsIJ<string>(k);
        bool h2 = IsIJ<string>(l);
        bool h3 = IsIK<string>(j);
        bool h4 = IsIK<string>(k);
        bool h5 = IsIK<string>(l);
        bool h6 = IsIL<string>(j);
        bool h7 = IsIL<string>(k);
        bool h8 = IsIL<string>(l);
        bool j0 = IsJIObject(j);
        bool j1 = IsKIObject(k);
        bool j2 = IsKIObject(l);
        bool j3 = IsLIObject(l);
        // NOTE(review): g3 and h3 are computed but intentionally(?) not folded
        // into either aggregate below — confirm whether they should be in neg.
        // Every "pos" check must come back true...
        bool pos =
            b0 & b1 & b2 & b3
            & c0 & c1 & c2
            & d2
            & e0 & e1 & e2 & e3
            & f0 & f1 & f2 & f3
            & g0 & g4 & g5 & g8
            & h0 & h4 & h5 & h8
            & j2 & j3;
        // ...and every "neg" check must come back false. BUG FIX: combine with
        // OR so that any single unexpected true fails the test; the original
        // AND only produced a failing `neg` when *all* negatives were true.
        bool neg =
            d0 | d1
            | g1 | g2 | g6 | g7
            | h1 | h2 | h6 | h7
            | j0 | j1;
        return pos & !neg ? 100 : 0;
    }
}
| |
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace ShopifySharp
{
// DTO mirroring the Shopify Admin API "shop" resource; every property maps
// one-to-one onto a JSON field via its [JsonProperty] attribute.
public class Shop: ShopifyObject
{
/// <summary>
/// The shop's street address.
/// </summary>
[JsonProperty("address1")]
public string Address1 { get; set; }
/// <summary>
/// The optional second line of the shop's street address.
/// </summary>
[JsonProperty("address2")]
public string Address2 { get; set; }
/// <summary>
/// The city in which the shop is located.
/// </summary>
[JsonProperty("city")]
public string City { get; set; }
/// <summary>
/// The shop's country (by default equal to the two-letter country code).
/// </summary>
[JsonProperty("country")]
public string Country { get; set; }
/// <summary>
/// The two-letter country code corresponding to the shop's country.
/// </summary>
[JsonProperty("country_code")]
public string CountryCode { get; set; }
/// <summary>
/// The shop's normalized country name.
/// </summary>
[JsonProperty("country_name")]
public string CountryName { get; set; }
/// <summary>
/// The date and time when the shop was created.
/// </summary>
[JsonProperty("created_at")]
public DateTimeOffset? CreatedAt { get; set; }
/// <summary>
/// The shop's customer-facing contact email address.
/// </summary>
[JsonProperty("customer_email")]
public string CustomerEmail { get; set; }
/// <summary>
/// The three-letter code for the currency that the shop accepts.
/// </summary>
[JsonProperty("currency")]
public string Currency { get; set; }
/// <summary>
/// The shop's description.
/// </summary>
[JsonProperty("description")]
public string Description { get; set; }
/// <summary>
/// The shop's domain.
/// </summary>
[JsonProperty("domain")]
public string Domain { get; set; }
/// <summary>
/// The contact email address for the shop.
/// </summary>
[JsonProperty("email")]
public string Email { get; set; }
/// <summary>
/// The currencies enabled for presentment on the shop.
/// </summary>
[JsonProperty("enabled_presentment_currencies")]
public string[] EnabledPresentmentCurrencies { get; set; }
/// <summary>
/// Present when a shop has a google app domain. It will be returned as a URL, else null.
/// </summary>
[JsonProperty("google_apps_domain")]
public string GoogleAppsDomain { get; set; }
/// <summary>
/// Present if a shop has google apps enabled. Those shops with this feature will be able to login to the google apps login.
/// </summary>
[JsonProperty("google_apps_login_enabled")]
public string GoogleAppsLoginEnabled { get; set; }
/// <summary>
/// Whether the shop is eligible to receive a free credit card reader from Shopify.
/// </summary>
[JsonProperty("eligible_for_card_reader_giveaway")]
public bool? EligibleForCardReaderGiveaway { get; set; }
/// <summary>
/// Whether the shop is eligible to use Shopify Payments.
/// </summary>
[JsonProperty("eligible_for_payments")]
public bool? EligibleForPayments { get; set; }
/// <summary>
/// Whether the shop is capable of accepting payments directly through the Checkout API.
/// </summary>
[JsonProperty("checkout_api_supported")]
public bool? CheckoutApiSupported { get; set; }
/// <summary>
/// Whether any active discounts exist for the shop.
/// </summary>
[JsonProperty("has_discounts")]
public bool? HasDiscounts { get; set; }
/// <summary>
/// Whether any active gift cards exist for the shop.
/// </summary>
[JsonProperty("has_gift_cards")]
public bool? HasGiftCards { get; set; }
/// <summary>
/// Geographic coordinate specifying the north/south location of a shop.
/// </summary>
[JsonProperty("latitude")]
public string Latitude { get; set; }
/// <summary>
/// Geographic coordinate specifying the east/west location of a shop.
/// </summary>
[JsonProperty("longitude")]
public string Longitude { get; set; }
/// <summary>
/// A string representing the way currency is formatted when the currency isn't specified.
/// </summary>
[JsonProperty("money_format")]
public string MoneyFormat { get; set; }
/// <summary>
/// A string representing the way currency is formatted in email notifications when the currency isn't specified.
/// </summary>
[JsonProperty("money_in_emails_format")]
public string MoneyInEmailsFormat { get; set; }
/// <summary>
/// A string representing the way currency is formatted when the currency is specified.
/// </summary>
[JsonProperty("money_with_currency_format")]
public string MoneyWithCurrencyFormat { get; set; }
/// <summary>
/// A string representing the way currency is formatted in email notifications when the currency is specified.
/// </summary>
[JsonProperty("money_with_currency_in_emails_format")]
public string MoneyWithCurrencyInEmailsFormat { get; set; }
/// <summary>
/// Whether multi-location is enabled.
/// </summary>
[JsonProperty("multi_location_enabled")]
public bool? MultiLocationEnabled { get; set; }
/// <summary>
/// Whether the pre-launch page is enabled on the online storefront.
/// </summary>
[JsonProperty("pre_launch_enabled")]
public bool? PreLaunchEnabled { get; set; }
/// <summary>
/// Whether the shop requires an extra Shopify Payments agreement.
/// </summary>
[JsonProperty("requires_extra_payments_agreement")]
public bool? RequiresExtraPaymentsAgreement { get; set; }
/// <summary>
/// The shop's 'myshopify.com' domain.
/// </summary>
[JsonProperty("myshopify_domain")]
public string MyShopifyDomain { get; set; }
/// <summary>
/// The name of the shop.
/// </summary>
[JsonProperty("name")]
public string Name { get; set; }
/// <summary>
/// The name of the Shopify plan the shop is on.
/// </summary>
[JsonProperty("plan_name")]
public string PlanName { get; set; }
/// <summary>
/// The display name of the Shopify plan the shop is on.
/// </summary>
[JsonProperty("plan_display_name")]
public string PlanDisplayName { get; set; }
/// <summary>
/// Indicates whether the Storefront password protection is enabled.
/// </summary>
[JsonProperty("password_enabled")]
public bool? PasswordEnabled { get; set; }
/// <summary>
/// The contact phone number for the shop.
/// </summary>
[JsonProperty("phone")]
public string Phone { get; set; }
/// <summary>
/// The shop's primary locale.
/// </summary>
[JsonProperty("primary_locale")]
public string PrimaryLocale { get; set; }
/// <summary>
/// The shop's normalized province or state name.
/// </summary>
[JsonProperty("province")]
public string Province { get; set; }
/// <summary>
/// The two-letter code for the shop's province or state.
/// </summary>
[JsonProperty("province_code")]
public string ProvinceCode { get; set; }
/// <summary>
/// A list of countries the shop ships products to, separated by a comma.
/// </summary>
[JsonProperty("ships_to_countries")]
public string ShipsToCountries { get; set; }
/// <summary>
/// The username of the shop owner.
/// </summary>
[JsonProperty("shop_owner")]
public string ShopOwner { get; set; }
/// <summary>
/// Unknown. Shopify documentation does not currently indicate what this property actually is.
/// </summary>
[JsonProperty("source")]
public string Source { get; set; }
/// <summary>
/// Specifies whether or not taxes were charged for shipping.
/// </summary>
/// <remarks>Although the Shopify docs don't indicate this, it's possible for the value to be null.</remarks>
[JsonProperty("tax_shipping")]
public bool? TaxShipping { get; set; }
/// <summary>
/// The setting for whether applicable taxes are included in product prices.
/// </summary>
[JsonProperty("taxes_included")]
public bool? TaxesIncluded { get; set; }
/// <summary>
/// The setting for whether the shop is applying taxes on a per-county basis or not (US-only). Valid values are: "true" or "null."
/// </summary>
[JsonProperty("county_taxes")]
public bool? CountyTaxes { get; set; }
/// <summary>
/// The name of the timezone the shop is in.
/// </summary>
[JsonProperty("timezone")]
public string Timezone { get; set; }
/// <summary>
/// The named timezone assigned by the IANA.
/// </summary>
[JsonProperty("iana_timezone")]
public string IANATimezone { get; set; }
/// <summary>
/// The zip or postal code of the shop's address.
/// </summary>
[JsonProperty("zip")]
public string Zip { get; set; }
/// <summary>
/// Indicates whether the shop has a web-based storefront or not.
/// </summary>
[JsonProperty("has_storefront")]
public bool? HasStorefront { get; set; }
/// <summary>
/// Indicates whether the shop has any outstanding setup steps or not.
/// </summary>
[JsonProperty("setup_required")]
public bool? SetupRequired { get; set; }
/// <summary>
/// The default unit of weight measurement.
/// </summary>
[JsonProperty("weight_unit")]
public string WeightUnit { get; set; }
/// <summary>
/// The date and time when the shop was last updated.
/// </summary>
[JsonProperty("updated_at")]
public DateTimeOffset? UpdatedAt { get; set; }
/// <summary>
/// The id of the shop's default location.
/// </summary>
[JsonProperty("primary_location_id")]
public long? PrimaryLocationId { get; set; }
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.