context stringlengths 2.52k 185k | gt stringclasses 1
value |
|---|---|
/* ====================================================================
Copyright (C) 2004-2008 fyiReporting Software, LLC
Copyright (C) 2011 Peter Gill <peter@majorsilence.com>
This file is part of the fyiReporting RDL project.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For additional information, email info@fyireporting.com or visit
the website www.fyiReporting.com.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Data;
using System.Windows.Forms;
using System.Xml;
using System.Text;
using fyiReporting.RdlDesign.Resources;
namespace fyiReporting.RdlDesign
{
/// <summary>
/// Summary description for StyleCtl.
/// </summary>
/// <summary>
/// Designer property page for editing a report DataSet: its name, data source,
/// query (command text, timeout, query parameters) and field definitions.
/// </summary>
internal partial class DataSetsCtl : System.Windows.Forms.UserControl, IProperty
{
    private bool _UseTypenameQualified = false; // true when TypeName was read as "rd:TypeName"; written back qualified
    private DesignXmlDraw _Draw;                // design-time access to the report XML
    private XmlNode _dsNode;                    // the <DataSet> node being edited
    private DataSetValues _dsv;                 // working copy of the DataSet values shown in the UI

    internal DataSetsCtl(DesignXmlDraw dxDraw, XmlNode dsNode)
    {
        _Draw = dxDraw;
        _dsNode = dsNode;
        // This call is required by the Windows.Forms Form Designer.
        InitializeComponent();
        // Initialize form using the DataSet node values
        InitValues();
    }

    /// <summary>Working copy of the DataSet values edited by this control.</summary>
    internal DataSetValues DSV
    {
        get { return _dsv; }
    }

    /// <summary>
    /// Populates _dsv and the UI controls from the DataSet XML node.
    /// </summary>
    private void InitValues()
    {
        // cbDataSource
        cbDataSource.Items.AddRange(_Draw.DataSourceNames);

        // Obtain the existing DataSet info
        XmlNode dNode = this._dsNode;
        XmlAttribute nAttr = dNode.Attributes["Name"];
        _dsv = new DataSetValues(nAttr == null ? "" : nAttr.Value);
        _dsv.Node = dNode;
        XmlNode ctNode = DesignXmlDraw.FindNextInHierarchy(dNode, "Query", "CommandText");
        _dsv.CommandText = ctNode == null ? "" : ctNode.InnerText;
        XmlNode datasource = DesignXmlDraw.FindNextInHierarchy(dNode, "Query", "DataSourceName");
        _dsv.DataSourceName = datasource == null ? "" : datasource.InnerText;
        XmlNode timeout = DesignXmlDraw.FindNextInHierarchy(dNode, "Query", "Timeout");
        try
        {
            _dsv.Timeout = timeout == null ? 0 : Convert.ToInt32(timeout.InnerText);
        }
        catch // we don't stop just because timeout isn't convertable
        {
            _dsv.Timeout = 0;
        }

        // Get QueryParameters; they are loaded here but used by the QueryParametersCtl
        _dsv.QueryParameters = new DataTable();
        _dsv.QueryParameters.Columns.Add(new DataColumn("Name", typeof(string)));
        _dsv.QueryParameters.Columns.Add(new DataColumn("Value", typeof(string)));
        XmlNode qpNode = DesignXmlDraw.FindNextInHierarchy(dNode, "Query", "QueryParameters");
        if (qpNode != null)
        {
            string[] rowValues = new string[2];
            foreach (XmlNode qNode in qpNode.ChildNodes)
            {
                if (qNode.Name != "QueryParameter")
                    continue;
                XmlAttribute xAttr = qNode.Attributes["Name"];
                if (xAttr == null)
                    continue; // a QueryParameter without a Name is unusable
                rowValues[0] = xAttr.Value;
                rowValues[1] = _Draw.GetElementValue(qNode, "Value", "");
                _dsv.QueryParameters.Rows.Add(rowValues);
            }
        }

        // Get Fields
        _dsv.Fields = new DataTable();
        _dsv.Fields.Columns.Add(new DataColumn("Name", typeof(string)));
        _dsv.Fields.Columns.Add(new DataColumn("QueryName", typeof(string)));
        _dsv.Fields.Columns.Add(new DataColumn("Value", typeof(string)));
        _dsv.Fields.Columns.Add(new DataColumn("TypeName", typeof(string)));
        XmlNode fsNode = _Draw.GetNamedChildNode(dNode, "Fields");
        if (fsNode != null)
        {
            string[] rowValues = new string[4];
            foreach (XmlNode fNode in fsNode.ChildNodes)
            {
                if (fNode.Name != "Field")
                    continue;
                XmlAttribute xAttr = fNode.Attributes["Name"];
                if (xAttr == null)
                    continue; // a Field without a Name is unusable
                rowValues[0] = xAttr.Value;
                rowValues[1] = _Draw.GetElementValue(fNode, "DataField", "");
                rowValues[2] = _Draw.GetElementValue(fNode, "Value", "");
                // TypeName may be unqualified or namespace-qualified (rd:TypeName);
                // remember which form was used so Apply() writes it back the same way.
                string typename = _Draw.GetElementValue(fNode, "TypeName", null);
                if (typename == null)
                {
                    typename = _Draw.GetElementValue(fNode, "rd:TypeName", null);
                    if (typename != null)
                        _UseTypenameQualified = true; // we got it qualified so we'll generate qualified
                }
                if (typename != null && !dgtbTypeName.Items.Contains(typename))
                {
                    dgtbTypeName.Items.Add(typename);
                }
                rowValues[3] = typename == null ? "" : typename;
                _dsv.Fields.Rows.Add(rowValues);
            }
        }

        this.tbDSName.Text = _dsv.Name;
        this.tbSQL.Text = _dsv.CommandText.Replace("\r\n", "\n").Replace("\n", Environment.NewLine);
        this.cbDataSource.Text = _dsv.DataSourceName;
        // Clamp to the control's range so an out-of-range Timeout in the XML
        // cannot throw ArgumentOutOfRangeException when assigned.
        this.tbTimeout.Value = Math.Max(tbTimeout.Minimum, Math.Min(tbTimeout.Maximum, _dsv.Timeout));
        dgFields.DataSource = _dsv.Fields;
    }

    /// <summary>
    /// Validates the DataSet name; shows an error dialog and returns false when invalid.
    /// </summary>
    public bool IsValid()
    {
        string nerr = _Draw.NameError(this._dsNode, this.tbDSName.Text);
        if (nerr != null)
        {
            MessageBox.Show(nerr, Strings.DataSetsCtl_Show_Name, MessageBoxButtons.OK, MessageBoxIcon.Error);
            return false;
        }
        return true;
    }

    /// <summary>
    /// Writes the edited values back into the DataSet XML node, rebuilding the
    /// Query, QueryParameters and Fields elements from _dsv.
    /// </summary>
    public void Apply()
    {
        XmlNode dNode = this._dsNode;
        // Create the name attribute
        _Draw.SetElementAttribute(dNode, "Name", _dsv.Name);
        _Draw.RemoveElement(dNode, "Query"); // get rid of old query
        XmlNode qNode = _Draw.CreateElement(dNode, "Query", null);
        _Draw.SetElement(qNode, "DataSourceName", _dsv.DataSourceName);
        if (_dsv.Timeout > 0)
            _Draw.SetElement(qNode, "Timeout", _dsv.Timeout.ToString());
        _Draw.SetElement(qNode, "CommandText", _dsv.CommandText);

        // Handle QueryParameters
        _Draw.RemoveElement(qNode, "QueryParameters"); // get rid of old QueryParameters
        XmlNode qpsNode = _Draw.CreateElement(qNode, "QueryParameters", null);
        foreach (DataRow dr in _dsv.QueryParameters.Rows)
        {
            if (dr[0] == DBNull.Value || dr[1] == null || dr[1] == DBNull.Value)
                continue;
            string name = (string)dr[0];
            if (name.Length <= 0)
                continue;
            XmlNode qpNode = _Draw.CreateElement(qpsNode, "QueryParameter", null);
            _Draw.SetElementAttribute(qpNode, "Name", name);
            _Draw.SetElement(qpNode, "Value", (string)dr[1]);
        }
        if (!qpsNode.HasChildNodes) // if no parameters we don't need to define them
            _Draw.RemoveElement(qNode, "QueryParameters");

        // Handle Fields
        _Draw.RemoveElement(dNode, "Fields"); // get rid of old Fields
        XmlNode fsNode = _Draw.CreateElement(dNode, "Fields", null);
        foreach (DataRow dr in _dsv.Fields.Rows)
        {
            if (dr[0] == DBNull.Value)
                continue;
            if (dr[1] == DBNull.Value && dr[2] == DBNull.Value)
                continue;
            XmlNode fNode = _Draw.CreateElement(fsNode, "Field", null);
            _Draw.SetElementAttribute(fNode, "Name", (string)dr[0]);
            // DataField wins over Value; when neither is supplied the
            // DataField defaults to the field name itself.
            if (dr[1] != DBNull.Value &&
                dr[1] is string &&
                (string)dr[1] != string.Empty)
                _Draw.SetElement(fNode, "DataField", (string)dr[1]);
            else if (dr[2] != DBNull.Value &&
                dr[2] is string &&
                (string)dr[2] != string.Empty)
                _Draw.SetElement(fNode, "Value", (string)dr[2]);
            else
                _Draw.SetElement(fNode, "DataField", (string)dr[0]); // make datafield same as name
            // Handle typename if any; write it back in the same (qualified or
            // unqualified) form it was read in.
            if (dr[3] != DBNull.Value &&
                dr[3] is string &&
                (string)dr[3] != string.Empty)
            {
                _Draw.SetElement(fNode, _UseTypenameQualified ? "rd:TypeName" : "TypeName", (string)dr[3]);
            }
        }
    }

    private void tbDSName_TextChanged(object sender, System.EventArgs e)
    {
        _dsv.Name = tbDSName.Text;
    }

    private void cbDataSource_SelectedIndexChanged(object sender, System.EventArgs e)
    {
        _dsv.DataSourceName = cbDataSource.Text;
    }

    private void tbSQL_TextChanged(object sender, System.EventArgs e)
    {
        _dsv.CommandText = tbSQL.Text;
    }

    private void bDeleteField_Click(object sender, System.EventArgs e)
    {
        // CurrentRow is null when the grid is empty; guard before dereferencing.
        if (this.dgFields.CurrentRow == null || this.dgFields.CurrentRow.Index < 0)
            return;
        _dsv.Fields.Rows.RemoveAt(this.dgFields.CurrentRow.Index);
    }

    private void bRefresh_Click(object sender, System.EventArgs e)
    {
        // Need to clear all the fields and then replace with the columns
        // of the SQL statement
        List<SqlColumn> cols = DesignerUtility.GetSqlColumns(_Draw, cbDataSource.Text, tbSQL.Text);
        if (cols == null || cols.Count <= 0)
            return; // something didn't work right
        _dsv.Fields.Rows.Clear();
        string[] rowValues = new string[4];
        foreach (SqlColumn sc in cols)
        {
            rowValues[0] = sc.Name;
            rowValues[1] = sc.Name;
            rowValues[2] = "";
            // Make sure the type shows up in the combo column's pick list.
            DataGridViewComboBoxColumn TypeColumn = (dgFields.Columns[3] as DataGridViewComboBoxColumn);
            if (!TypeColumn.Items.Contains(sc.DataType.FullName))
            {
                TypeColumn.Items.Add(sc.DataType.FullName);
            }
            rowValues[3] = sc.DataType.FullName;
            _dsv.Fields.Rows.Add(rowValues);
        }
    }

    private void bEditSQL_Click(object sender, System.EventArgs e)
    {
        // The dialog is IDisposable; the using statement guarantees disposal.
        using (SQLCtl sc = new SQLCtl(_Draw, cbDataSource.Text, this.tbSQL.Text, _dsv.QueryParameters))
        {
            if (sc.ShowDialog(this) == DialogResult.OK)
            {
                tbSQL.Text = sc.SQL;
            }
        }
    }

    private void tbTimeout_ValueChanged(object sender, System.EventArgs e)
    {
        _dsv.Timeout = Convert.ToInt32(tbTimeout.Value);
    }
}
/// <summary>
/// Mutable holder for the values of a single DataSet definition: name, data
/// source, command text, timeout, query parameters, field list and the
/// backing XML node. Shared between the DataSet property controls.
/// </summary>
internal class DataSetValues
{
    internal DataSetValues(string name)
    {
        Name = name;
    }

    /// <summary>Name of the DataSet.</summary>
    internal string Name { get; set; }

    /// <summary>Name of the data source the query runs against.</summary>
    internal string DataSourceName { get; set; }

    /// <summary>Command text (e.g. SQL) of the query.</summary>
    internal string CommandText { get; set; }

    /// <summary>Query timeout; 0 means unspecified.</summary>
    internal int Timeout { get; set; }

    /// <summary>Query parameters as a table of Name/Value rows.</summary>
    internal DataTable QueryParameters { get; set; }

    /// <summary>The XML node this DataSet was loaded from.</summary>
    internal XmlNode Node { get; set; }

    /// <summary>Field definitions as Name/QueryName/Value/TypeName rows.</summary>
    internal DataTable Fields { get; set; }

    /// <summary>The DataSet's name, used wherever the object is displayed.</summary>
    public override string ToString()
    {
        return Name;
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System.Text;
using System.Management.Automation.Runspaces;
using System.Collections.ObjectModel;
namespace System.Management.Automation
{
/// <summary>
/// Provides information about a function that is stored in session state.
/// </summary>
/// <summary>
/// Provides information about a function that is stored in session state.
/// </summary>
public class FunctionInfo : CommandInfo, IScriptCommandInfo
{
    #region ctor

    /// <summary>
    /// Creates an instance of the FunctionInfo class with the specified name and ScriptBlock.
    /// </summary>
    /// <param name="name">
    /// The name of the function.
    /// </param>
    /// <param name="function">
    /// The ScriptBlock for the function.
    /// </param>
    /// <param name="context">
    /// The execution context for the function.
    /// </param>
    /// <exception cref="ArgumentNullException">
    /// If <paramref name="function"/> is null.
    /// </exception>
    internal FunctionInfo(string name, ScriptBlock function, ExecutionContext context) : this(name, function, context, null)
    {
    }

    /// <summary>
    /// Creates an instance of the FunctionInfo class with the specified name and ScriptBlock.
    /// </summary>
    /// <param name="name">
    /// The name of the function.
    /// </param>
    /// <param name="function">
    /// The ScriptBlock for the function.
    /// </param>
    /// <param name="context">
    /// The execution context for the function.
    /// </param>
    /// <param name="helpFile">
    /// The name of the help file associated with the function.
    /// </param>
    /// <exception cref="ArgumentNullException">
    /// If <paramref name="function"/> is null.
    /// </exception>
    internal FunctionInfo(string name, ScriptBlock function, ExecutionContext context, string helpFile) : base(name, CommandTypes.Function, context)
    {
        if (function == null)
        {
            // nameof keeps the reported parameter name in sync with the signature.
            throw PSTraceSource.NewArgumentNullException(nameof(function));
        }

        _scriptBlock = function;
        CmdletInfo.SplitCmdletName(name, out _verb, out _noun);
        this.Module = function.Module;
        _helpFile = helpFile;
    }

    /// <summary>
    /// Creates an instance of the FunctionInfo class with the specified name and ScriptBlock.
    /// </summary>
    /// <param name="name">
    /// The name of the function.
    /// </param>
    /// <param name="function">
    /// The ScriptBlock for the function.
    /// </param>
    /// <param name="options">
    /// The options to set on the function. Note, Constant can only be set at creation time.
    /// </param>
    /// <param name="context">
    /// The execution context for the function.
    /// </param>
    /// <exception cref="ArgumentNullException">
    /// If <paramref name="function"/> is null.
    /// </exception>
    internal FunctionInfo(string name, ScriptBlock function, ScopedItemOptions options, ExecutionContext context) : this(name, function, options, context, null)
    {
    }

    /// <summary>
    /// Creates an instance of the FunctionInfo class with the specified name and ScriptBlock.
    /// </summary>
    /// <param name="name">
    /// The name of the function.
    /// </param>
    /// <param name="function">
    /// The ScriptBlock for the function.
    /// </param>
    /// <param name="options">
    /// The options to set on the function. Note, Constant can only be set at creation time.
    /// </param>
    /// <param name="context">
    /// The execution context for the function.
    /// </param>
    /// <param name="helpFile">
    /// The name of the help file associated with the function.
    /// </param>
    /// <exception cref="ArgumentNullException">
    /// If <paramref name="function"/> is null.
    /// </exception>
    internal FunctionInfo(string name, ScriptBlock function, ScopedItemOptions options, ExecutionContext context, string helpFile)
        : this(name, function, context, helpFile)
    {
        _options = options;
    }

    /// <summary>
    /// This is a copy constructor, used primarily for get-command.
    /// </summary>
    internal FunctionInfo(FunctionInfo other)
        : base(other)
    {
        CopyFieldsFromOther(other);
    }

    // Copies the fields that the copy constructors share.
    private void CopyFieldsFromOther(FunctionInfo other)
    {
        _verb = other._verb;
        _noun = other._noun;
        _scriptBlock = other._scriptBlock;
        _description = other._description;
        _options = other._options;
        _helpFile = other._helpFile;
    }

    /// <summary>
    /// This is a copy constructor, used primarily for get-command.
    /// </summary>
    internal FunctionInfo(string name, FunctionInfo other)
        : base(name, other)
    {
        CopyFieldsFromOther(other);

        // Get the verb and noun from the name
        CmdletInfo.SplitCmdletName(name, out _verb, out _noun);
    }

    /// <summary>
    /// Create a copy of commandInfo for GetCommandCommand so that we can generate parameter
    /// sets based on an argument list (so we can get the dynamic parameters.)
    /// </summary>
    internal override CommandInfo CreateGetCommandCopy(object[] arguments)
    {
        FunctionInfo copy = new FunctionInfo(this) { IsGetCommandCopy = true, Arguments = arguments };
        return copy;
    }

    #endregion ctor

    internal override HelpCategory HelpCategory
    {
        get { return HelpCategory.Function; }
    }

    /// <summary>
    /// Gets the ScriptBlock which is the implementation of the function.
    /// </summary>
    public ScriptBlock ScriptBlock
    {
        get { return _scriptBlock; }
    }

    private ScriptBlock _scriptBlock;

    /// <summary>
    /// Updates a function.
    /// </summary>
    /// <param name="newFunction">
    /// The script block that the function should represent.
    /// </param>
    /// <param name="force">
    /// If true, the script block will be applied even if the filter is ReadOnly.
    /// </param>
    /// <param name="options">
    /// Any options to set on the new function, null if none.
    /// </param>
    /// <exception cref="ArgumentNullException">
    /// If <paramref name="newFunction"/> is null.
    /// </exception>
    internal void Update(ScriptBlock newFunction, bool force, ScopedItemOptions options)
    {
        Update(newFunction, force, options, null);

        this.DefiningLanguageMode = newFunction.LanguageMode;
    }

    /// <summary/>
    protected internal virtual void Update(FunctionInfo newFunction, bool force, ScopedItemOptions options, string helpFile)
    {
        Update(newFunction.ScriptBlock, force, options, helpFile);
    }

    /// <summary>
    /// Updates a function.
    /// </summary>
    /// <param name="newFunction">
    /// The script block that the function should represent.
    /// </param>
    /// <param name="force">
    /// If true, the script block will be applied even if the filter is ReadOnly.
    /// </param>
    /// <param name="options">
    /// Any options to set on the new function, null if none.
    /// </param>
    /// <param name="helpFile">
    /// The helpfile for this function.
    /// </param>
    /// <exception cref="ArgumentNullException">
    /// If <paramref name="newFunction"/> is null.
    /// </exception>
    internal void Update(ScriptBlock newFunction, bool force, ScopedItemOptions options, string helpFile)
    {
        if (newFunction == null)
        {
            // BUGFIX: previously reported the wrong parameter name ("function");
            // nameof ties the exception to this method's actual parameter.
            throw PSTraceSource.NewArgumentNullException(nameof(newFunction));
        }

        // A Constant function can never be updated; ReadOnly can only be
        // updated when force is specified.
        if ((_options & ScopedItemOptions.Constant) != 0)
        {
            SessionStateUnauthorizedAccessException e =
                new SessionStateUnauthorizedAccessException(
                    Name,
                    SessionStateCategory.Function,
                    "FunctionIsConstant",
                    SessionStateStrings.FunctionIsConstant);
            throw e;
        }

        if (!force && (_options & ScopedItemOptions.ReadOnly) != 0)
        {
            SessionStateUnauthorizedAccessException e =
                new SessionStateUnauthorizedAccessException(
                    Name,
                    SessionStateCategory.Function,
                    "FunctionIsReadOnly",
                    SessionStateStrings.FunctionIsReadOnly);
            throw e;
        }

        _scriptBlock = newFunction;
        this.Module = newFunction.Module;

        // Invalidate cached metadata so it is recomputed from the new script block.
        _commandMetadata = null;
        this._parameterSets = null;
        this.ExternalCommandMetadata = null;

        if (options != ScopedItemOptions.Unspecified)
        {
            this.Options = options;
        }

        _helpFile = helpFile;
    }

    /// <summary>
    /// Returns <c>true</c> if this function uses cmdlet binding mode for its parameters; otherwise returns <c>false</c>.
    /// </summary>
    public bool CmdletBinding
    {
        get
        {
            return this.ScriptBlock.UsesCmdletBinding;
        }
    }

    /// <summary>
    /// Gets the name of the default parameter set.
    /// Returns <c>null</c> if this function doesn't use cmdlet parameter binding or if the default parameter set wasn't specified.
    /// </summary>
    public string DefaultParameterSet
    {
        get
        {
            return this.CmdletBinding ? this.CommandMetadata.DefaultParameterSetName : null;
        }
    }

    /// <summary>
    /// Gets the definition of the function which is the
    /// ToString() of the ScriptBlock that implements the function.
    /// </summary>
    public override string Definition { get { return _scriptBlock.ToString(); } }

    /// <summary>
    /// Gets or sets the scope options for the function.
    /// </summary>
    /// <exception cref="SessionStateUnauthorizedAccessException">
    /// If the trying to set a function that is constant or
    /// if the value trying to be set is ScopedItemOptions.Constant
    /// </exception>
    public ScopedItemOptions Options
    {
        get
        {
            return CopiedCommand == null ? _options : ((FunctionInfo)CopiedCommand).Options;
        }

        set
        {
            if (CopiedCommand == null)
            {
                // Check to see if the function is constant, if so
                // throw an exception because the options cannot be changed.
                if ((_options & ScopedItemOptions.Constant) != 0)
                {
                    SessionStateUnauthorizedAccessException e =
                        new SessionStateUnauthorizedAccessException(
                            Name,
                            SessionStateCategory.Function,
                            "FunctionIsConstant",
                            SessionStateStrings.FunctionIsConstant);
                    throw e;
                }

                // Now check to see if the caller is trying to set
                // the options to constant. This is only allowed at
                // variable creation
                if ((value & ScopedItemOptions.Constant) != 0)
                {
                    // user is trying to set the function to constant after
                    // creating the function. Do not allow this (as per spec).
                    SessionStateUnauthorizedAccessException e =
                        new SessionStateUnauthorizedAccessException(
                            Name,
                            SessionStateCategory.Function,
                            "FunctionCannotBeMadeConstant",
                            SessionStateStrings.FunctionCannotBeMadeConstant);
                    throw e;
                }

                // Ensure we are not trying to remove the AllScope option
                if ((value & ScopedItemOptions.AllScope) == 0 &&
                    (_options & ScopedItemOptions.AllScope) != 0)
                {
                    SessionStateUnauthorizedAccessException e =
                        new SessionStateUnauthorizedAccessException(
                            this.Name,
                            SessionStateCategory.Function,
                            "FunctionAllScopeOptionCannotBeRemoved",
                            SessionStateStrings.FunctionAllScopeOptionCannotBeRemoved);
                    throw e;
                }

                _options = value;
            }
            else
            {
                ((FunctionInfo)CopiedCommand).Options = value;
            }
        }
    }

    private ScopedItemOptions _options = ScopedItemOptions.None;

    /// <summary>
    /// Gets or sets the description associated with the function.
    /// </summary>
    public string Description
    {
        get
        {
            return CopiedCommand == null ? _description : ((FunctionInfo)CopiedCommand).Description;
        }

        set
        {
            if (CopiedCommand == null)
            {
                _description = value;
            }
            else
            {
                ((FunctionInfo)CopiedCommand).Description = value;
            }
        }
    }

    private string _description = null;

    /// <summary>
    /// Gets the verb of the function.
    /// </summary>
    public string Verb
    {
        get
        {
            return _verb;
        }
    }

    private string _verb = string.Empty;

    /// <summary>
    /// Gets the noun of the function.
    /// </summary>
    public string Noun
    {
        get
        {
            return _noun;
        }
    }

    private string _noun = string.Empty;

    /// <summary>
    /// Gets the help file path for the function.
    /// </summary>
    public string HelpFile
    {
        get
        {
            return _helpFile;
        }

        internal set
        {
            _helpFile = value;
        }
    }

    private string _helpFile = string.Empty;

    /// <summary>
    /// Returns the syntax of a command.
    /// </summary>
    internal override string Syntax
    {
        get
        {
            StringBuilder synopsis = new StringBuilder();

            // One line per parameter set: "<name> <parameter set>".
            foreach (CommandParameterSetInfo parameterSet in ParameterSets)
            {
                synopsis.AppendLine();
                synopsis.AppendLine(
                    string.Format(
                        Globalization.CultureInfo.CurrentCulture,
                        "{0} {1}",
                        Name,
                        parameterSet.ToString()));
            }

            return synopsis.ToString();
        }
    }

    /// <summary>
    /// True if the command has dynamic parameters, false otherwise.
    /// </summary>
    internal override bool ImplementsDynamicParameters
    {
        get { return ScriptBlock.HasDynamicParameters; }
    }

    /// <summary>
    /// The command metadata for the function or filter.
    /// </summary>
    internal override CommandMetadata CommandMetadata
    {
        get
        {
            // Lazily computed; reset to null by Update() when the script block changes.
            return _commandMetadata ??
                   (_commandMetadata =
                    new CommandMetadata(this.ScriptBlock, this.Name, LocalPipeline.GetExecutionContextFromTLS()));
        }
    }

    private CommandMetadata _commandMetadata;

    /// <summary>
    /// The output type(s) is specified in the script block.
    /// </summary>
    public override ReadOnlyCollection<PSTypeName> OutputType
    {
        get { return ScriptBlock.OutputType; }
    }
}
}
| |
using Lucene.Net.Support;
using System.Diagnostics;
namespace Lucene.Net.Codecs.Lucene41
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using IndexInput = Lucene.Net.Store.IndexInput;
/// <summary>
/// Implements the skip list reader for block postings format
/// that stores positions and payloads.
/// <para/>
/// Although this skipper uses MultiLevelSkipListReader as an interface,
/// its definition of skip position will be a little different.
/// <para/>
/// For example, when skipInterval = blockSize = 3, df = 2*skipInterval = 6,
/// <para/>
/// 0 1 2 3 4 5
/// d d d d d d (posting list)
/// ^ ^ (skip point in MultiLeveSkipWriter)
/// ^ (skip point in Lucene41SkipWriter)
/// <para/>
/// In this case, MultiLevelSkipListReader will use the last document as a skip point,
/// while Lucene41SkipReader should assume no skip point will comes.
/// <para/>
/// If we use the interface directly in Lucene41SkipReader, it may silly try to read
/// another skip data after the only skip point is loaded.
/// <para/>
/// To illustrate this, we can call skipTo(d[5]), since skip point d[3] has smaller docId,
/// and numSkipped+blockSize== df, the MultiLevelSkipListReader will assume the skip list
/// isn't exhausted yet, and try to load a non-existed skip point
/// <para/>
/// Therefore, we'll trim df before passing it to the interface. see <see cref="Trim(int)"/>.
/// </summary>
internal sealed class Lucene41SkipReader : MultiLevelSkipListReader
{
    // private boolean DEBUG = Lucene41PostingsReader.DEBUG;

    // Number of postings per block; used by Trim() to detect a docFreq that
    // ends exactly on a block boundary.
    private readonly int blockSize;

    // Per-level state restored as the skipper descends levels.
    private long[] docPointer;
    private long[] posPointer;      // null when the field has no positions
    private long[] payPointer;      // null when the field has neither offsets nor payloads
    private int[] posBufferUpto;
    private int[] payloadByteUpto;  // null when the field has no payloads

    // Values from the most recently consumed skip entry (exposed via the
    // read-only properties below).
    private long lastPosPointer;
    private long lastPayPointer;
    private int lastPayloadByteUpto;
    private long lastDocPointer;
    private int lastPosBufferUpto;

    /// <summary>
    /// Creates a skip reader over <paramref name="skipStream"/>. Position- and
    /// payload-related arrays are only allocated when the field actually has
    /// positions/offsets/payloads; otherwise they stay null and the
    /// corresponding data is neither read nor tracked.
    /// </summary>
    public Lucene41SkipReader(IndexInput skipStream, int maxSkipLevels, int blockSize, bool hasPos, bool hasOffsets, bool hasPayloads)
        : base(skipStream, maxSkipLevels, blockSize, 8)
    {
        this.blockSize = blockSize;
        docPointer = new long[maxSkipLevels];
        if (hasPos)
        {
            posPointer = new long[maxSkipLevels];
            posBufferUpto = new int[maxSkipLevels];
            if (hasPayloads)
            {
                payloadByteUpto = new int[maxSkipLevels];
            }
            else
            {
                payloadByteUpto = null;
            }
            if (hasOffsets || hasPayloads)
            {
                payPointer = new long[maxSkipLevels];
            }
            else
            {
                payPointer = null;
            }
        }
        else
        {
            posPointer = null;
        }
    }

    /// <summary>
    /// Trim original docFreq to tell skipReader read proper number of skip points.
    /// <para/>
    /// Since our definition in Lucene41Skip* is a little different from MultiLevelSkip*
    /// this trimmed docFreq will prevent skipReader from:
    /// 1. silly reading a non-existed skip point after the last block boundary
    /// 2. moving into the vInt block
    /// </summary>
    internal int Trim(int df)
    {
        // When df is an exact multiple of blockSize, the last doc would look
        // like a skip point to the base class; df-1 hides it.
        return df % blockSize == 0 ? df - 1 : df;
    }

    /// <summary>
    /// Positions the reader at <paramref name="skipPointer"/> for a term with
    /// <paramref name="df"/> docs, seeding every level's pointers with the
    /// supplied base file pointers.
    /// </summary>
    public void Init(long skipPointer, long docBasePointer, long posBasePointer, long payBasePointer, int df)
    {
        base.Init(skipPointer, Trim(df));
        lastDocPointer = docBasePointer;
        lastPosPointer = posBasePointer;
        lastPayPointer = payBasePointer;

        Arrays.Fill(docPointer, docBasePointer);
        if (posPointer != null)
        {
            Arrays.Fill(posPointer, posBasePointer);
            if (payPointer != null)
            {
                Arrays.Fill(payPointer, payBasePointer);
            }
        }
        else
        {
            // Without positions there is no pos file, so the base must be 0.
            Debug.Assert(posBasePointer == 0);
        }
    }

    /// <summary>
    /// Returns the doc pointer of the doc to which the last call of
    /// <seealso cref="MultiLevelSkipListReader.SkipTo(int)"/> has skipped.
    /// </summary>
    public long DocPointer => lastDocPointer;

    /// <summary>Position-file pointer at the last skip point.</summary>
    public long PosPointer => lastPosPointer;

    /// <summary>Offset into the position buffer at the last skip point.</summary>
    public int PosBufferUpto => lastPosBufferUpto;

    /// <summary>Payload-file pointer at the last skip point.</summary>
    public long PayPointer => lastPayPointer;

    /// <summary>Payload byte offset at the last skip point.</summary>
    public int PayloadByteUpto => lastPayloadByteUpto;

    /// <summary>Next skip doc on the lowest skip level.</summary>
    public int NextSkipDoc => m_skipDoc[0];

    /// <summary>
    /// Descends one level, seeding the child level's pointers from the values
    /// of the last skip entry read on the parent level.
    /// </summary>
    protected override void SeekChild(int level)
    {
        base.SeekChild(level);
        // if (DEBUG) {
        //   System.out.println("seekChild level=" + level);
        // }
        docPointer[level] = lastDocPointer;
        if (posPointer != null)
        {
            posPointer[level] = lastPosPointer;
            posBufferUpto[level] = lastPosBufferUpto;
            if (payloadByteUpto != null)
            {
                payloadByteUpto[level] = lastPayloadByteUpto;
            }
            if (payPointer != null)
            {
                payPointer[level] = lastPayPointer;
            }
        }
    }

    /// <summary>
    /// Captures the given level's pointers into the last* fields so the
    /// public properties reflect the most recent skip point.
    /// </summary>
    protected override void SetLastSkipData(int level)
    {
        base.SetLastSkipData(level);
        lastDocPointer = docPointer[level];
        // if (DEBUG) {
        //   System.out.println("setLastSkipData level=" + value);
        //   System.out.println("  lastDocPointer=" + lastDocPointer);
        // }
        if (posPointer != null)
        {
            lastPosPointer = posPointer[level];
            lastPosBufferUpto = posBufferUpto[level];
            // if (DEBUG) {
            //   System.out.println("  lastPosPointer=" + lastPosPointer + " lastPosBUfferUpto=" + lastPosBufferUpto);
            // }
            if (payPointer != null)
            {
                lastPayPointer = payPointer[level];
            }
            if (payloadByteUpto != null)
            {
                lastPayloadByteUpto = payloadByteUpto[level];
            }
        }
    }

    /// <summary>
    /// Reads one skip entry at <paramref name="level"/>. The on-disk order is:
    /// doc delta, doc-pointer delta, then (if positions) pos-pointer delta,
    /// posBufferUpto, optional payloadByteUpto, optional pay-pointer delta.
    /// Returns the doc delta. The read order must match the writer exactly.
    /// </summary>
    protected override int ReadSkipData(int level, IndexInput skipStream)
    {
        // if (DEBUG) {
        //   System.out.println("readSkipData level=" + level);
        // }
        int delta = skipStream.ReadVInt32();
        // if (DEBUG) {
        //   System.out.println("  delta=" + delta);
        // }
        docPointer[level] += skipStream.ReadVInt32();
        // if (DEBUG) {
        //   System.out.println("  docFP=" + docPointer[level]);
        // }
        if (posPointer != null)
        {
            posPointer[level] += skipStream.ReadVInt32();
            // if (DEBUG) {
            //   System.out.println("  posFP=" + posPointer[level]);
            // }
            posBufferUpto[level] = skipStream.ReadVInt32();
            // if (DEBUG) {
            //   System.out.println("  posBufferUpto=" + posBufferUpto[level]);
            // }
            if (payloadByteUpto != null)
            {
                payloadByteUpto[level] = skipStream.ReadVInt32();
            }
            if (payPointer != null)
            {
                payPointer[level] += skipStream.ReadVInt32();
            }
        }
        return delta;
    }
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Composition;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.CodeRefactorings;
using Microsoft.CodeAnalysis.CSharp.Extensions;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.CSharp.Utilities;
using Microsoft.CodeAnalysis.FindSymbols;
using Microsoft.CodeAnalysis.Formatting;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Simplification;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.CSharp.CodeRefactorings.InlineTemporary
{
[ExportCodeRefactoringProvider(LanguageNames.CSharp, Name = PredefinedCodeRefactoringProviderNames.InlineTemporary), Shared]
internal partial class InlineTemporaryCodeRefactoringProvider : CodeRefactoringProvider
{
internal static readonly SyntaxAnnotation DefinitionAnnotation = new SyntaxAnnotation();
internal static readonly SyntaxAnnotation ReferenceAnnotation = new SyntaxAnnotation();
internal static readonly SyntaxAnnotation InitializerAnnotation = new SyntaxAnnotation();
internal static readonly SyntaxAnnotation ExpressionToInlineAnnotation = new SyntaxAnnotation();
/// <summary>
/// Offers the "inline temporary variable" refactoring when the selection sits
/// on the identifier of a local variable declarator that has a usable
/// initializer and at least one reference.
/// </summary>
public override async Task ComputeRefactoringsAsync(CodeRefactoringContext context)
{
    var document = context.Document;
    var textSpan = context.Span;
    var cancellationToken = context.CancellationToken;

    // Not offered for miscellaneous (standalone) files.
    if (document.Project.Solution.Workspace.Kind == WorkspaceKind.MiscellaneousFiles)
    {
        return;
    }

    var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
    var token = root.FindToken(textSpan.Start);

    // The selection must lie entirely within a single token.
    if (!token.Span.Contains(textSpan))
    {
        return;
    }

    var node = token.Parent;

    // Only a declarator inside a local declaration statement qualifies.
    if (!node.IsKind(SyntaxKind.VariableDeclarator) ||
        !node.IsParentKind(SyntaxKind.VariableDeclaration) ||
        !node.Parent.IsParentKind(SyntaxKind.LocalDeclarationStatement))
    {
        return;
    }

    var variableDeclarator = (VariableDeclaratorSyntax)node;
    var variableDeclaration = (VariableDeclarationSyntax)variableDeclarator.Parent;
    var localDeclarationStatement = (LocalDeclarationStatementSyntax)variableDeclaration.Parent;

    // The caret must be on the variable's name, and the declarator needs a
    // non-missing initializer; stackalloc expressions cannot be inlined.
    if (variableDeclarator.Identifier != token ||
        variableDeclarator.Initializer == null ||
        variableDeclarator.Initializer.Value.IsMissing ||
        variableDeclarator.Initializer.Value.IsKind(SyntaxKind.StackAllocArrayCreationExpression))
    {
        return;
    }

    // Don't offer the refactoring on code that already has errors/diagnostics.
    if (localDeclarationStatement.ContainsDiagnostics)
    {
        return;
    }

    // Nothing to inline when the local is never referenced.
    var references = await GetReferencesAsync(document, variableDeclarator, cancellationToken).ConfigureAwait(false);
    if (!references.Any())
    {
        return;
    }

    context.RegisterRefactoring(
        new MyCodeAction(
            CSharpFeaturesResources.InlineTemporaryVariable,
            (c) => this.InlineTemporaryAsync(document, variableDeclarator, c)));
}
/// <summary>
/// Finds all reference locations of the declared local. Returns an empty
/// sequence when the symbol cannot be resolved, or when any reference
/// overlaps a hidden position in the tree (where edits are not permitted).
/// </summary>
private async Task<IEnumerable<ReferenceLocation>> GetReferencesAsync(
    Document document,
    VariableDeclaratorSyntax variableDeclarator,
    CancellationToken cancellationToken)
{
    var semanticModel = await document.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
    var local = semanticModel.GetDeclaredSymbol(variableDeclarator, cancellationToken);

    if (local != null)
    {
        var findReferencesResult = await SymbolFinder.FindReferencesAsync(local, document.Project.Solution, cancellationToken).ConfigureAwait(false);
        // A local has exactly one definition; take its reference locations.
        var locations = findReferencesResult.Single(r => r.Definition == local).Locations;
        if (!locations.Any(loc => semanticModel.SyntaxTree.OverlapsHiddenPosition(loc.Location.SourceSpan, cancellationToken)))
        {
            return locations;
        }
    }

    return SpecializedCollections.EmptyEnumerable<ReferenceLocation>();
}
/// <summary>
/// Returns true when inlining over <paramref name="identifier"/> would be
/// unsafe: the reference appears before the declaration, is passed by
/// ref/out, is operand of ++/--/address-of, or is the target of an assignment.
/// </summary>
private static bool HasConflict(IdentifierNameSyntax identifier, VariableDeclaratorSyntax variableDeclarator)
{
    // TODO: Check for more conflict types.
    if (identifier.SpanStart < variableDeclarator.SpanStart)
    {
        return true;
    }

    // Walk outward through enclosing parentheses/casts so the checks below
    // examine the outermost expression that still denotes this reference.
    var identifierNode = identifier
        .Ancestors()
        .TakeWhile(n => n.Kind() == SyntaxKind.ParenthesizedExpression || n.Kind() == SyntaxKind.CastExpression)
        .LastOrDefault();

    if (identifierNode == null)
    {
        identifierNode = identifier;
    }

    if (identifierNode.IsParentKind(SyntaxKind.Argument))
    {
        // ref/out arguments require a writable variable, which the inlined
        // expression would not be.
        var argument = (ArgumentSyntax)identifierNode.Parent;
        if (argument.RefOrOutKeyword.Kind() != SyntaxKind.None)
        {
            return true;
        }
    }
    else if (identifierNode.Parent.IsKind(
        SyntaxKind.PreDecrementExpression,
        SyntaxKind.PreIncrementExpression,
        SyntaxKind.PostDecrementExpression,
        SyntaxKind.PostIncrementExpression,
        SyntaxKind.AddressOfExpression))
    {
        // ++/--/& each require an addressable/writable operand.
        return true;
    }
    else if (identifierNode.Parent is AssignmentExpressionSyntax)
    {
        // Being on the left of an assignment is a write to the local.
        var binaryExpression = (AssignmentExpressionSyntax)identifierNode.Parent;
        if (binaryExpression.Left == identifierNode)
        {
            return true;
        }
    }

    return false;
}
// Builds the annotation used to mark nodes whose inlining was detected to
// change semantics.
private static SyntaxAnnotation CreateConflictAnnotation()
{
    var description = CSharpFeaturesResources.ConflictsDetected;
    return ConflictAnnotation.Create(description);
}
// Performs the actual inlining: replaces every non-conflicting reference to the
// temporary with its (semantically expanded) initializer expression and, when
// no new conflicts were introduced, removes the declaration itself.
//
// Every ReplaceNode*Async call yields a new syntax tree, which is why the
// declarator and semantic model are re-fetched (FindDeclaratorAsync /
// GetSemanticModelAsync) after each mutation — node identity does not survive
// a rewrite.
private async Task<Document> InlineTemporaryAsync(Document document, VariableDeclaratorSyntax declarator, CancellationToken cancellationToken)
{
var workspace = document.Project.Solution.Workspace;
// Annotate the variable declarator so that we can get back to it later.
var updatedDocument = await document.ReplaceNodeAsync(declarator, declarator.WithAdditionalAnnotations(DefinitionAnnotation), cancellationToken).ConfigureAwait(false);
var semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
var variableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
// Create the expression that we're actually going to inline.
var expressionToInline = await CreateExpressionToInlineAsync(variableDeclarator, updatedDocument, cancellationToken).ConfigureAwait(false);
// Collect the identifier names for each reference.
var local = (ILocalSymbol)semanticModel.GetDeclaredSymbol(variableDeclarator, cancellationToken);
var symbolRefs = await SymbolFinder.FindReferencesAsync(local, updatedDocument.Project.Solution, cancellationToken).ConfigureAwait(false);
var references = symbolRefs.Single(r => r.Definition == local).Locations;
var syntaxRoot = await updatedDocument.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
// Collect the topmost parenting expression for each reference.
// References that HasConflict flags (write targets, ref/out, etc.) are
// deliberately left alone.
var nonConflictingIdentifierNodes = references
.Select(loc => (IdentifierNameSyntax)syntaxRoot.FindToken(loc.Location.SourceSpan.Start).Parent)
.Where(ident => !HasConflict(ident, variableDeclarator));
// Add referenceAnnotations to identifier nodes being replaced.
updatedDocument = await updatedDocument.ReplaceNodesAsync(
nonConflictingIdentifierNodes,
(o, n) => n.WithAdditionalAnnotations(ReferenceAnnotation),
cancellationToken).ConfigureAwait(false);
semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
variableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
// Get the annotated reference nodes.
nonConflictingIdentifierNodes = await FindReferenceAnnotatedNodesAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
var topmostParentingExpressions = nonConflictingIdentifierNodes
.Select(ident => GetTopMostParentingExpression(ident))
.Distinct();
// Remember what the original initializer bound to, so semantic drift can be
// detected after inlining (see DetectSemanticConflicts).
var originalInitializerSymbolInfo = semanticModel.GetSymbolInfo(variableDeclarator.Initializer.Value, cancellationToken);
// Make each topmost parenting statement or Equals Clause Expressions semantically explicit.
updatedDocument = await updatedDocument.ReplaceNodesAsync(topmostParentingExpressions, (o, n) => Simplifier.Expand(n, semanticModel, workspace, cancellationToken: cancellationToken), cancellationToken).ConfigureAwait(false);
semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
var semanticModelBeforeInline = semanticModel;
variableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
var scope = GetScope(variableDeclarator);
// Rewrite every annotated reference inside the scope to the inlined expression.
var newScope = ReferenceRewriter.Visit(semanticModel, scope, variableDeclarator, expressionToInline, cancellationToken);
updatedDocument = await updatedDocument.ReplaceNodeAsync(scope, newScope, cancellationToken).ConfigureAwait(false);
semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
variableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
newScope = GetScope(variableDeclarator);
var conflicts = newScope.GetAnnotatedNodesAndTokens(ConflictAnnotation.Kind);
var declaratorConflicts = variableDeclarator.GetAnnotatedNodesAndTokens(ConflictAnnotation.Kind);
// Note that we only remove the local declaration if there weren't any conflicts,
// unless those conflicts are inside the local declaration.
if (conflicts.Count() == declaratorConflicts.Count())
{
// Certain semantic conflicts can be detected only after the reference rewriter has inlined the expression
var newDocument = await DetectSemanticConflicts(updatedDocument,
semanticModel,
semanticModelBeforeInline,
originalInitializerSymbolInfo,
cancellationToken).ConfigureAwait(false);
if (updatedDocument == newDocument)
{
// No semantic conflicts, we can remove the definition.
updatedDocument = await updatedDocument.ReplaceNodeAsync(newScope, RemoveDeclaratorFromScope(variableDeclarator, newScope), cancellationToken).ConfigureAwait(false);
}
else
{
// There were some semantic conflicts, don't remove the definition.
updatedDocument = newDocument;
}
}
return updatedDocument;
}
// Re-locates the declarator tagged with DefinitionAnnotation in the current
// document snapshot (node identity changes after every rewrite).
private static async Task<VariableDeclaratorSyntax> FindDeclaratorAsync(Document document, CancellationToken cancellationToken)
{
    var declarator = await FindNodeWithAnnotationAsync<VariableDeclaratorSyntax>(document, DefinitionAnnotation, cancellationToken).ConfigureAwait(false);
    return declarator;
}
// Re-locates the initializer expression tagged with InitializerAnnotation in
// the current document snapshot.
private static async Task<ExpressionSyntax> FindInitializerAsync(Document document, CancellationToken cancellationToken)
{
    var initializer = await FindNodeWithAnnotationAsync<ExpressionSyntax>(document, InitializerAnnotation, cancellationToken).ConfigureAwait(false);
    return initializer;
}
// Finds the single node in the document carrying the given annotation and
// returns it as T (null when the annotated node is not a T).
private static async Task<T> FindNodeWithAnnotationAsync<T>(Document document, SyntaxAnnotation annotation, CancellationToken cancellationToken)
where T : SyntaxNode
{
    var syntaxRoot = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
    // Exactly one annotated node/token is expected; Single() enforces that.
    var annotated = syntaxRoot.GetAnnotatedNodesAndTokens(annotation).Single();
    return annotated.AsNode() as T;
}
// Async wrapper: fetches the document's root and delegates to the synchronous
// reference-node scan.
private static async Task<IEnumerable<IdentifierNameSyntax>> FindReferenceAnnotatedNodesAsync(Document document, CancellationToken cancellationToken)
{
    var syntaxRoot = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
    return FindReferenceAnnotatedNodes(syntaxRoot);
}
// Lazily enumerates every identifier name that was tagged with
// ReferenceAnnotation (tokens and non-identifier nodes are skipped).
private static IEnumerable<IdentifierNameSyntax> FindReferenceAnnotatedNodes(SyntaxNode root)
{
    return root
        .GetAnnotatedNodesAndTokens(ReferenceAnnotation)
        .Where(nodeOrToken => nodeOrToken.IsNode && nodeOrToken.AsNode().IsKind(SyntaxKind.IdentifierName))
        .Select(nodeOrToken => (IdentifierNameSyntax)nodeOrToken.AsNode());
}
// Determines the node inside which references to the declarator are rewritten:
// the parent of its local declaration statement, widened past any labels and
// past any enclosing expression tree.
private SyntaxNode GetScope(VariableDeclaratorSyntax variableDeclarator)
{
    var declaration = (VariableDeclarationSyntax)variableDeclarator.Parent;
    var statement = (LocalDeclarationStatementSyntax)declaration.Parent;
    var scope = statement.Parent;

    // Skip over any label statements wrapping the declaration.
    while (scope.IsKind(SyntaxKind.LabeledStatement))
    {
        scope = scope.Parent;
    }

    // If the scope itself sits inside an expression, widen to the parent of
    // the outermost enclosing expression.
    var outermostExpression = scope.AncestorsAndSelf().OfType<ExpressionSyntax>().LastOrDefault();
    if (outermostExpression != null)
    {
        scope = outermostExpression.Parent;
    }

    return scope;
}
// Synchronous counterpart of FindDeclaratorAsync: locates the annotated
// declarator beneath an arbitrary node.
private VariableDeclaratorSyntax FindDeclarator(SyntaxNode node)
{
    var annotated = node.GetAnnotatedNodesAndTokens(DefinitionAnnotation).ToList();
    Contract.Requires(annotated.Count == 1, "Only a single variable declarator should have been annotated.");
    return (VariableDeclaratorSyntax)annotated[0].AsNode();
}
// Keeps the trivia list only when it carries comments or directives;
// plain whitespace is dropped.
private SyntaxTriviaList GetTriviaToPreserve(SyntaxTriviaList syntaxTriviaList)
{
    if (ShouldPreserve(syntaxTriviaList))
    {
        return syntaxTriviaList;
    }

    return default(SyntaxTriviaList);
}
// Trivia is worth preserving when it contains user-visible content:
// single/multi-line comments or preprocessor directives.
private static bool ShouldPreserve(SyntaxTriviaList trivia)
{
    foreach (var piece in trivia)
    {
        if (piece.IsDirective ||
            piece.Kind() == SyntaxKind.SingleLineCommentTrivia ||
            piece.Kind() == SyntaxKind.MultiLineCommentTrivia)
        {
            return true;
        }
    }

    return false;
}
// Removes one declarator from a multi-variable declaration, leaving its
// siblings in place, and returns the rewritten scope.
private SyntaxNode RemoveDeclaratorFromVariableList(VariableDeclaratorSyntax variableDeclarator, VariableDeclarationSyntax variableDeclaration)
{
    // Only valid when the declarator shares the declaration with at least one sibling.
    Debug.Assert(variableDeclaration.Variables.Count > 1);
    Debug.Assert(variableDeclaration.Variables.Contains(variableDeclarator));

    var localDeclaration = (LocalDeclarationStatementSyntax)variableDeclaration.Parent;
    var enclosingScope = GetScope(variableDeclarator);

    var prunedDeclaration = localDeclaration
        .RemoveNode(variableDeclarator, SyntaxRemoveOptions.KeepNoTrivia)
        .WithAdditionalAnnotations(Formatter.Annotation);

    return enclosingScope.ReplaceNode(localDeclaration, prunedDeclaration);
}
// Removes 'variableDeclarator' from 'scope'. If the declarator has siblings,
// only it is dropped; otherwise the whole local declaration statement is
// removed and its significant trivia is re-attached to the following token.
private SyntaxNode RemoveDeclaratorFromScope(VariableDeclaratorSyntax variableDeclarator, SyntaxNode scope)
{
var variableDeclaration = (VariableDeclarationSyntax)variableDeclarator.Parent;
// If there is more than one variable declarator, remove this one from the variable declaration.
if (variableDeclaration.Variables.Count > 1)
{
return RemoveDeclaratorFromVariableList(variableDeclarator, variableDeclaration);
}
var localDeclaration = (LocalDeclarationStatementSyntax)variableDeclaration.Parent;
// There's only one variable declarator, so we'll remove the local declaration
// statement entirely. This means that we'll concatenate the leading and trailing
// trivia of this declaration and move it to the next statement.
// Drop trailing whitespace-only pieces from the leading trivia; comments and
// directives survive.
var leadingTrivia = localDeclaration
.GetLeadingTrivia()
.Reverse()
.SkipWhile(t => t.MatchesKind(SyntaxKind.WhitespaceTrivia))
.Reverse()
.ToSyntaxTriviaList();
// Likewise drop leading whitespace/newlines from the trailing trivia.
var trailingTrivia = localDeclaration
.GetTrailingTrivia()
.SkipWhile(t => t.MatchesKind(SyntaxKind.WhitespaceTrivia, SyntaxKind.EndOfLineTrivia))
.ToSyntaxTriviaList();
var newLeadingTrivia = leadingTrivia.Concat(trailingTrivia);
// GetNextTokenOrEndOfFile ensures the preserved trivia lands somewhere even
// when the declaration is the last statement in the file.
var nextToken = localDeclaration.GetLastToken().GetNextTokenOrEndOfFile();
var newNextToken = nextToken.WithPrependedLeadingTrivia(newLeadingTrivia)
.WithAdditionalAnnotations(Formatter.Annotation);
var newScope = scope.ReplaceToken(nextToken, newNextToken);
// The token replacement produced a new tree; re-find the declaration in it
// via the declarator's DefinitionAnnotation before removing it.
var newLocalDeclaration = (LocalDeclarationStatementSyntax)FindDeclarator(newScope).Parent.Parent;
// If the local is parented by a label statement, we can't remove this statement. Instead,
// we'll replace the local declaration with an empty expression statement.
if (newLocalDeclaration.IsParentKind(SyntaxKind.LabeledStatement))
{
var labeledStatement = (LabeledStatementSyntax)newLocalDeclaration.Parent;
var newLabeledStatement = labeledStatement.ReplaceNode(newLocalDeclaration, SyntaxFactory.ParseStatement(""));
return newScope.ReplaceNode(labeledStatement, newLabeledStatement);
}
return newScope.RemoveNode(newLocalDeclaration, SyntaxRemoveOptions.KeepNoTrivia);
}
// Peels off outer parentheses that wrap either another parenthesized
// expression or a bare identifier; any other parenthesized content keeps its
// outermost parentheses.
private ExpressionSyntax SkipRedundantExteriorParentheses(ExpressionSyntax expression)
{
    while (expression.IsKind(SyntaxKind.ParenthesizedExpression))
    {
        var inner = ((ParenthesizedExpressionSyntax)expression).Expression;

        // Stop on malformed/missing content.
        if (inner == null || inner.IsMissing)
        {
            break;
        }

        var unwrappable =
            inner.IsKind(SyntaxKind.ParenthesizedExpression) ||
            inner.IsKind(SyntaxKind.IdentifierName);
        if (!unwrappable)
        {
            break;
        }

        expression = inner;
    }

    return expression;
}
// Builds the expression that will be substituted for each reference to the
// temporary: strips redundant outer parentheses, rewrites the initializer via
// InitializerRewriter, turns a bare array initializer into an explicit array
// creation, adds an explicit cast when needed to preserve the declared type,
// and finally expands the result to be semantically explicit.
private async Task<ExpressionSyntax> CreateExpressionToInlineAsync(
VariableDeclaratorSyntax variableDeclarator,
Document document,
CancellationToken cancellationToken)
{
var updatedDocument = document;
var expression = SkipRedundantExteriorParentheses(variableDeclarator.Initializer.Value);
var semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
var localSymbol = (ILocalSymbol)semanticModel.GetDeclaredSymbol(variableDeclarator, cancellationToken);
var newExpression = InitializerRewriter.Visit(expression, localSymbol, semanticModel);
// If this is an array initializer, we need to transform it into an array creation
// expression for inlining.
if (newExpression.Kind() == SyntaxKind.ArrayInitializerExpression)
{
var arrayType = (ArrayTypeSyntax)localSymbol.Type.GenerateTypeSyntax();
var arrayInitializer = (InitializerExpressionSyntax)newExpression;
// Add any non-whitespace trailing trivia from the equals clause to the type.
var equalsToken = variableDeclarator.Initializer.EqualsToken;
if (equalsToken.HasTrailingTrivia)
{
var trailingTrivia = equalsToken.TrailingTrivia.SkipInitialWhitespace();
if (trailingTrivia.Any())
{
arrayType = arrayType.WithTrailingTrivia(trailingTrivia);
}
}
newExpression = SyntaxFactory.ArrayCreationExpression(arrayType, arrayInitializer);
}
// Tag the initializer so it can be located again after the document is rewritten.
newExpression = newExpression.WithAdditionalAnnotations(InitializerAnnotation);
updatedDocument = await updatedDocument.ReplaceNodeAsync(variableDeclarator.Initializer.Value, newExpression, cancellationToken).ConfigureAwait(false);
// The tree changed: re-resolve the semantic model, initializer and declarator.
semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
newExpression = await FindInitializerAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
var newVariableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
localSymbol = (ILocalSymbol)semanticModel.GetDeclaredSymbol(newVariableDeclarator, cancellationToken);
bool wasCastAdded;
var explicitCastExpression = newExpression.CastIfPossible(localSymbol.Type, newVariableDeclarator.SpanStart, semanticModel, out wasCastAdded);
if (wasCastAdded)
{
// A cast was required to keep the reference's type identical to the
// declared local's type; splice it in and re-resolve once more.
updatedDocument = await updatedDocument.ReplaceNodeAsync(newExpression, explicitCastExpression, cancellationToken).ConfigureAwait(false);
semanticModel = await updatedDocument.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
newVariableDeclarator = await FindDeclaratorAsync(updatedDocument, cancellationToken).ConfigureAwait(false);
}
// Now that the variable declarator is normalized, make its initializer
// value semantically explicit.
newExpression = await Simplifier.ExpandAsync(newVariableDeclarator.Initializer.Value, updatedDocument, cancellationToken: cancellationToken).ConfigureAwait(false);
return newExpression.WithAdditionalAnnotations(ExpressionToInlineAnnotation);
}
// Returns the outermost ExpressionSyntax on the node's ancestor chain
// (including the node itself).
private static SyntaxNode GetTopMostParentingExpression(ExpressionSyntax expression)
{
    var expressionChain = expression.AncestorsAndSelf().OfType<ExpressionSyntax>();
    return expressionChain.Last();
}
private static async Task<Document> DetectSemanticConflicts(
Document inlinedDocument,
SemanticModel newSemanticModelForInlinedDocument,
SemanticModel semanticModelBeforeInline,
SymbolInfo originalInitializerSymbolInfo,
CancellationToken cancellationToken)
{
// In this method we detect if inlining the expression introduced the following semantic change:
// The symbol info associated with any of the inlined expressions does not match the symbol info for original initializer expression prior to inline.
// If any semantic changes were introduced by inlining, we update the document with conflict annotations.
// Otherwise we return the given inlined document without any changes.
var syntaxRootBeforeInline = await semanticModelBeforeInline.SyntaxTree.GetRootAsync(cancellationToken).ConfigureAwait(false);
// Get all the identifier nodes which were replaced with inlined expression.
var originalIdentifierNodes = FindReferenceAnnotatedNodes(syntaxRootBeforeInline);
if (originalIdentifierNodes.IsEmpty())
{
// No conflicts
return inlinedDocument;
}
// Get all the inlined expression nodes.
var syntaxRootAfterInline = await inlinedDocument.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
var inlinedExprNodes = syntaxRootAfterInline.GetAnnotatedNodesAndTokens(ExpressionToInlineAnnotation);
Debug.Assert(originalIdentifierNodes.Count() == inlinedExprNodes.Count());
Dictionary<SyntaxNode, SyntaxNode> replacementNodesWithChangedSemantics = null;
// Walk the two sequences in lock step; they are the same length per the
// assert above (one inlined expression per replaced identifier).
using (var originalNodesEnum = originalIdentifierNodes.GetEnumerator())
{
using (var inlinedNodesOrTokensEnum = inlinedExprNodes.GetEnumerator())
{
while (originalNodesEnum.MoveNext())
{
inlinedNodesOrTokensEnum.MoveNext();
var originalNode = originalNodesEnum.Current;
// expressionToInline is Parenthesized prior to replacement, so get the parenting parenthesized expression.
var inlinedNode = (ExpressionSyntax)inlinedNodesOrTokensEnum.Current.Parent;
Debug.Assert(inlinedNode.IsKind(SyntaxKind.ParenthesizedExpression));
// inlinedNode is the expanded form of the actual initializer expression in the original document.
// We have annotated the inner initializer with a special syntax annotation "InitializerAnnotation".
// Get this annotated node and compute the symbol info for this node in the inlined document.
var innerInitializerInInlineNodeOrToken = inlinedNode.GetAnnotatedNodesAndTokens(InitializerAnnotation).First();
ExpressionSyntax innerInitializerInInlineNode = (ExpressionSyntax)(innerInitializerInInlineNodeOrToken.IsNode ?
innerInitializerInInlineNodeOrToken.AsNode() :
innerInitializerInInlineNodeOrToken.AsToken().Parent);
var newInitializerSymbolInfo = newSemanticModelForInlinedDocument.GetSymbolInfo(innerInitializerInInlineNode, cancellationToken);
// Verification: The symbol info associated with any of the inlined expressions does not match the symbol info for original initializer expression prior to inline.
if (!SpeculationAnalyzer.SymbolInfosAreCompatible(originalInitializerSymbolInfo, newInitializerSymbolInfo, performEquivalenceCheck: true))
{
// Before declaring a conflict, also try the whole parenthesized
// inlined node — its binding may still match the original.
newInitializerSymbolInfo = newSemanticModelForInlinedDocument.GetSymbolInfo(inlinedNode, cancellationToken);
if (!SpeculationAnalyzer.SymbolInfosAreCompatible(originalInitializerSymbolInfo, newInitializerSymbolInfo, performEquivalenceCheck: true))
{
if (replacementNodesWithChangedSemantics == null)
{
replacementNodesWithChangedSemantics = new Dictionary<SyntaxNode, SyntaxNode>();
}
replacementNodesWithChangedSemantics.Add(inlinedNode, originalNode);
}
}
}
}
}
if (replacementNodesWithChangedSemantics == null)
{
// No conflicts.
return inlinedDocument;
}
// Replace the conflicting inlined nodes with the original nodes annotated with conflict annotation.
Func<SyntaxNode, SyntaxNode, SyntaxNode> conflictAnnotationAdder =
(SyntaxNode oldNode, SyntaxNode newNode) =>
newNode.WithAdditionalAnnotations(ConflictAnnotation.Create(CSharpFeaturesResources.ConflictsDetected));
return await inlinedDocument.ReplaceNodesAsync(replacementNodesWithChangedSemantics.Keys, conflictAnnotationAdder, cancellationToken).ConfigureAwait(false);
}
// Thin document-change code action: all the work happens in the
// createChangedDocument delegate supplied by the caller (InlineTemporaryAsync).
private class MyCodeAction : CodeAction.DocumentChangeAction
{
public MyCodeAction(string title, Func<CancellationToken, Task<Document>> createChangedDocument) :
base(title, createChangedDocument)
{
}
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Data;
using System.Reflection;
using System.Collections.Generic;
using log4net;
using MySql.Data.MySqlClient;
using OpenMetaverse;
using OpenSim.Framework;
namespace OpenSim.Data.MySQL
{
/// <summary>
/// MySQL implementation of the XInventory store. Folders are persisted through
/// the generic table handler, items through the specialised MySqlItemHandler.
/// </summary>
public class MySQLXInventoryData : IXInventoryData
{
    private MySQLGenericTableHandler<XInventoryFolder> m_Folders;
    private MySqlItemHandler m_Items;

    public MySQLXInventoryData(string conn, string realm)
    {
        m_Folders = new MySQLGenericTableHandler<XInventoryFolder>(
                conn, "inventoryfolders", "InventoryStore");
        m_Items = new MySqlItemHandler(
                conn, "inventoryitems", String.Empty);
    }

    public XInventoryFolder[] GetFolders(string[] fields, string[] vals)
    {
        return m_Folders.Get(fields, vals);
    }

    public XInventoryItem[] GetItems(string[] fields, string[] vals)
    {
        return m_Items.Get(fields, vals);
    }

    public bool StoreFolder(XInventoryFolder folder)
    {
        // Folder names are clipped to 64 characters before storing.
        string folderName = folder.folderName;
        if (folderName.Length > 64)
            folder.folderName = folderName.Substring(0, 64);

        return m_Folders.Store(folder);
    }

    public bool StoreItem(XInventoryItem item)
    {
        // Item names are clipped to 64 characters, descriptions to 128.
        string itemName = item.inventoryName;
        if (itemName.Length > 64)
            item.inventoryName = itemName.Substring(0, 64);

        string itemDescription = item.inventoryDescription;
        if (itemDescription.Length > 128)
            item.inventoryDescription = itemDescription.Substring(0, 128);

        return m_Items.Store(item);
    }

    public bool DeleteFolders(string field, string val)
    {
        return m_Folders.Delete(field, val);
    }

    public bool DeleteItems(string field, string val)
    {
        return m_Items.Delete(field, val);
    }

    public bool MoveItem(string id, string newParent)
    {
        return m_Items.MoveItem(id, newParent);
    }

    public XInventoryItem[] GetActiveGestures(UUID principalID)
    {
        return m_Items.GetActiveGestures(principalID);
    }

    public int GetAssetPermissions(UUID principalID, UUID assetID)
    {
        return m_Items.GetAssetPermissions(principalID, assetID);
    }
}
/// <summary>
/// Inventory item table handler adding item-specific operations (move, active
/// gestures, asset permissions, folder version bump) on top of the generic
/// CRUD handler.
/// </summary>
public class MySqlItemHandler : MySQLGenericTableHandler<XInventoryItem>
{
    public MySqlItemHandler(string c, string t, string m) :
        base(c, t, m)
    {
    }

    /// <summary>
    /// Reparents an inventory item.
    /// </summary>
    /// <returns>True if a row was actually updated.</returns>
    public bool MoveItem(string id, string newParent)
    {
        using (MySqlCommand cmd = new MySqlCommand())
        {
            cmd.CommandText = String.Format("update {0} set parentFolderID = ?ParentFolderID where inventoryID = ?InventoryID", m_Realm);
            cmd.Parameters.AddWithValue("?ParentFolderID", newParent);
            cmd.Parameters.AddWithValue("?InventoryID", id);

            return ExecuteNonQuery(cmd) != 0;
        }
    }

    /// <summary>
    /// Returns all gesture-type items of the given avatar that are flagged active.
    /// </summary>
    public XInventoryItem[] GetActiveGestures(UUID principalID)
    {
        using (MySqlCommand cmd = new MySqlCommand())
        {
            // Bug fix: the table name was hard-coded as "inventoryitems"
            // although m_Realm was passed as a format argument and silently
            // ignored; use {0} like the other queries in this handler.
            cmd.CommandText = String.Format("select * from {0} where avatarId = ?uuid and assetType = ?type and flags = 1", m_Realm);
            cmd.Parameters.AddWithValue("?uuid", principalID.ToString());
            cmd.Parameters.AddWithValue("?type", (int)AssetType.Gesture);

            return DoQuery(cmd);
        }
    }

    /// <summary>
    /// ORs together the current permissions of every copy of the given asset
    /// owned by the avatar.
    /// </summary>
    /// <returns>The combined permission mask, or 0 when no copies exist.</returns>
    public int GetAssetPermissions(UUID principalID, UUID assetID)
    {
        using (MySqlConnection dbcon = new MySqlConnection(m_connectionString))
        {
            dbcon.Open();

            using (MySqlCommand cmd = new MySqlCommand())
            {
                cmd.Connection = dbcon;
                // Bug fix: same as GetActiveGestures — substitute the
                // configured realm instead of hard-coding the table name.
                cmd.CommandText = String.Format("select bit_or(inventoryCurrentPermissions) as inventoryCurrentPermissions from {0} where avatarID = ?PrincipalID and assetID = ?AssetID group by assetID", m_Realm);
                cmd.Parameters.AddWithValue("?PrincipalID", principalID.ToString());
                cmd.Parameters.AddWithValue("?AssetID", assetID.ToString());

                using (IDataReader reader = cmd.ExecuteReader())
                {
                    int perms = 0;
                    if (reader.Read())
                    {
                        perms = Convert.ToInt32(reader["inventoryCurrentPermissions"]);
                    }
                    return perms;
                }
            }
        }
    }

    /// <summary>
    /// Stores the item, then bumps the version of its parent folder so that
    /// clients notice the folder contents changed.
    /// </summary>
    /// <returns>False when either the store or the version bump fails.</returns>
    public override bool Store(XInventoryItem item)
    {
        if (!base.Store(item))
            return false;

        using (MySqlConnection dbcon = new MySqlConnection(m_connectionString))
        {
            dbcon.Open();

            using (MySqlCommand cmd = new MySqlCommand())
            {
                cmd.Connection = dbcon;
                // Plain string: the previous String.Format call had no format
                // arguments and was a no-op.
                cmd.CommandText = "update inventoryfolders set version=version+1 where folderID = ?folderID";
                cmd.Parameters.AddWithValue("?folderID", item.parentFolderID.ToString());

                try
                {
                    cmd.ExecuteNonQuery();
                }
                catch (Exception)
                {
                    // The item itself is already stored; report failure so the
                    // caller knows the folder version could not be bumped.
                    return false;
                }
                // The redundant cmd.Dispose()/dbcon.Close() calls were removed:
                // both using blocks dispose (and close) their resources.
            }
        }

        return true;
    }
}
}
| |
using Microsoft.IdentityModel.S2S.Protocols.OAuth2;
using Microsoft.IdentityModel.Tokens;
using Microsoft.SharePoint.Client;
using System;
using System.Net;
using System.Security.Principal;
using System.Web;
using System.Web.Configuration;
namespace ModifyPagesWeb
{
/// <summary>
/// Encapsulates all the information from SharePoint.
/// </summary>
public abstract class SharePointContext
{
// Query-string keys SharePoint appends to app launch requests.
public const string SPHostUrlKey = "SPHostUrl";
public const string SPAppWebUrlKey = "SPAppWebUrl";
public const string SPLanguageKey = "SPLanguage";
public const string SPClientTagKey = "SPClientTag";
public const string SPProductNumberKey = "SPProductNumber";
// NOTE(review): presumably used by subclasses when deciding whether a cached
// token is close enough to expiry to refresh — usage not visible in this file.
protected static readonly TimeSpan AccessTokenLifetimeTolerance = TimeSpan.FromMinutes(5.0);
private readonly Uri spHostUrl;
private readonly Uri spAppWebUrl;
private readonly string spLanguage;
private readonly string spClientTag;
private readonly string spProductNumber;
// <AccessTokenString, UtcExpiresOn>
protected Tuple<string, DateTime> userAccessTokenForSPHost;
protected Tuple<string, DateTime> userAccessTokenForSPAppWeb;
protected Tuple<string, DateTime> appOnlyAccessTokenForSPHost;
protected Tuple<string, DateTime> appOnlyAccessTokenForSPAppWeb;
/// <summary>
/// Gets the SharePoint host url from QueryString of the specified HTTP request.
/// </summary>
/// <param name="httpRequest">The specified HTTP request.</param>
/// <returns>The SharePoint host url. Returns <c>null</c> if the HTTP request doesn't contain the SharePoint host url.</returns>
public static Uri GetSPHostUrl(HttpRequestBase httpRequest)
{
if (httpRequest == null)
{
throw new ArgumentNullException("httpRequest");
}
string spHostUrlString = TokenHelper.EnsureTrailingSlash(httpRequest.QueryString[SPHostUrlKey]);
Uri spHostUrl;
// Only absolute http/https URLs are accepted; anything else yields null.
if (Uri.TryCreate(spHostUrlString, UriKind.Absolute, out spHostUrl) &&
(spHostUrl.Scheme == Uri.UriSchemeHttp || spHostUrl.Scheme == Uri.UriSchemeHttps))
{
return spHostUrl;
}
return null;
}
/// <summary>
/// Gets the SharePoint host url from QueryString of the specified HTTP request.
/// </summary>
/// <param name="httpRequest">The specified HTTP request.</param>
/// <returns>The SharePoint host url. Returns <c>null</c> if the HTTP request doesn't contain the SharePoint host url.</returns>
public static Uri GetSPHostUrl(HttpRequest httpRequest)
{
return GetSPHostUrl(new HttpRequestWrapper(httpRequest));
}
/// <summary>
/// The SharePoint host url.
/// </summary>
public Uri SPHostUrl
{
get { return this.spHostUrl; }
}
/// <summary>
/// The SharePoint app web url.
/// </summary>
public Uri SPAppWebUrl
{
get { return this.spAppWebUrl; }
}
/// <summary>
/// The SharePoint language.
/// </summary>
public string SPLanguage
{
get { return this.spLanguage; }
}
/// <summary>
/// The SharePoint client tag.
/// </summary>
public string SPClientTag
{
get { return this.spClientTag; }
}
/// <summary>
/// The SharePoint product number.
/// </summary>
public string SPProductNumber
{
get { return this.spProductNumber; }
}
/// <summary>
/// The user access token for the SharePoint host.
/// </summary>
public abstract string UserAccessTokenForSPHost
{
get;
}
/// <summary>
/// The user access token for the SharePoint app web.
/// </summary>
public abstract string UserAccessTokenForSPAppWeb
{
get;
}
/// <summary>
/// The app only access token for the SharePoint host.
/// </summary>
public abstract string AppOnlyAccessTokenForSPHost
{
get;
}
/// <summary>
/// The app only access token for the SharePoint app web.
/// </summary>
public abstract string AppOnlyAccessTokenForSPAppWeb
{
get;
}
/// <summary>
/// Constructor.
/// </summary>
/// <param name="spHostUrl">The SharePoint host url.</param>
/// <param name="spAppWebUrl">The SharePoint app web url. May be null (not validated): not every app has an app web.</param>
/// <param name="spLanguage">The SharePoint language.</param>
/// <param name="spClientTag">The SharePoint client tag.</param>
/// <param name="spProductNumber">The SharePoint product number.</param>
protected SharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber)
{
if (spHostUrl == null)
{
throw new ArgumentNullException("spHostUrl");
}
if (string.IsNullOrEmpty(spLanguage))
{
throw new ArgumentNullException("spLanguage");
}
if (string.IsNullOrEmpty(spClientTag))
{
throw new ArgumentNullException("spClientTag");
}
if (string.IsNullOrEmpty(spProductNumber))
{
throw new ArgumentNullException("spProductNumber");
}
this.spHostUrl = spHostUrl;
this.spAppWebUrl = spAppWebUrl;
this.spLanguage = spLanguage;
this.spClientTag = spClientTag;
this.spProductNumber = spProductNumber;
}
/// <summary>
/// Creates a user ClientContext for the SharePoint host.
/// </summary>
/// <returns>A ClientContext instance.</returns>
public ClientContext CreateUserClientContextForSPHost()
{
return CreateClientContext(this.SPHostUrl, this.UserAccessTokenForSPHost);
}
/// <summary>
/// Creates a user ClientContext for the SharePoint app web.
/// </summary>
/// <returns>A ClientContext instance.</returns>
public ClientContext CreateUserClientContextForSPAppWeb()
{
return CreateClientContext(this.SPAppWebUrl, this.UserAccessTokenForSPAppWeb);
}
/// <summary>
/// Creates app only ClientContext for the SharePoint host.
/// </summary>
/// <returns>A ClientContext instance.</returns>
public ClientContext CreateAppOnlyClientContextForSPHost()
{
return CreateClientContext(this.SPHostUrl, this.AppOnlyAccessTokenForSPHost);
}
/// <summary>
/// Creates an app only ClientContext for the SharePoint app web.
/// </summary>
/// <returns>A ClientContext instance.</returns>
public ClientContext CreateAppOnlyClientContextForSPAppWeb()
{
return CreateClientContext(this.SPAppWebUrl, this.AppOnlyAccessTokenForSPAppWeb);
}
/// <summary>
/// Gets the database connection string from SharePoint for autohosted app.
/// </summary>
/// <returns>The database connection string. Returns <c>null</c> if the app is not autohosted or there is no database.</returns>
public string GetDatabaseConnectionString()
{
string dbConnectionString = null;
using (ClientContext clientContext = CreateAppOnlyClientContextForSPHost())
{
if (clientContext != null)
{
var result = AppInstance.RetrieveAppDatabaseConnectionString(clientContext);
clientContext.ExecuteQuery();
dbConnectionString = result.Value;
}
}
if (dbConnectionString == null)
{
// Fall back to the LocalDB instance configured in web.config for local
// debugging when SharePoint did not return a connection string.
const string LocalDBInstanceForDebuggingKey = "LocalDBInstanceForDebugging";
var dbConnectionStringSettings = WebConfigurationManager.ConnectionStrings[LocalDBInstanceForDebuggingKey];
dbConnectionString = dbConnectionStringSettings != null ? dbConnectionStringSettings.ConnectionString : null;
}
return dbConnectionString;
}
/// <summary>
/// Determines if the specified access token is valid.
/// It considers an access token as not valid if it is null, or it has expired.
/// </summary>
/// <param name="accessToken">The access token to verify.</param>
/// <returns>True if the access token is valid.</returns>
protected static bool IsAccessTokenValid(Tuple<string, DateTime> accessToken)
{
return accessToken != null &&
!string.IsNullOrEmpty(accessToken.Item1) &&
accessToken.Item2 > DateTime.UtcNow;
}
/// <summary>
/// Creates a ClientContext with the specified SharePoint site url and the access token.
/// Returns <c>null</c> (rather than throwing) when either piece is missing.
/// </summary>
/// <param name="spSiteUrl">The site url.</param>
/// <param name="accessToken">The access token.</param>
/// <returns>A ClientContext instance.</returns>
private static ClientContext CreateClientContext(Uri spSiteUrl, string accessToken)
{
if (spSiteUrl != null && !string.IsNullOrEmpty(accessToken))
{
return TokenHelper.GetClientContextWithAccessToken(spSiteUrl.AbsoluteUri, accessToken);
}
return null;
}
}
/// <summary>
/// Redirection status.
/// </summary>
public enum RedirectionStatus
{
    /// <summary>A SharePointContext is available; no redirection is needed.</summary>
    Ok,
    /// <summary>The caller should redirect to SharePoint so the user can authenticate.</summary>
    ShouldRedirect,
    /// <summary>Redirection is not possible (e.g. POST request, missing SPHostUrl, or already redirected once).</summary>
    CanNotRedirect
}
/// <summary>
/// Provides SharePointContext instances.
/// </summary>
public abstract class SharePointContextProvider
{
    // Process-wide provider instance; chosen by the static constructor and replaceable via Register().
    private static SharePointContextProvider current;

    /// <summary>
    /// The current SharePointContextProvider instance.
    /// </summary>
    public static SharePointContextProvider Current
    {
        get { return SharePointContextProvider.current; }
    }

    /// <summary>
    /// Initializes the default SharePointContextProvider instance.
    /// </summary>
    static SharePointContextProvider()
    {
        // High-trust (server-to-server) apps get the high-trust provider; all others go through ACS.
        if (!TokenHelper.IsHighTrustApp())
        {
            SharePointContextProvider.current = new SharePointAcsContextProvider();
        }
        else
        {
            SharePointContextProvider.current = new SharePointHighTrustContextProvider();
        }
    }

    /// <summary>
    /// Registers the specified SharePointContextProvider instance as current.
    /// It should be called by Application_Start() in Global.asax.
    /// </summary>
    /// <param name="provider">The SharePointContextProvider to be set as current.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="provider"/> is null.</exception>
    public static void Register(SharePointContextProvider provider)
    {
        if (provider == null)
        {
            throw new ArgumentNullException("provider");
        }

        SharePointContextProvider.current = provider;
    }

    /// <summary>
    /// Checks if it is necessary to redirect to SharePoint for user to authenticate.
    /// </summary>
    /// <param name="httpContext">The HTTP context.</param>
    /// <param name="redirectUrl">The redirect url to SharePoint if the status is ShouldRedirect. <c>Null</c> if the status is Ok or CanNotRedirect.</param>
    /// <returns>Redirection status.</returns>
    public static RedirectionStatus CheckRedirectionStatus(HttpContextBase httpContext, out Uri redirectUrl)
    {
        if (httpContext == null)
        {
            throw new ArgumentNullException("httpContext");
        }

        redirectUrl = null;

        // A usable context already exists (or could be created) - no redirect needed.
        if (SharePointContextProvider.Current.GetSharePointContext(httpContext) != null)
        {
            return RedirectionStatus.Ok;
        }

        // This flag is appended below before redirecting. If it is already present we have been
        // through SharePoint once and still have no context, so give up rather than loop forever.
        const string SPHasRedirectedToSharePointKey = "SPHasRedirectedToSharePoint";

        if (!string.IsNullOrEmpty(httpContext.Request.QueryString[SPHasRedirectedToSharePointKey]))
        {
            return RedirectionStatus.CanNotRedirect;
        }

        Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);

        if (spHostUrl == null)
        {
            return RedirectionStatus.CanNotRedirect;
        }

        // Redirecting a POST would drop the form body, so it is not attempted.
        if (StringComparer.OrdinalIgnoreCase.Equals(httpContext.Request.HttpMethod, "POST"))
        {
            return RedirectionStatus.CanNotRedirect;
        }

        Uri requestUrl = httpContext.Request.Url;

        var queryNameValueCollection = HttpUtility.ParseQueryString(requestUrl.Query);

        // Removes the values that are included in {StandardTokens}, as {StandardTokens} will be inserted at the beginning of the query string.
        queryNameValueCollection.Remove(SharePointContext.SPHostUrlKey);
        queryNameValueCollection.Remove(SharePointContext.SPAppWebUrlKey);
        queryNameValueCollection.Remove(SharePointContext.SPLanguageKey);
        queryNameValueCollection.Remove(SharePointContext.SPClientTagKey);
        queryNameValueCollection.Remove(SharePointContext.SPProductNumberKey);

        // Adds SPHasRedirectedToSharePoint=1.
        queryNameValueCollection.Add(SPHasRedirectedToSharePointKey, "1");

        UriBuilder returnUrlBuilder = new UriBuilder(requestUrl);
        returnUrlBuilder.Query = queryNameValueCollection.ToString();

        // Inserts StandardTokens.
        const string StandardTokens = "{StandardTokens}";
        string returnUrlString = returnUrlBuilder.Uri.AbsoluteUri;
        // The query always contains at least SPHasRedirectedToSharePoint=1, so '?' is present here.
        returnUrlString = returnUrlString.Insert(returnUrlString.IndexOf("?") + 1, StandardTokens + "&");

        // Constructs redirect url.
        string redirectUrlString = TokenHelper.GetAppContextTokenRequestUrl(spHostUrl.AbsoluteUri, Uri.EscapeDataString(returnUrlString));

        redirectUrl = new Uri(redirectUrlString, UriKind.Absolute);

        return RedirectionStatus.ShouldRedirect;
    }

    /// <summary>
    /// Checks if it is necessary to redirect to SharePoint for user to authenticate.
    /// </summary>
    /// <param name="httpContext">The HTTP context.</param>
    /// <param name="redirectUrl">The redirect url to SharePoint if the status is ShouldRedirect. <c>Null</c> if the status is Ok or CanNotRedirect.</param>
    /// <returns>Redirection status.</returns>
    public static RedirectionStatus CheckRedirectionStatus(HttpContext httpContext, out Uri redirectUrl)
    {
        // Convenience overload: wrap the classic HttpContext and delegate.
        return CheckRedirectionStatus(new HttpContextWrapper(httpContext), out redirectUrl);
    }

    /// <summary>
    /// Creates a SharePointContext instance with the specified HTTP request.
    /// </summary>
    /// <param name="httpRequest">The HTTP request.</param>
    /// <returns>The SharePointContext instance. Returns <c>null</c> if errors occur.</returns>
    public SharePointContext CreateSharePointContext(HttpRequestBase httpRequest)
    {
        if (httpRequest == null)
        {
            throw new ArgumentNullException("httpRequest");
        }

        // SPHostUrl - required.
        Uri spHostUrl = SharePointContext.GetSPHostUrl(httpRequest);
        if (spHostUrl == null)
        {
            return null;
        }

        // SPAppWebUrl - optional: an invalid or non-http(s) value is treated as absent.
        string spAppWebUrlString = TokenHelper.EnsureTrailingSlash(httpRequest.QueryString[SharePointContext.SPAppWebUrlKey]);
        Uri spAppWebUrl;
        if (!Uri.TryCreate(spAppWebUrlString, UriKind.Absolute, out spAppWebUrl) ||
            !(spAppWebUrl.Scheme == Uri.UriSchemeHttp || spAppWebUrl.Scheme == Uri.UriSchemeHttps))
        {
            spAppWebUrl = null;
        }

        // SPLanguage - required.
        string spLanguage = httpRequest.QueryString[SharePointContext.SPLanguageKey];
        if (string.IsNullOrEmpty(spLanguage))
        {
            return null;
        }

        // SPClientTag - required.
        string spClientTag = httpRequest.QueryString[SharePointContext.SPClientTagKey];
        if (string.IsNullOrEmpty(spClientTag))
        {
            return null;
        }

        // SPProductNumber - required.
        string spProductNumber = httpRequest.QueryString[SharePointContext.SPProductNumberKey];
        if (string.IsNullOrEmpty(spProductNumber))
        {
            return null;
        }

        // Delegate the mode-specific construction (ACS vs. high-trust) to the subclass.
        return CreateSharePointContext(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber, httpRequest);
    }

    /// <summary>
    /// Creates a SharePointContext instance with the specified HTTP request.
    /// </summary>
    /// <param name="httpRequest">The HTTP request.</param>
    /// <returns>The SharePointContext instance. Returns <c>null</c> if errors occur.</returns>
    public SharePointContext CreateSharePointContext(HttpRequest httpRequest)
    {
        // Convenience overload: wrap the classic HttpRequest and delegate.
        return CreateSharePointContext(new HttpRequestWrapper(httpRequest));
    }

    /// <summary>
    /// Gets a SharePointContext instance associated with the specified HTTP context.
    /// </summary>
    /// <param name="httpContext">The HTTP context.</param>
    /// <returns>The SharePointContext instance. Returns <c>null</c> if not found and a new instance can't be created.</returns>
    public SharePointContext GetSharePointContext(HttpContextBase httpContext)
    {
        if (httpContext == null)
        {
            throw new ArgumentNullException("httpContext");
        }

        Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);
        if (spHostUrl == null)
        {
            return null;
        }

        // Prefer a previously saved context; rebuild (and re-save) only when it is
        // missing or no longer valid for this request.
        SharePointContext spContext = LoadSharePointContext(httpContext);

        if (spContext == null || !ValidateSharePointContext(spContext, httpContext))
        {
            spContext = CreateSharePointContext(httpContext.Request);

            if (spContext != null)
            {
                SaveSharePointContext(spContext, httpContext);
            }
        }

        return spContext;
    }

    /// <summary>
    /// Gets a SharePointContext instance associated with the specified HTTP context.
    /// </summary>
    /// <param name="httpContext">The HTTP context.</param>
    /// <returns>The SharePointContext instance. Returns <c>null</c> if not found and a new instance can't be created.</returns>
    public SharePointContext GetSharePointContext(HttpContext httpContext)
    {
        // Convenience overload: wrap the classic HttpContext and delegate.
        return GetSharePointContext(new HttpContextWrapper(httpContext));
    }

    /// <summary>
    /// Creates a SharePointContext instance.
    /// </summary>
    /// <param name="spHostUrl">The SharePoint host url.</param>
    /// <param name="spAppWebUrl">The SharePoint app web url.</param>
    /// <param name="spLanguage">The SharePoint language.</param>
    /// <param name="spClientTag">The SharePoint client tag.</param>
    /// <param name="spProductNumber">The SharePoint product number.</param>
    /// <param name="httpRequest">The HTTP request.</param>
    /// <returns>The SharePointContext instance. Returns <c>null</c> if errors occur.</returns>
    protected abstract SharePointContext CreateSharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, HttpRequestBase httpRequest);

    /// <summary>
    /// Validates if the given SharePointContext can be used with the specified HTTP context.
    /// </summary>
    /// <param name="spContext">The SharePointContext.</param>
    /// <param name="httpContext">The HTTP context.</param>
    /// <returns>True if the given SharePointContext can be used with the specified HTTP context.</returns>
    protected abstract bool ValidateSharePointContext(SharePointContext spContext, HttpContextBase httpContext);

    /// <summary>
    /// Loads the SharePointContext instance associated with the specified HTTP context.
    /// </summary>
    /// <param name="httpContext">The HTTP context.</param>
    /// <returns>The SharePointContext instance. Returns <c>null</c> if not found.</returns>
    protected abstract SharePointContext LoadSharePointContext(HttpContextBase httpContext);

    /// <summary>
    /// Saves the specified SharePointContext instance associated with the specified HTTP context.
    /// <c>null</c> is accepted for clearing the SharePointContext instance associated with the HTTP context.
    /// </summary>
    /// <param name="spContext">The SharePointContext instance to be saved, or <c>null</c>.</param>
    /// <param name="httpContext">The HTTP context.</param>
    protected abstract void SaveSharePointContext(SharePointContext spContext, HttpContextBase httpContext);
}
#region ACS
/// <summary>
/// Encapsulates all the information from SharePoint in ACS mode.
/// </summary>
public class SharePointAcsContext : SharePointContext
{
    // Raw context token string as received from SharePoint, and its parsed form.
    private readonly string contextToken;
    private readonly SharePointContextToken contextTokenObj;

    /// <summary>
    /// The context token. Returns <c>null</c> once the token has expired.
    /// </summary>
    public string ContextToken
    {
        get { return this.contextTokenObj.ValidTo > DateTime.UtcNow ? this.contextToken : null; }
    }

    /// <summary>
    /// The context token's "CacheKey" claim. Returns <c>null</c> once the token has expired.
    /// </summary>
    public string CacheKey
    {
        get { return this.contextTokenObj.ValidTo > DateTime.UtcNow ? this.contextTokenObj.CacheKey : null; }
    }

    /// <summary>
    /// The context token's "refreshtoken" claim. Returns <c>null</c> once the token has expired.
    /// </summary>
    public string RefreshToken
    {
        get { return this.contextTokenObj.ValidTo > DateTime.UtcNow ? this.contextTokenObj.RefreshToken : null; }
    }

    // NOTE: the userAccessTokenForSPHost/appOnlyAccessTokenForSPHost/etc. fields used below are
    // cache fields declared on the base SharePointContext class (not visible in this chunk).
    public override string UserAccessTokenForSPHost
    {
        get
        {
            return GetAccessTokenString(ref this.userAccessTokenForSPHost,
                () => TokenHelper.GetAccessToken(this.contextTokenObj, this.SPHostUrl.Authority));
        }
    }

    public override string UserAccessTokenForSPAppWeb
    {
        get
        {
            // No app web for this app instance - no token to hand out.
            if (this.SPAppWebUrl == null)
            {
                return null;
            }

            return GetAccessTokenString(ref this.userAccessTokenForSPAppWeb,
                () => TokenHelper.GetAccessToken(this.contextTokenObj, this.SPAppWebUrl.Authority));
        }
    }

    public override string AppOnlyAccessTokenForSPHost
    {
        get
        {
            return GetAccessTokenString(ref this.appOnlyAccessTokenForSPHost,
                () => TokenHelper.GetAppOnlyAccessToken(TokenHelper.SharePointPrincipal, this.SPHostUrl.Authority, TokenHelper.GetRealmFromTargetUrl(this.SPHostUrl)));
        }
    }

    public override string AppOnlyAccessTokenForSPAppWeb
    {
        get
        {
            if (this.SPAppWebUrl == null)
            {
                return null;
            }

            return GetAccessTokenString(ref this.appOnlyAccessTokenForSPAppWeb,
                () => TokenHelper.GetAppOnlyAccessToken(TokenHelper.SharePointPrincipal, this.SPAppWebUrl.Authority, TokenHelper.GetRealmFromTargetUrl(this.SPAppWebUrl)));
        }
    }

    /// <summary>
    /// Constructs an ACS-mode context from the standard SharePoint request values plus the context token.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when the context token (string or parsed form) is missing.</exception>
    public SharePointAcsContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, string contextToken, SharePointContextToken contextTokenObj)
        : base(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber)
    {
        if (string.IsNullOrEmpty(contextToken))
        {
            throw new ArgumentNullException("contextToken");
        }

        if (contextTokenObj == null)
        {
            throw new ArgumentNullException("contextTokenObj");
        }

        this.contextToken = contextToken;
        this.contextTokenObj = contextTokenObj;
    }

    /// <summary>
    /// Ensures the access token is valid and returns it.
    /// </summary>
    /// <param name="accessToken">The access token to verify.</param>
    /// <param name="tokenRenewalHandler">The token renewal handler.</param>
    /// <returns>The access token string, or <c>null</c> when renewal failed and no valid token is cached.</returns>
    private static string GetAccessTokenString(ref Tuple<string, DateTime> accessToken, Func<OAuth2AccessTokenResponse> tokenRenewalHandler)
    {
        RenewAccessTokenIfNeeded(ref accessToken, tokenRenewalHandler);

        return IsAccessTokenValid(accessToken) ? accessToken.Item1 : null;
    }

    /// <summary>
    /// Renews the access token if it is not valid.
    /// </summary>
    /// <param name="accessToken">The access token to renew.</param>
    /// <param name="tokenRenewalHandler">The token renewal handler.</param>
    private static void RenewAccessTokenIfNeeded(ref Tuple<string, DateTime> accessToken, Func<OAuth2AccessTokenResponse> tokenRenewalHandler)
    {
        if (IsAccessTokenValid(accessToken))
        {
            return;
        }

        try
        {
            OAuth2AccessTokenResponse oAuth2AccessTokenResponse = tokenRenewalHandler();

            DateTime expiresOn = oAuth2AccessTokenResponse.ExpiresOn;

            // AccessTokenLifetimeTolerance is a constant from the base SharePointContext class.
            if ((expiresOn - oAuth2AccessTokenResponse.NotBefore) > AccessTokenLifetimeTolerance)
            {
                // Make the access token get renewed a bit earlier than the time when it expires
                // so that the calls to SharePoint with it will have enough time to complete successfully.
                expiresOn -= AccessTokenLifetimeTolerance;
            }

            accessToken = Tuple.Create(oAuth2AccessTokenResponse.AccessToken, expiresOn);
        }
        catch (WebException)
        {
            // Best effort: a failed renewal leaves the cached token unchanged, and the
            // caller (GetAccessTokenString) will then return null for the invalid token.
        }
    }
}
/// <summary>
/// Default provider for SharePointAcsContext.
/// </summary>
public class SharePointAcsContextProvider : SharePointContextProvider
{
    // Session key for the saved context and cookie name for the context token's cache key.
    private const string SPContextKey = "SPContext";
    private const string SPCacheKeyKey = "SPCacheKey";

    protected override SharePointContext CreateSharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, HttpRequestBase httpRequest)
    {
        string contextTokenString = TokenHelper.GetContextTokenFromRequest(httpRequest);
        if (string.IsNullOrEmpty(contextTokenString))
        {
            return null;
        }

        SharePointContextToken contextToken = null;
        try
        {
            // Validates the token's signature and audience against this app's authority.
            contextToken = TokenHelper.ReadAndValidateContextToken(contextTokenString, httpRequest.Url.Authority);
        }
        catch (WebException)
        {
            return null;
        }
        catch (AudienceUriValidationFailedException)
        {
            return null;
        }

        return new SharePointAcsContext(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber, contextTokenString, contextToken);
    }

    protected override bool ValidateSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
    {
        SharePointAcsContext spAcsContext = spContext as SharePointAcsContext;

        if (spAcsContext != null)
        {
            Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);
            string contextToken = TokenHelper.GetContextTokenFromRequest(httpContext.Request);
            HttpCookie spCacheKeyCookie = httpContext.Request.Cookies[SPCacheKeyKey];
            string spCacheKey = spCacheKeyCookie != null ? spCacheKeyCookie.Value : null;

            // The saved context is valid only when it matches the request's host url and cache-key
            // cookie, its token is still live, and any token in this request matches the saved one.
            return spHostUrl == spAcsContext.SPHostUrl &&
                   !string.IsNullOrEmpty(spAcsContext.CacheKey) &&
                   spCacheKey == spAcsContext.CacheKey &&
                   !string.IsNullOrEmpty(spAcsContext.ContextToken) &&
                   (string.IsNullOrEmpty(contextToken) || contextToken == spAcsContext.ContextToken);
        }

        return false;
    }

    protected override SharePointContext LoadSharePointContext(HttpContextBase httpContext)
    {
        // Contexts are kept in session state; a cast failure (different mode) yields null.
        return httpContext.Session[SPContextKey] as SharePointAcsContext;
    }

    protected override void SaveSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
    {
        SharePointAcsContext spAcsContext = spContext as SharePointAcsContext;

        if (spAcsContext != null)
        {
            // Persist the cache key in a secure, http-only cookie so the next request
            // can be validated against the session-stored context.
            HttpCookie spCacheKeyCookie = new HttpCookie(SPCacheKeyKey)
            {
                Value = spAcsContext.CacheKey,
                Secure = true,
                HttpOnly = true
            };

            httpContext.Response.AppendCookie(spCacheKeyCookie);
        }

        // Saving null (or a non-ACS context) clears the stored context.
        httpContext.Session[SPContextKey] = spAcsContext;
    }
}
#endregion ACS
#region HighTrust
/// <summary>
/// Encapsulates all the information from SharePoint in HighTrust mode.
/// </summary>
public class SharePointHighTrustContext : SharePointContext
{
    private readonly WindowsIdentity logonUserIdentity;

    /// <summary>
    /// The Windows identity for the current user.
    /// </summary>
    public WindowsIdentity LogonUserIdentity
    {
        get { return this.logonUserIdentity; }
    }

    // NOTE: the userAccessTokenForSPHost/appOnlyAccessTokenForSPHost/etc. fields used below are
    // cache fields declared on the base SharePointContext class (not visible in this chunk).
    public override string UserAccessTokenForSPHost
    {
        get
        {
            return GetAccessTokenString(ref this.userAccessTokenForSPHost,
                () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPHostUrl, this.LogonUserIdentity));
        }
    }

    public override string UserAccessTokenForSPAppWeb
    {
        get
        {
            // No app web for this app instance - no token to hand out.
            if (this.SPAppWebUrl == null)
            {
                return null;
            }

            return GetAccessTokenString(ref this.userAccessTokenForSPAppWeb,
                () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPAppWebUrl, this.LogonUserIdentity));
        }
    }

    public override string AppOnlyAccessTokenForSPHost
    {
        get
        {
            // Passing a null identity yields an app-only (no user) S2S token.
            return GetAccessTokenString(ref this.appOnlyAccessTokenForSPHost,
                () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPHostUrl, null));
        }
    }

    public override string AppOnlyAccessTokenForSPAppWeb
    {
        get
        {
            if (this.SPAppWebUrl == null)
            {
                return null;
            }

            return GetAccessTokenString(ref this.appOnlyAccessTokenForSPAppWeb,
                () => TokenHelper.GetS2SAccessTokenWithWindowsIdentity(this.SPAppWebUrl, null));
        }
    }

    /// <summary>
    /// Constructs a high-trust context from the standard SharePoint request values plus the user's Windows identity.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="logonUserIdentity"/> is null.</exception>
    public SharePointHighTrustContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, WindowsIdentity logonUserIdentity)
        : base(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber)
    {
        if (logonUserIdentity == null)
        {
            throw new ArgumentNullException("logonUserIdentity");
        }

        this.logonUserIdentity = logonUserIdentity;
    }

    /// <summary>
    /// Ensures the access token is valid and returns it.
    /// </summary>
    /// <param name="accessToken">The access token to verify.</param>
    /// <param name="tokenRenewalHandler">The token renewal handler.</param>
    /// <returns>The access token string, or <c>null</c> when no valid token is available.</returns>
    private static string GetAccessTokenString(ref Tuple<string, DateTime> accessToken, Func<string> tokenRenewalHandler)
    {
        RenewAccessTokenIfNeeded(ref accessToken, tokenRenewalHandler);

        return IsAccessTokenValid(accessToken) ? accessToken.Item1 : null;
    }

    /// <summary>
    /// Renews the access token if it is not valid.
    /// </summary>
    /// <param name="accessToken">The access token to renew.</param>
    /// <param name="tokenRenewalHandler">The token renewal handler.</param>
    private static void RenewAccessTokenIfNeeded(ref Tuple<string, DateTime> accessToken, Func<string> tokenRenewalHandler)
    {
        if (IsAccessTokenValid(accessToken))
        {
            return;
        }

        // High-trust tokens are minted locally, so the expiration is computed here rather
        // than read from a token service response as in the ACS case.
        DateTime expiresOn = DateTime.UtcNow.Add(TokenHelper.HighTrustAccessTokenLifetime);

        if (TokenHelper.HighTrustAccessTokenLifetime > AccessTokenLifetimeTolerance)
        {
            // Make the access token get renewed a bit earlier than the time when it expires
            // so that the calls to SharePoint with it will have enough time to complete successfully.
            expiresOn -= AccessTokenLifetimeTolerance;
        }

        accessToken = Tuple.Create(tokenRenewalHandler(), expiresOn);
    }
}
/// <summary>
/// Default provider for SharePointHighTrustContext.
/// </summary>
public class SharePointHighTrustContextProvider : SharePointContextProvider
{
    // Session key for the saved context.
    private const string SPContextKey = "SPContext";

    protected override SharePointContext CreateSharePointContext(Uri spHostUrl, Uri spAppWebUrl, string spLanguage, string spClientTag, string spProductNumber, HttpRequestBase httpRequest)
    {
        WindowsIdentity logonUserIdentity = httpRequest.LogonUserIdentity;
        // Require a real, authenticated Windows user (not anonymous/guest) to build a context.
        if (logonUserIdentity == null || !logonUserIdentity.IsAuthenticated || logonUserIdentity.IsGuest || logonUserIdentity.User == null)
        {
            return null;
        }

        return new SharePointHighTrustContext(spHostUrl, spAppWebUrl, spLanguage, spClientTag, spProductNumber, logonUserIdentity);
    }

    protected override bool ValidateSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
    {
        SharePointHighTrustContext spHighTrustContext = spContext as SharePointHighTrustContext;

        if (spHighTrustContext != null)
        {
            Uri spHostUrl = SharePointContext.GetSPHostUrl(httpContext.Request);
            WindowsIdentity logonUserIdentity = httpContext.Request.LogonUserIdentity;

            // The saved context is valid only for the same host url and the same Windows
            // user (compared by SID), who must still be authenticated and not a guest.
            return spHostUrl == spHighTrustContext.SPHostUrl &&
                   logonUserIdentity != null &&
                   logonUserIdentity.IsAuthenticated &&
                   !logonUserIdentity.IsGuest &&
                   logonUserIdentity.User == spHighTrustContext.LogonUserIdentity.User;
        }

        return false;
    }

    protected override SharePointContext LoadSharePointContext(HttpContextBase httpContext)
    {
        // Contexts are kept in session state; a cast failure (different mode) yields null.
        return httpContext.Session[SPContextKey] as SharePointHighTrustContext;
    }

    protected override void SaveSharePointContext(SharePointContext spContext, HttpContextBase httpContext)
    {
        // Saving null (or a non-high-trust context) clears the stored context.
        httpContext.Session[SPContextKey] = spContext as SharePointHighTrustContext;
    }
}
#endregion HighTrust
}
// NOTE(review): a stray '|' character sat here — an artifact of two unrelated source files
// being concatenated (SharePointContext.cs above, generated iControl proxy below). Removed.
namespace iControl {
using System.Xml.Serialization;
using System.Web.Services;
using System.ComponentModel;
using System.Web.Services.Protocols;
using System;
using System.Diagnostics;
/// <remarks/>
[System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.3038")]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Web.Services.WebServiceBindingAttribute(Name="LocalLB.ProfileFIXBinding", Namespace="urn:iControl")]
[System.Xml.Serialization.SoapIncludeAttribute(typeof(LocalLBProfileFIXFIXProfileStatistics))]
[System.Xml.Serialization.SoapIncludeAttribute(typeof(LocalLBProfileFIXProfileErrorAction))]
[System.Xml.Serialization.SoapIncludeAttribute(typeof(LocalLBProfileEnabledState))]
[System.Xml.Serialization.SoapIncludeAttribute(typeof(LocalLBProfileString))]
[System.Xml.Serialization.SoapIncludeAttribute(typeof(LocalLBProfileULong))]
[System.Xml.Serialization.SoapIncludeAttribute(typeof(LocalLBProfileFIXProfileSenderTagClassArray))]
[System.Xml.Serialization.SoapIncludeAttribute(typeof(LocalLBProfileStatisticsByVirtual))]
public partial class LocalLBProfileFIX : iControlInterface {
// Generated proxy constructor. "https://url_to_service" is a placeholder; callers are
// expected to set Url to the actual device endpoint before invoking any operation.
public LocalLBProfileFIX() {
    this.Url = "https://url_to_service";
}
//=======================================================================
// Operations
//=======================================================================
//-----------------------------------------------------------------------
// create
//-----------------------------------------------------------------------
// Synchronous wrapper for the "create" SOAP RPC operation (wsdl-generated; do not hand-edit).
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
    RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
public void create(
    string [] profile_names
) {
    this.Invoke("create", new object [] {
        profile_names});
}

// Classic APM Begin/End pair for "create".
public System.IAsyncResult Begincreate(string [] profile_names, System.AsyncCallback callback, object asyncState) {
    return this.BeginInvoke("create", new object[] {
        profile_names}, callback, asyncState);
}

public void Endcreate(System.IAsyncResult asyncResult) {
    object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// delete_all_profiles
//-----------------------------------------------------------------------
// Synchronous wrapper for the "delete_all_profiles" SOAP RPC operation (wsdl-generated).
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
    RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
public void delete_all_profiles(
) {
    this.Invoke("delete_all_profiles", new object [0]);
}

// Classic APM Begin/End pair for "delete_all_profiles".
public System.IAsyncResult Begindelete_all_profiles(System.AsyncCallback callback, object asyncState) {
    return this.BeginInvoke("delete_all_profiles", new object[0], callback, asyncState);
}

public void Enddelete_all_profiles(System.IAsyncResult asyncResult) {
    object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// delete_profile
//-----------------------------------------------------------------------
// Synchronous wrapper for the "delete_profile" SOAP RPC operation (wsdl-generated).
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
    RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
public void delete_profile(
    string [] profile_names
) {
    this.Invoke("delete_profile", new object [] {
        profile_names});
}

// Classic APM Begin/End pair for "delete_profile".
public System.IAsyncResult Begindelete_profile(string [] profile_names, System.AsyncCallback callback, object asyncState) {
    return this.BeginInvoke("delete_profile", new object[] {
        profile_names}, callback, asyncState);
}

public void Enddelete_profile(System.IAsyncResult asyncResult) {
    object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// get_all_statistics
//-----------------------------------------------------------------------
// Synchronous wrapper for the "get_all_statistics" SOAP RPC operation (wsdl-generated).
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
    RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public LocalLBProfileFIXFIXProfileStatistics get_all_statistics(
) {
    object [] results = this.Invoke("get_all_statistics", new object [0]);
    return ((LocalLBProfileFIXFIXProfileStatistics)(results[0]));
}

// Classic APM Begin/End pair for "get_all_statistics".
public System.IAsyncResult Beginget_all_statistics(System.AsyncCallback callback, object asyncState) {
    return this.BeginInvoke("get_all_statistics", new object[0], callback, asyncState);
}

public LocalLBProfileFIXFIXProfileStatistics Endget_all_statistics(System.IAsyncResult asyncResult) {
    object [] results = this.EndInvoke(asyncResult);
    return ((LocalLBProfileFIXFIXProfileStatistics)(results[0]));
}
//-----------------------------------------------------------------------
// get_default_profile
//-----------------------------------------------------------------------
// Synchronous wrapper for the "get_default_profile" SOAP RPC operation (wsdl-generated).
// Returns one result per entry in profile_names.
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
    RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public string [] get_default_profile(
    string [] profile_names
) {
    object [] results = this.Invoke("get_default_profile", new object [] {
        profile_names});
    return ((string [])(results[0]));
}

// Classic APM Begin/End pair for "get_default_profile".
public System.IAsyncResult Beginget_default_profile(string [] profile_names, System.AsyncCallback callback, object asyncState) {
    return this.BeginInvoke("get_default_profile", new object[] {
        profile_names}, callback, asyncState);
}

public string [] Endget_default_profile(System.IAsyncResult asyncResult) {
    object [] results = this.EndInvoke(asyncResult);
    return ((string [])(results[0]));
}
//-----------------------------------------------------------------------
// get_description
//-----------------------------------------------------------------------
// Synchronous wrapper for the "get_description" SOAP RPC operation (wsdl-generated).
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
    RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public string [] get_description(
    string [] profile_names
) {
    object [] results = this.Invoke("get_description", new object [] {
        profile_names});
    return ((string [])(results[0]));
}

// Classic APM Begin/End pair for "get_description".
public System.IAsyncResult Beginget_description(string [] profile_names, System.AsyncCallback callback, object asyncState) {
    return this.BeginInvoke("get_description", new object[] {
        profile_names}, callback, asyncState);
}

public string [] Endget_description(System.IAsyncResult asyncResult) {
    object [] results = this.EndInvoke(asyncResult);
    return ((string [])(results[0]));
}
//-----------------------------------------------------------------------
// get_error_action
//-----------------------------------------------------------------------
// Synchronous wrapper for the "get_error_action" SOAP RPC operation (wsdl-generated).
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
    RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public LocalLBProfileFIXProfileErrorAction [] get_error_action(
    string [] profile_names
) {
    object [] results = this.Invoke("get_error_action", new object [] {
        profile_names});
    return ((LocalLBProfileFIXProfileErrorAction [])(results[0]));
}

// Classic APM Begin/End pair for "get_error_action".
public System.IAsyncResult Beginget_error_action(string [] profile_names, System.AsyncCallback callback, object asyncState) {
    return this.BeginInvoke("get_error_action", new object[] {
        profile_names}, callback, asyncState);
}

public LocalLBProfileFIXProfileErrorAction [] Endget_error_action(System.IAsyncResult asyncResult) {
    object [] results = this.EndInvoke(asyncResult);
    return ((LocalLBProfileFIXProfileErrorAction [])(results[0]));
}
//-----------------------------------------------------------------------
// get_full_logon_parsing_state
//-----------------------------------------------------------------------
// Synchronous wrapper for the "get_full_logon_parsing_state" SOAP RPC operation (wsdl-generated).
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
    RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public LocalLBProfileEnabledState [] get_full_logon_parsing_state(
    string [] profile_names
) {
    object [] results = this.Invoke("get_full_logon_parsing_state", new object [] {
        profile_names});
    return ((LocalLBProfileEnabledState [])(results[0]));
}

// Classic APM Begin/End pair for "get_full_logon_parsing_state".
public System.IAsyncResult Beginget_full_logon_parsing_state(string [] profile_names, System.AsyncCallback callback, object asyncState) {
    return this.BeginInvoke("get_full_logon_parsing_state", new object[] {
        profile_names}, callback, asyncState);
}

public LocalLBProfileEnabledState [] Endget_full_logon_parsing_state(System.IAsyncResult asyncResult) {
    object [] results = this.EndInvoke(asyncResult);
    return ((LocalLBProfileEnabledState [])(results[0]));
}
//-----------------------------------------------------------------------
// get_list
//-----------------------------------------------------------------------
// Synchronous wrapper for the "get_list" SOAP RPC operation (wsdl-generated).
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
    RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public string [] get_list(
) {
    object [] results = this.Invoke("get_list", new object [0]);
    return ((string [])(results[0]));
}

// Classic APM Begin/End pair for "get_list".
public System.IAsyncResult Beginget_list(System.AsyncCallback callback, object asyncState) {
    return this.BeginInvoke("get_list", new object[0], callback, asyncState);
}

public string [] Endget_list(System.IAsyncResult asyncResult) {
    object [] results = this.EndInvoke(asyncResult);
    return ((string [])(results[0]));
}
//-----------------------------------------------------------------------
// get_message_log_publisher
//-----------------------------------------------------------------------
// Synchronous wrapper for the "get_message_log_publisher" SOAP RPC operation (wsdl-generated).
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
    RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public LocalLBProfileString [] get_message_log_publisher(
    string [] profile_names
) {
    object [] results = this.Invoke("get_message_log_publisher", new object [] {
        profile_names});
    return ((LocalLBProfileString [])(results[0]));
}

// Classic APM Begin/End pair for "get_message_log_publisher".
public System.IAsyncResult Beginget_message_log_publisher(string [] profile_names, System.AsyncCallback callback, object asyncState) {
    return this.BeginInvoke("get_message_log_publisher", new object[] {
        profile_names}, callback, asyncState);
}

public LocalLBProfileString [] Endget_message_log_publisher(System.IAsyncResult asyncResult) {
    object [] results = this.EndInvoke(asyncResult);
    return ((LocalLBProfileString [])(results[0]));
}
//-----------------------------------------------------------------------
// get_quick_parsing_state
//-----------------------------------------------------------------------
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public LocalLBProfileEnabledState [] get_quick_parsing_state(
string [] profile_names
) {
object [] results = this.Invoke("get_quick_parsing_state", new object [] {
profile_names});
return ((LocalLBProfileEnabledState [])(results[0]));
}
public System.IAsyncResult Beginget_quick_parsing_state(string [] profile_names, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_quick_parsing_state", new object[] {
profile_names}, callback, asyncState);
}
public LocalLBProfileEnabledState [] Endget_quick_parsing_state(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((LocalLBProfileEnabledState [])(results[0]));
}
//-----------------------------------------------------------------------
// get_rate_sample_interval
//-----------------------------------------------------------------------
// Gets the rate sample interval value for each named profile.
// (Generated SOAP RPC proxy — sync method plus Begin/End APM wrappers.)
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public LocalLBProfileULong [] get_rate_sample_interval(
string [] profile_names
) {
object [] results = this.Invoke("get_rate_sample_interval", new object [] {
profile_names});
return ((LocalLBProfileULong [])(results[0]));
}
public System.IAsyncResult Beginget_rate_sample_interval(string [] profile_names, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_rate_sample_interval", new object[] {
profile_names}, callback, asyncState);
}
public LocalLBProfileULong [] Endget_rate_sample_interval(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((LocalLBProfileULong [])(results[0]));
}
//-----------------------------------------------------------------------
// get_report_log_publisher
//-----------------------------------------------------------------------
// Gets the report log publisher value for each named profile.
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public LocalLBProfileString [] get_report_log_publisher(
string [] profile_names
) {
object [] results = this.Invoke("get_report_log_publisher", new object [] {
profile_names});
return ((LocalLBProfileString [])(results[0]));
}
public System.IAsyncResult Beginget_report_log_publisher(string [] profile_names, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_report_log_publisher", new object[] {
profile_names}, callback, asyncState);
}
public LocalLBProfileString [] Endget_report_log_publisher(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((LocalLBProfileString [])(results[0]));
}
//-----------------------------------------------------------------------
// get_response_parsing_state
//-----------------------------------------------------------------------
// Gets the response-parsing enabled state for each named profile.
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public LocalLBProfileEnabledState [] get_response_parsing_state(
string [] profile_names
) {
object [] results = this.Invoke("get_response_parsing_state", new object [] {
profile_names});
return ((LocalLBProfileEnabledState [])(results[0]));
}
public System.IAsyncResult Beginget_response_parsing_state(string [] profile_names, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_response_parsing_state", new object[] {
profile_names}, callback, asyncState);
}
public LocalLBProfileEnabledState [] Endget_response_parsing_state(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((LocalLBProfileEnabledState [])(results[0]));
}
//-----------------------------------------------------------------------
// get_sender_tag_class
//-----------------------------------------------------------------------
// Gets the sender-tag-class mappings for each named profile.
// (Generated SOAP RPC proxy — sync method plus Begin/End APM wrappers.)
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public LocalLBProfileFIXProfileSenderTagClassArray [] get_sender_tag_class(
string [] profile_names
) {
object [] results = this.Invoke("get_sender_tag_class", new object [] {
profile_names});
return ((LocalLBProfileFIXProfileSenderTagClassArray [])(results[0]));
}
public System.IAsyncResult Beginget_sender_tag_class(string [] profile_names, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_sender_tag_class", new object[] {
profile_names}, callback, asyncState);
}
public LocalLBProfileFIXProfileSenderTagClassArray [] Endget_sender_tag_class(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((LocalLBProfileFIXProfileSenderTagClassArray [])(results[0]));
}
//-----------------------------------------------------------------------
// get_statistics
//-----------------------------------------------------------------------
// Gets statistics for each named profile (single statistics object
// containing one entry per profile, plus a timestamp).
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public LocalLBProfileFIXFIXProfileStatistics get_statistics(
string [] profile_names
) {
object [] results = this.Invoke("get_statistics", new object [] {
profile_names});
return ((LocalLBProfileFIXFIXProfileStatistics)(results[0]));
}
public System.IAsyncResult Beginget_statistics(string [] profile_names, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_statistics", new object[] {
profile_names}, callback, asyncState);
}
public LocalLBProfileFIXFIXProfileStatistics Endget_statistics(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((LocalLBProfileFIXFIXProfileStatistics)(results[0]));
}
//-----------------------------------------------------------------------
// get_statistics_by_virtual
//-----------------------------------------------------------------------
// Gets statistics for each named profile, broken down per virtual server
// (virtual_names is a jagged array: one inner array per profile).
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public LocalLBProfileStatisticsByVirtual get_statistics_by_virtual(
string [] profile_names,
string [] [] virtual_names
) {
object [] results = this.Invoke("get_statistics_by_virtual", new object [] {
profile_names,
virtual_names});
return ((LocalLBProfileStatisticsByVirtual)(results[0]));
}
public System.IAsyncResult Beginget_statistics_by_virtual(string [] profile_names,string [] [] virtual_names, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_statistics_by_virtual", new object[] {
profile_names,
virtual_names}, callback, asyncState);
}
public LocalLBProfileStatisticsByVirtual Endget_statistics_by_virtual(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((LocalLBProfileStatisticsByVirtual)(results[0]));
}
//-----------------------------------------------------------------------
// get_version
//-----------------------------------------------------------------------
// Gets the version string of this interface.
// (Generated SOAP RPC proxy — sync method plus Begin/End APM wrappers.)
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public string get_version(
) {
object [] results = this.Invoke("get_version", new object [] {
});
return ((string)(results[0]));
}
public System.IAsyncResult Beginget_version(System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("get_version", new object[] {
}, callback, asyncState);
}
public string Endget_version(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((string)(results[0]));
}
//-----------------------------------------------------------------------
// is_base_profile
//-----------------------------------------------------------------------
// For each named profile, indicates whether it is a base profile.
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public bool [] is_base_profile(
string [] profile_names
) {
object [] results = this.Invoke("is_base_profile", new object [] {
profile_names});
return ((bool [])(results[0]));
}
public System.IAsyncResult Beginis_base_profile(string [] profile_names, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("is_base_profile", new object[] {
profile_names}, callback, asyncState);
}
public bool [] Endis_base_profile(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((bool [])(results[0]));
}
//-----------------------------------------------------------------------
// is_system_profile
//-----------------------------------------------------------------------
// For each named profile, indicates whether it is a system profile.
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
[return: System.Xml.Serialization.SoapElementAttribute("return")]
public bool [] is_system_profile(
string [] profile_names
) {
object [] results = this.Invoke("is_system_profile", new object [] {
profile_names});
return ((bool [])(results[0]));
}
public System.IAsyncResult Beginis_system_profile(string [] profile_names, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("is_system_profile", new object[] {
profile_names}, callback, asyncState);
}
public bool [] Endis_system_profile(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
return ((bool [])(results[0]));
}
//-----------------------------------------------------------------------
// replace_sender_tag_class
//-----------------------------------------------------------------------
// Replaces the sender-tag-class mappings for each named profile.
// (Generated SOAP RPC proxy — sync method plus Begin/End APM wrappers.
// For void operations the generated End* methods still capture the
// EndInvoke result into an unused local; retained as generated.)
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
public void replace_sender_tag_class(
string [] profile_names,
LocalLBProfileFIXProfileSenderTagClassArray [] values
) {
this.Invoke("replace_sender_tag_class", new object [] {
profile_names,
values});
}
public System.IAsyncResult Beginreplace_sender_tag_class(string [] profile_names,LocalLBProfileFIXProfileSenderTagClassArray [] values, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("replace_sender_tag_class", new object[] {
profile_names,
values}, callback, asyncState);
}
public void Endreplace_sender_tag_class(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// reset_statistics
//-----------------------------------------------------------------------
// Resets the statistics of each named profile.
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
public void reset_statistics(
string [] profile_names
) {
this.Invoke("reset_statistics", new object [] {
profile_names});
}
public System.IAsyncResult Beginreset_statistics(string [] profile_names, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("reset_statistics", new object[] {
profile_names}, callback, asyncState);
}
public void Endreset_statistics(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// reset_statistics_by_virtual
//-----------------------------------------------------------------------
// Resets per-virtual-server statistics for each named profile
// (virtual_names is a jagged array: one inner array per profile).
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
public void reset_statistics_by_virtual(
string [] profile_names,
string [] [] virtual_names
) {
this.Invoke("reset_statistics_by_virtual", new object [] {
profile_names,
virtual_names});
}
public System.IAsyncResult Beginreset_statistics_by_virtual(string [] profile_names,string [] [] virtual_names, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("reset_statistics_by_virtual", new object[] {
profile_names,
virtual_names}, callback, asyncState);
}
public void Endreset_statistics_by_virtual(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_default_profile
//-----------------------------------------------------------------------
// Sets the parent (default) profile for each named profile.
// (Generated SOAP RPC proxy — sync method plus Begin/End APM wrappers.)
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
public void set_default_profile(
string [] profile_names,
string [] defaults
) {
this.Invoke("set_default_profile", new object [] {
profile_names,
defaults});
}
public System.IAsyncResult Beginset_default_profile(string [] profile_names,string [] defaults, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_default_profile", new object[] {
profile_names,
defaults}, callback, asyncState);
}
public void Endset_default_profile(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_description
//-----------------------------------------------------------------------
// Sets the description for each named profile.
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
public void set_description(
string [] profile_names,
string [] descriptions
) {
this.Invoke("set_description", new object [] {
profile_names,
descriptions});
}
public System.IAsyncResult Beginset_description(string [] profile_names,string [] descriptions, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_description", new object[] {
profile_names,
descriptions}, callback, asyncState);
}
public void Endset_description(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_error_action
//-----------------------------------------------------------------------
// Sets the on-error action for each named profile.
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
public void set_error_action(
string [] profile_names,
LocalLBProfileFIXProfileErrorAction [] actions
) {
this.Invoke("set_error_action", new object [] {
profile_names,
actions});
}
public System.IAsyncResult Beginset_error_action(string [] profile_names,LocalLBProfileFIXProfileErrorAction [] actions, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_error_action", new object[] {
profile_names,
actions}, callback, asyncState);
}
public void Endset_error_action(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_full_logon_parsing_state
//-----------------------------------------------------------------------
// Sets the full-logon-parsing enabled state for each named profile.
// (Generated SOAP RPC proxy — sync method plus Begin/End APM wrappers.)
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
public void set_full_logon_parsing_state(
string [] profile_names,
LocalLBProfileEnabledState [] states
) {
this.Invoke("set_full_logon_parsing_state", new object [] {
profile_names,
states});
}
public System.IAsyncResult Beginset_full_logon_parsing_state(string [] profile_names,LocalLBProfileEnabledState [] states, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_full_logon_parsing_state", new object[] {
profile_names,
states}, callback, asyncState);
}
public void Endset_full_logon_parsing_state(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_message_log_publisher
//-----------------------------------------------------------------------
// Sets the message log publisher for each named profile.
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
public void set_message_log_publisher(
string [] profile_names,
LocalLBProfileString [] publishers
) {
this.Invoke("set_message_log_publisher", new object [] {
profile_names,
publishers});
}
public System.IAsyncResult Beginset_message_log_publisher(string [] profile_names,LocalLBProfileString [] publishers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_message_log_publisher", new object[] {
profile_names,
publishers}, callback, asyncState);
}
public void Endset_message_log_publisher(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_quick_parsing_state
//-----------------------------------------------------------------------
// Sets the quick-parsing enabled state for each named profile.
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
public void set_quick_parsing_state(
string [] profile_names,
LocalLBProfileEnabledState [] states
) {
this.Invoke("set_quick_parsing_state", new object [] {
profile_names,
states});
}
public System.IAsyncResult Beginset_quick_parsing_state(string [] profile_names,LocalLBProfileEnabledState [] states, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_quick_parsing_state", new object[] {
profile_names,
states}, callback, asyncState);
}
public void Endset_quick_parsing_state(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_rate_sample_interval
//-----------------------------------------------------------------------
// Sets the rate sample interval for each named profile.
// (Generated SOAP RPC proxy — sync method plus Begin/End APM wrappers.)
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
public void set_rate_sample_interval(
string [] profile_names,
LocalLBProfileULong [] values
) {
this.Invoke("set_rate_sample_interval", new object [] {
profile_names,
values});
}
public System.IAsyncResult Beginset_rate_sample_interval(string [] profile_names,LocalLBProfileULong [] values, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_rate_sample_interval", new object[] {
profile_names,
values}, callback, asyncState);
}
public void Endset_rate_sample_interval(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_report_log_publisher
//-----------------------------------------------------------------------
// Sets the report log publisher for each named profile.
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
public void set_report_log_publisher(
string [] profile_names,
LocalLBProfileString [] publishers
) {
this.Invoke("set_report_log_publisher", new object [] {
profile_names,
publishers});
}
public System.IAsyncResult Beginset_report_log_publisher(string [] profile_names,LocalLBProfileString [] publishers, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_report_log_publisher", new object[] {
profile_names,
publishers}, callback, asyncState);
}
public void Endset_report_log_publisher(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
//-----------------------------------------------------------------------
// set_response_parsing_state
//-----------------------------------------------------------------------
// Sets the response-parsing enabled state for each named profile.
[System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:LocalLB/ProfileFIX",
RequestNamespace="urn:iControl:LocalLB/ProfileFIX", ResponseNamespace="urn:iControl:LocalLB/ProfileFIX")]
public void set_response_parsing_state(
string [] profile_names,
LocalLBProfileEnabledState [] states
) {
this.Invoke("set_response_parsing_state", new object [] {
profile_names,
states});
}
public System.IAsyncResult Beginset_response_parsing_state(string [] profile_names,LocalLBProfileEnabledState [] states, System.AsyncCallback callback, object asyncState) {
return this.BeginInvoke("set_response_parsing_state", new object[] {
profile_names,
states}, callback, asyncState);
}
public void Endset_response_parsing_state(System.IAsyncResult asyncResult) {
object [] results = this.EndInvoke(asyncResult);
}
}
//=======================================================================
// Enums
//=======================================================================
/// <summary>
/// Mirrors the iControl LocalLB.ProfileFIX.ErrorAction enumeration (the
/// action taken by a FIX profile on an error condition — confirm the exact
/// semantics of each member against the iControl documentation).
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.3038")]
[System.SerializableAttribute()]
[System.Xml.Serialization.SoapTypeAttribute(TypeName = "LocalLB.ProfileFIX.ErrorAction", Namespace = "urn:iControl")]
public enum LocalLBProfileFIXErrorAction
{
ERROR_ACTION_UNKNOWN,
ERROR_ACTION_DONT_FORWARD,
ERROR_ACTION_DROP_CONNECTION,
}
//=======================================================================
// Structs
//=======================================================================
/// <summary>
/// Generated SOAP data type LocalLB.ProfileFIX.FIXProfileStatisticEntry:
/// the statistics gathered for a single FIX profile.
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.3038")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.SoapTypeAttribute(TypeName = "LocalLB.ProfileFIX.FIXProfileStatisticEntry", Namespace = "urn:iControl")]
public partial class LocalLBProfileFIXFIXProfileStatisticEntry
{
private string profile_nameField;
/// <summary>The name of the profile these statistics belong to.</summary>
public string profile_name
{
get { return this.profile_nameField; }
set { this.profile_nameField = value; }
}
private CommonStatistic [] statisticsField;
/// <summary>The individual statistic values for this profile.</summary>
public CommonStatistic [] statistics
{
get { return this.statisticsField; }
set { this.statisticsField = value; }
}
};
/// <summary>
/// Generated SOAP data type LocalLB.ProfileFIX.FIXProfileStatistics:
/// per-profile statistic entries together with the time they were taken.
/// Returned by get_statistics.
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.3038")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.SoapTypeAttribute(TypeName = "LocalLB.ProfileFIX.FIXProfileStatistics", Namespace = "urn:iControl")]
public partial class LocalLBProfileFIXFIXProfileStatistics
{
private LocalLBProfileFIXFIXProfileStatisticEntry [] statisticsField;
/// <summary>One statistics entry per requested profile.</summary>
public LocalLBProfileFIXFIXProfileStatisticEntry [] statistics
{
get { return this.statisticsField; }
set { this.statisticsField = value; }
}
private CommonTimeStamp time_stampField;
/// <summary>The timestamp associated with these statistics.</summary>
public CommonTimeStamp time_stamp
{
get { return this.time_stampField; }
set { this.time_stampField = value; }
}
};
/// <summary>
/// Generated SOAP data type LocalLB.ProfileFIX.ProfileErrorAction: an
/// ErrorAction value plus a flag indicating whether the value is
/// inherited from the parent (default) profile.
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.3038")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.SoapTypeAttribute(TypeName = "LocalLB.ProfileFIX.ProfileErrorAction", Namespace = "urn:iControl")]
public partial class LocalLBProfileFIXProfileErrorAction
{
private LocalLBProfileFIXErrorAction valueField;
/// <summary>The error action value.</summary>
public LocalLBProfileFIXErrorAction value
{
get { return this.valueField; }
set { this.valueField = value; }
}
private bool default_flagField;
/// <summary>Whether the value comes from the default profile.</summary>
public bool default_flag
{
get { return this.default_flagField; }
set { this.default_flagField = value; }
}
};
/// <summary>
/// Generated SOAP data type LocalLB.ProfileFIX.ProfileSenderTagClassArray:
/// a set of SenderTagClass mappings plus a flag indicating whether the
/// value is inherited from the parent (default) profile.
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.3038")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.SoapTypeAttribute(TypeName = "LocalLB.ProfileFIX.ProfileSenderTagClassArray", Namespace = "urn:iControl")]
public partial class LocalLBProfileFIXProfileSenderTagClassArray
{
private LocalLBProfileFIXSenderTagClass [] valueField;
/// <summary>The sender-tag-class mappings.</summary>
public LocalLBProfileFIXSenderTagClass [] value
{
get { return this.valueField; }
set { this.valueField = value; }
}
private bool default_flagField;
/// <summary>Whether the value comes from the default profile.</summary>
public bool default_flag
{
get { return this.default_flagField; }
set { this.default_flagField = value; }
}
};
/// <summary>
/// Generated SOAP data type LocalLB.ProfileFIX.SenderTagClass: pairs a
/// sender id with a tag map class name (exact semantics per the
/// iControl documentation).
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.3038")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.SoapTypeAttribute(TypeName = "LocalLB.ProfileFIX.SenderTagClass", Namespace = "urn:iControl")]
public partial class LocalLBProfileFIXSenderTagClass
{
private string sender_idField;
/// <summary>The sender identifier.</summary>
public string sender_id
{
get { return this.sender_idField; }
set { this.sender_idField = value; }
}
private string tag_map_classField;
/// <summary>The name of the tag map class for this sender.</summary>
public string tag_map_class
{
get { return this.tag_map_classField; }
set { this.tag_map_classField = value; }
}
};
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using System.IO;
using System.Threading;
using System.Xml;
using System.Configuration;
using MarkHeath.MidiUtils.Properties;
namespace MarkHeath.MidiUtils
{
public partial class MainForm : Form
{
// True while a conversion queued via ThreadPool.QueueUserWorkItem is
// still running (set in buttonConvert_Click, cleared in ConvertThreadProc).
bool workQueued;
// Naming rules loaded from NamingRules.xml in MainForm_Load.
NamingRules namingRules;
// Converter for the most recent run; its Summary is shown when finished.
MidiConverter midiConverter;
// Cached reference to Properties.Settings.Default.
Properties.Settings settings;
/// <summary>
/// Creates the main window, migrates settings forward from a previous
/// version on first run, and populates the UI from the saved settings.
/// </summary>
public MainForm()
{
    InitializeComponent();
    settings = Properties.Settings.Default;
    // Use the cached reference consistently; it is the same object as
    // Settings.Default, so behavior is unchanged.
    if (settings.FirstTime)
    {
        UpgradeSettings();
    }
    // could look in HKLM \ Software \ Toontrack \ Superior \ EZDrummer \ HomePath
    LoadSettings();
}
// Copies user settings forward from a previous installed version using
// ApplicationSettingsBase.GetPreviousVersion. Only runs its body when a
// previous ProductVersion value exists (i.e. there is something to migrate).
private void UpgradeSettings()
{
string productVersion = (string)settings.GetPreviousVersion("ProductVersion");
if ((productVersion != null) && (productVersion.Length > 0))
{
settings.InputFolder = (string)settings.GetPreviousVersion("InputFolder");
settings.OutputFolder = (string)settings.GetPreviousVersion("OutputFolder");
settings.OutputChannelNumber = (int)settings.GetPreviousVersion("OutputChannelNumber");
settings.OutputMidiType = (OutputMidiType)settings.GetPreviousVersion("OutputMidiType");
settings.VerboseOutput = (bool)settings.GetPreviousVersion("VerboseOutput");
settings.UseFileName = (bool)settings.GetPreviousVersion("UseFileName");
// Settings introduced in later releases may be absent from the old
// version's store, so their migration is best-effort: the first
// missing one aborts the rest of this group.
try
{
settings.AddNameMarker = (bool)settings.GetPreviousVersion("AddNameMarker");
settings.TrimTextEvents = (bool)settings.GetPreviousVersion("TrimTextEvents");
settings.RemoveEmptyTracks = (bool)settings.GetPreviousVersion("RemoveEmptyTracks");
settings.RemoveSequencerSpecific = (bool)settings.GetPreviousVersion("RemoveSequencerSpecific");
settings.RecreateEndTrackMarkers = (bool)settings.GetPreviousVersion("RecreateEndTrackMarkers");
settings.RemoveExtraTempoEvents = (bool)settings.GetPreviousVersion("RemoveExtraTempoEvents");
settings.RemoveExtraMarkers = (bool)settings.GetPreviousVersion("RemoveExtraMarkers");
// add new settings at the bottom
}
catch (SettingsPropertyNotFoundException)
{
}
}
}
/// <summary>
/// Populates the UI controls from the saved user settings, substituting
/// defaults for the input folder (EZDrummer MIDI library) and the output
/// folder (desktop) when none are stored.
/// </summary>
private void LoadSettings()
{
    string programFiles = Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles);

    textBoxInputFolder.Text = (settings.InputFolder.Length == 0)
        ? Path.Combine(programFiles, "Toontrack\\EZDrummer\\Midi")
        : settings.InputFolder;

    textBoxOutputFolder.Text = (settings.OutputFolder.Length == 0)
        ? Environment.GetFolderPath(Environment.SpecialFolder.Desktop)
        : settings.OutputFolder;

    checkBoxApplyNamingRules.Checked = settings.ApplyNamingRules;
    checkBoxUseFilename.Checked = settings.UseFileName;
    checkBoxVerbose.Checked = settings.VerboseOutput;

    switch (settings.OutputMidiType)
    {
        case OutputMidiType.Type0:
            radioButtonType0.Checked = true;
            break;
        case OutputMidiType.Type1:
            radioButtonType1.Checked = true;
            break;
        default:
            radioButtonTypeUnchanged.Checked = true;
            break;
    }

    switch (settings.OutputChannelNumber)
    {
        case 1:
            radioButtonChannel1.Checked = true;
            break;
        case 10:
            radioButtonChannel10.Checked = true;
            break;
        default:
            radioButtonChannelUnchanged.Checked = true;
            break;
    }
}
/// <summary>
/// Copies the current state of the UI controls back into the user
/// settings object (does not persist to disk; see OnClosing).
/// </summary>
private void UpdateSettings()
{
    settings.InputFolder = textBoxInputFolder.Text;
    settings.OutputFolder = textBoxOutputFolder.Text;
    settings.ApplyNamingRules = checkBoxApplyNamingRules.Checked;
    settings.VerboseOutput = checkBoxVerbose.Checked;
    settings.UseFileName = checkBoxUseFilename.Checked;

    settings.OutputMidiType =
        radioButtonType0.Checked ? OutputMidiType.Type0 :
        radioButtonType1.Checked ? OutputMidiType.Type1 :
        OutputMidiType.LeaveUnchanged;

    // -1 corresponds to the "unchanged" radio button.
    settings.OutputChannelNumber =
        radioButtonChannel1.Checked ? 1 :
        radioButtonChannel10.Checked ? 10 :
        -1;
}
/// <summary>
/// Loads the naming rules from NamingRules.xml next to the executable.
/// If loading fails the user is told and the form is closed.
/// </summary>
private void MainForm_Load(object sender, EventArgs args)
{
    string executableFolder = Path.GetDirectoryName(Application.ExecutablePath);
    string rulesFile = Path.Combine(executableFolder, "NamingRules.xml");
    try
    {
        namingRules = NamingRules.LoadRules(rulesFile);
    }
    catch (Exception ex)
    {
        // Report the failure and close the window.
        MessageBox.Show(String.Format("Error reading NamingRules.xml\r\n{0}", ex.ToString()), Application.ProductName);
        Close();
    }
}
/// <summary>
/// Validates the selected folders and queues the MIDI conversion onto a
/// thread-pool thread. Refuses to start while a conversion is running.
/// </summary>
private void buttonConvert_Click(object sender, EventArgs e)
{
    if (workQueued)
    {
        MessageBox.Show("Please wait until the current operation has finished", Application.ProductName);
        return;
    }

    // Capture the UI state once (the original called UpdateSettings twice).
    UpdateSettings();
    if (!CheckInputFolderExists())
        return;
    if (!CheckOutputFolderExists())
        return;
    if (!CheckOutputFolderIsEmpty())
        return;

    workQueued = ThreadPool.QueueUserWorkItem(new WaitCallback(ConvertThreadProc));
    // Show the wait cursor only once the work item is actually queued, so
    // the cursor is not left stuck if QueueUserWorkItem ever fails.
    if (workQueued)
    {
        this.Cursor = Cursors.WaitCursor;
    }
}
/// <summary>
/// Blocks closing while a conversion is running; otherwise captures the
/// UI state and persists the user settings before the form closes.
/// </summary>
protected override void OnClosing(CancelEventArgs e)
{
    if (!workQueued)
    {
        UpdateSettings();
        settings.FirstTime = false;
        settings.ProductVersion = Application.ProductVersion;
        settings.Save();
    }
    else
    {
        MessageBox.Show("Please wait until the current operation has finished", Application.ProductName);
        e.Cancel = true;
    }
    base.OnClosing(e);
}
// Thread-pool entry point for the conversion (queued by buttonConvert_Click).
// Always clears workQueued and marshals the completion notification back to
// the UI thread via Control.Invoke, even if the conversion throws.
// NOTE(review): there is no catch block, so an exception from the converter
// escapes this thread-pool thread after the finally runs and terminates the
// process — confirm whether conversion errors are expected to be surfaced
// through the Progress event instead.
private void ConvertThreadProc(object state)
{
try
{
progressLog1.ClearLog();
midiConverter = new MidiConverter(namingRules);
midiConverter.Progress += new EventHandler<NAudio.Utils.ProgressEventArgs>(midiConverter_Progress);
midiConverter.Start();
}
finally
{
workQueued = false;
this.Invoke(new FinishedDelegate(ShowFinishedMessage));
}
}
// Forwards converter progress events to the progress log control.
// NOTE(review): raised on the conversion worker thread — presumably
// ProgressLog.ReportProgress marshals to the UI thread itself; confirm.
void midiConverter_Progress(object sender, NAudio.Utils.ProgressEventArgs e)
{
progressLog1.ReportProgress(e);
}
// Delegate used to marshal the completion notification onto the UI thread.
delegate void FinishedDelegate();
// Runs on the UI thread (via Invoke from ConvertThreadProc): restores the
// cursor, enables the save-log menu item and shows the conversion summary.
void ShowFinishedMessage()
{
this.Cursor = Cursors.Default;
saveLogToolStripMenuItem.Enabled = true;
MessageBox.Show(String.Format("Finished:\r\n{0}", midiConverter.Summary), Application.ProductName);
}
// Returns true when the selected input folder exists; otherwise warns
// the user and returns false.
private bool CheckInputFolderExists()
{
    if (!Directory.Exists(textBoxInputFolder.Text))
    {
        // OK-only dialog: the result is meaningless, so it is not captured
        // (the original assigned it to an unused local).
        MessageBox.Show("Your selected input folder does not exist.", Application.ProductName, MessageBoxButtons.OK, MessageBoxIcon.Warning);
        return false;
    }
    return true;
}
// Ensures the output folder exists, offering to create it when missing.
// Returns false only when the folder is absent and the user declines.
private bool CheckOutputFolderExists()
{
    string folder = textBoxOutputFolder.Text;
    if (Directory.Exists(folder))
        return true;
    DialogResult answer = MessageBox.Show("Your selected output folder does not exist.\r\nWould you like to create it now?", Application.ProductName, MessageBoxButtons.YesNo, MessageBoxIcon.Question);
    if (answer != DialogResult.Yes)
        return false;
    Directory.CreateDirectory(folder);
    return true;
}
// Returns true when the output folder contains no files and no
// subdirectories; otherwise warns the user and returns false.
private bool CheckOutputFolderIsEmpty()
{
    string folder = textBoxOutputFolder.Text;
    bool hasEntries = (Directory.GetFiles(folder).Length > 0) ||
                      (Directory.GetDirectories(folder).Length > 0);
    if (!hasEntries)
        return true;
    MessageBox.Show("Your output folder is not empty.\r\n" +
        "You must select an empty folder to store the converted MIDI files.",
        Application.ProductName, MessageBoxButtons.OK, MessageBoxIcon.Warning);
    return false;
}
// Lets the user pick the input folder through a folder browser dialog.
private void buttonBrowseEZDrummer_Click(object sender, EventArgs e)
{
    // FolderBrowserDialog is IDisposable; the original leaked it.
    using (FolderBrowserDialog folderBrowser = new FolderBrowserDialog())
    {
        folderBrowser.Description = "Select Input Folder";
        folderBrowser.SelectedPath = textBoxInputFolder.Text;
        if (folderBrowser.ShowDialog() == DialogResult.OK)
        {
            textBoxInputFolder.Text = folderBrowser.SelectedPath;
        }
    }
}
// Lets the user pick the output folder, then immediately validates that
// it exists (creating it on request) and warns if it is not empty.
private void buttonBrowseOutputFolder_Click(object sender, EventArgs e)
{
    // FolderBrowserDialog is IDisposable; the original leaked it.
    using (FolderBrowserDialog folderBrowser = new FolderBrowserDialog())
    {
        folderBrowser.Description = "Select Output Folder";
        folderBrowser.SelectedPath = textBoxOutputFolder.Text;
        if (folderBrowser.ShowDialog() == DialogResult.OK)
        {
            textBoxOutputFolder.Text = folderBrowser.SelectedPath;
            if (CheckOutputFolderExists())
            {
                // Advisory only: the emptiness check result is ignored here;
                // conversion re-checks before starting.
                CheckOutputFolderIsEmpty();
            }
        }
    }
}
// File -> Exit: closes the main form (settings persistence happens in
// OnClosing).
private void exitToolStripMenuItem_Click(object sender, EventArgs e)
{
    Close();
}
// Help -> Contents: opens the bundled HTML help file with whatever
// application is registered for .html. Process.Start throws
// Win32Exception when no handler is available.
private void contentsToolStripMenuItem_Click(object sender, EventArgs e)
{
    string appFolder = Path.GetDirectoryName(Application.ExecutablePath);
    string helpFilePath = Path.Combine(appFolder, "midi_file_converter.html");
    try
    {
        System.Diagnostics.Process.Start(helpFilePath);
    }
    catch (Win32Exception)
    {
        MessageBox.Show("Could not display the help file", Application.ProductName);
    }
}
// Help -> About: shows the NAudio about dialog.
private void aboutToolStripMenuItem_Click(object sender, EventArgs e)
{
    // Forms shown with ShowDialog are not disposed automatically;
    // wrap in using so the dialog's resources are released.
    using (NAudio.Utils.AboutForm aboutForm = new NAudio.Utils.AboutForm())
    {
        aboutForm.ShowDialog();
    }
}
// Clears the progress log and disables log saving until new content
// arrives (saving is re-enabled in ShowFinishedMessage).
private void clearLogToolStripMenuItem_Click(object sender, EventArgs e)
{
    saveLogToolStripMenuItem.Enabled = false;
    progressLog1.ClearLog();
}
// Opens the advanced options dialog; blocked while a conversion runs.
private void optionsToolStripMenuItem_Click(object sender, EventArgs e)
{
    if (workQueued)
    {
        MessageBox.Show("Please wait until the current operation has finished", Application.ProductName);
        return;
    }
    // Forms shown with ShowDialog are not disposed automatically;
    // the original leaked the options form.
    using (AdvancedOptionsForm optionsForm = new AdvancedOptionsForm())
    {
        optionsForm.ShowDialog();
    }
}
// Saves the progress log to a user-chosen text file, normalising bare
// '\n' line endings to "\r\n" so the file displays correctly in Notepad.
// Blocked while a conversion is running.
private void saveLogToolStripMenuItem_Click(object sender, EventArgs args)
{
    if (workQueued)
    {
        MessageBox.Show("Please wait until the current operation has finished", Application.ProductName);
        return;
    }
    // SaveFileDialog is IDisposable; the original leaked it.
    using (SaveFileDialog saveFileDialog = new SaveFileDialog())
    {
        saveFileDialog.InitialDirectory = textBoxOutputFolder.Text;
        saveFileDialog.DefaultExt = ".txt";
        saveFileDialog.FileName = "Conversion Log.txt";
        saveFileDialog.Filter = "Text Files (*.txt)|*.txt";
        saveFileDialog.FilterIndex = 1;
        if (saveFileDialog.ShowDialog() != DialogResult.OK)
            return;
        try
        {
            using (StreamWriter writer = new StreamWriter(saveFileDialog.FileName))
            {
                string text = progressLog1.Text;
                // Only normalise when the text has no CRs at all, to avoid
                // doubling existing "\r\n" sequences.
                if (!text.Contains("\r"))
                {
                    text = text.Replace("\n", "\r\n");
                }
                writer.Write(text);
            }
        }
        catch (Exception e)
        {
            MessageBox.Show(
                String.Format("Error saving conversion log\r\n{0}", e.Message),
                Application.ProductName,
                MessageBoxButtons.OK,
                MessageBoxIcon.Error);
        }
    }
}
}
}
| |
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using Newtonsoft.Json.Linq;
using Signum.Utilities;
using Signum.Utilities.ExpressionTrees;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
namespace Signum.Entities.Dynamic
{
/// <summary>
/// Entity that stores the JSON definition of a dynamically generated type.
/// The raw JSON lives in <see cref="TypeDefinition"/>; the parsed form is
/// cached in the [Ignore] field <c>definition</c>.
/// </summary>
[Serializable, EntityKind(EntityKind.Main, EntityData.Master)]
public class DynamicTypeEntity : Entity
{
    public DynamicBaseType BaseType { set; get; }

    [UniqueIndex]
    [StringLengthValidator(Min = 3, Max = 100), IdentifierValidator(IdentifierType.PascalAscii)]
    public string TypeName { get; set; }

    [DbType(Size = int.MaxValue)]
    string typeDefinition;
    [StringLengthValidator(Min = 3)]
    public string TypeDefinition
    {
        get { return this.Get(typeDefinition); }
        set
        {
            // Invalidate the cached parsed definition whenever the JSON changes.
            if (this.Set(ref typeDefinition, value))
                this.definition = null;
        }
    }

    // Parsed cache of TypeDefinition; primed by SetDefinition / GetDefinition.
    [Ignore]
    DynamicTypeDefinition? definition;

    /// <summary>
    /// Returns the parsed definition, deserializing it on first access and
    /// caching the result. The original deserialized the JSON on every call
    /// even though the cache field and its invalidation already existed.
    /// </summary>
    public DynamicTypeDefinition GetDefinition()
    {
        return definition ??= JsonConvert.DeserializeObject<DynamicTypeDefinition>(this.TypeDefinition);
    }

    /// <summary>Replaces the definition, keeping JSON and cache in sync.</summary>
    public void SetDefinition(DynamicTypeDefinition definition)
    {
        this.TypeDefinition = JsonConvert.SerializeObject(definition);
        this.definition = definition;
    }

    protected override string? PropertyValidation(PropertyInfo pi)
    {
        if (pi.Name == nameof(TypeDefinition))
        {
            // Every declared property name must be a valid PascalCase identifier;
            // offending names are reported one per line.
            var def = this.GetDefinition();
            return def.Properties
                .Where(p => p.Name.HasText() && !IdentifierValidatorAttribute.PascalAscii.IsMatch(p.Name))
                .Select(p => ValidationMessage._0DoesNotHaveAValid1IdentifierFormat.NiceToString(p.Name, IdentifierType.PascalAscii))
                .ToString("\r\n")
                .DefaultToNull();
        }
        return base.PropertyValidation(pi);
    }

    [AutoExpressionField]
    public override string ToString() => As.Expression(() => TypeName);
}
/// <summary>Operation symbols registered for <see cref="DynamicTypeEntity"/>;
/// fields are initialized by the [AutoInit] symbol mechanism.</summary>
[AutoInit]
public static class DynamicTypeOperation
{
    public static readonly ConstructSymbol<DynamicTypeEntity>.Simple Create;
    public static readonly ConstructSymbol<DynamicTypeEntity>.From<DynamicTypeEntity> Clone;
    public static readonly ExecuteSymbol<DynamicTypeEntity> Save;
    public static readonly DeleteSymbol<DynamicTypeEntity> Delete;
}
/// <summary>User-facing messages for the dynamic-type module.</summary>
public enum DynamicTypeMessage
{
    [Description("DynamicType '{0}' successfully saved. Go to DynamicPanel now?")]
    // NOTE(review): "Sucessfully" typo is part of the enum key; renaming it
    // could break existing translation entries keyed on the member name.
    DynamicType0SucessfullySavedGoToDynamicPanelNow,
    [Description("Server restarted with errors in dynamic code. Fix errors and restart again.")]
    ServerRestartedWithErrorsInDynamicCodeFixErrorsAndRestartAgain,
    [Description("Remove Save Operation?")]
    RemoveSaveOperation,
    TheEntityShouldBeSynchronizedToApplyMixins,
}
/// <summary>JSON DTO describing the primary key of a dynamic type
/// (field names and attributes form the serialization contract).</summary>
public class DynamicTypePrimaryKeyDefinition
{
    [JsonProperty(PropertyName = "name", NullValueHandling = NullValueHandling.Ignore)]
    public string Name;
    [JsonProperty(PropertyName = "type", NullValueHandling = NullValueHandling.Ignore)]
    public string Type;
    // True when the column is database-generated (identity).
    [JsonProperty(PropertyName = "identity", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public bool Identity;
}
/// <summary>JSON DTO describing the optional concurrency "Ticks" column
/// of a dynamic type.</summary>
public class DynamicTypeTicksDefinition
{
    [JsonProperty(PropertyName = "hasTicks", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public bool HasTicks;
    [JsonProperty(PropertyName = "name", NullValueHandling = NullValueHandling.Ignore)]
    public string Name;
    [JsonProperty(PropertyName = "type", NullValueHandling = NullValueHandling.Ignore)]
    public string Type;
}
/// <summary>JSON DTO for an MList property's table mapping; each field
/// corresponds to the Signum attribute named in the adjacent comment.</summary>
public class DynamicTypeBackMListDefinition
{
    //TableNameAttribute
    [JsonProperty(PropertyName = "tableName", NullValueHandling = NullValueHandling.Ignore)]
    public string TableName;
    //PreserveOrderAttribute
    [JsonProperty(PropertyName = "preserveOrder")]
    public bool PreserveOrder;
    [JsonProperty(PropertyName = "orderName", NullValueHandling = NullValueHandling.Ignore)]
    public string OrderName;
    //
    //BackReferenceColumnNameAttribute
    [JsonProperty(PropertyName = "backReferenceName", NullValueHandling = NullValueHandling.Ignore)]
    public string BackReferenceName;
}
/// <summary>Root JSON DTO of a dynamic type definition: entity metadata,
/// properties, operations and custom code hooks. Stored serialized in
/// <see cref="DynamicTypeEntity.TypeDefinition"/>.</summary>
public class DynamicTypeDefinition
{
    [JsonProperty(PropertyName = "entityKind", NullValueHandling = NullValueHandling.Ignore)]
    [JsonConverter(typeof(StringEnumConverter))]
    public EntityKind? EntityKind;
    [JsonProperty(PropertyName = "entityData", NullValueHandling = NullValueHandling.Ignore)]
    [JsonConverter(typeof(StringEnumConverter))]
    public EntityData? EntityData;
    [JsonProperty(PropertyName = "tableName", NullValueHandling = NullValueHandling.Ignore)]
    public string TableName;
    [JsonProperty(PropertyName = "primaryKey", NullValueHandling = NullValueHandling.Ignore)]
    public DynamicTypePrimaryKeyDefinition PrimaryKey;
    [JsonProperty(PropertyName = "ticks", NullValueHandling = NullValueHandling.Ignore)]
    public DynamicTypeTicksDefinition Ticks;
    // Declared properties; validated in DynamicTypeEntity.PropertyValidation.
    [JsonProperty(PropertyName = "properties")]
    public List<DynamicProperty> Properties;
    // Optional operation bodies (C# snippets) for the generated logic.
    [JsonProperty(PropertyName = "operationCreate")]
    public OperationConstruct OperationCreate;
    [JsonProperty(PropertyName = "operationSave")]
    public OperationExecute OperationSave;
    [JsonProperty(PropertyName = "operationDelete")]
    public OperationDelete OperationDelete;
    [JsonProperty(PropertyName = "operationClone")]
    public OperationConstructFrom OperationClone;
    // Custom code hooks spliced into the generated entity/logic files.
    [JsonProperty(PropertyName = "customInheritance")]
    public DynamicTypeCustomCode CustomInheritance;
    [JsonProperty(PropertyName = "customEntityMembers")]
    public DynamicTypeCustomCode CustomEntityMembers;
    [JsonProperty(PropertyName = "customStartCode")]
    public DynamicTypeCustomCode CustomStartCode;
    [JsonProperty(PropertyName = "customLogicMembers")]
    public DynamicTypeCustomCode CustomLogicMembers;
    [JsonProperty(PropertyName = "customTypes")]
    public DynamicTypeCustomCode CustomTypes;
    [JsonProperty(PropertyName = "customBeforeSchema")]
    public DynamicTypeCustomCode CustomBeforeSchema;
    [JsonProperty(PropertyName = "queryFields")]
    public List<string> QueryFields;
    [JsonProperty(PropertyName = "multiColumnUniqueIndex")]
    public MultiColumnUniqueIndex MultiColumnUniqueIndex;
    [JsonProperty(PropertyName = "toStringExpression", NullValueHandling = NullValueHandling.Ignore)]
    public string ToStringExpression;
}
/// <summary>JSON DTO for a composite unique index with an optional
/// filter (Where) expression.</summary>
public class MultiColumnUniqueIndex
{
    [JsonProperty(PropertyName = "fields")]
    public List<string> Fields;
    [JsonProperty(PropertyName = "where")]
    public string Where;
}
/// <summary>JSON DTO holding the C# body of a Construct operation.</summary>
public class OperationConstruct
{
    [JsonProperty(PropertyName = "construct")]
    public string Construct;
}
/// <summary>JSON DTO holding the C# bodies of an Execute operation
/// (precondition and action).</summary>
public class OperationExecute
{
    [JsonProperty(PropertyName = "canExecute")]
    public string CanExecute;
    [JsonProperty(PropertyName = "execute")]
    public string Execute;
}
/// <summary>JSON DTO holding the C# bodies of a Delete operation
/// (precondition and action).</summary>
public class OperationDelete
{
    [JsonProperty(PropertyName = "canDelete")]
    public string CanDelete;
    [JsonProperty(PropertyName = "delete")]
    public string Delete;
}
/// <summary>JSON DTO holding the C# bodies of a ConstructFrom operation
/// (precondition and constructor).</summary>
public class OperationConstructFrom
{
    [JsonProperty(PropertyName = "canConstruct")]
    public string CanConstruct;
    [JsonProperty(PropertyName = "construct")]
    public string Construct;
}
/// <summary>JSON DTO wrapping a free-form C# code snippet.</summary>
public class DynamicTypeCustomCode
{
    [JsonProperty(PropertyName = "code")]
    public string Code;
}
/// <summary>Base class the generated dynamic type inherits from.</summary>
public enum DynamicBaseType
{
    Entity,
    MixinEntity,
    EmbeddedEntity,
    ModelEntity,
}
/// <summary>JSON DTO describing one property of a dynamic type: its CLR
/// type, column mapping, validators and extra attributes.</summary>
public class DynamicProperty
{
    // Stable identifier, independent of renames.
    [JsonProperty(PropertyName = "uid")]
    public string UID;
    [JsonProperty(PropertyName = "name")]
    public string Name;
    [JsonProperty(PropertyName = "columnName", NullValueHandling = NullValueHandling.Ignore)]
    public string ColumnName;
    [JsonProperty(PropertyName = "type")]
    public string Type;
    [JsonProperty(PropertyName = "columnType", NullValueHandling = NullValueHandling.Ignore)]
    public string ColumnType;
    [JsonProperty(PropertyName = "isNullable")]
    public IsNullable IsNullable;
    // NOTE(review): unlike the other value-typed fields this one uses
    // DefaultValueHandling.Include — presumably deliberate so "no" is
    // always explicit in the JSON; confirm before changing.
    [JsonProperty(PropertyName = "uniqueIndex", DefaultValueHandling = DefaultValueHandling.Include)]
    public UniqueIndex UniqueIndex;
    [JsonProperty(PropertyName = "isLite", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public bool IsLite;
    // Non-null when the property is an MList; carries its table mapping.
    [JsonProperty(PropertyName = "isMList", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public DynamicTypeBackMListDefinition IsMList;
    [JsonProperty(PropertyName = "size", NullValueHandling = NullValueHandling.Ignore)]
    public int? Size;
    [JsonProperty(PropertyName = "scale", NullValueHandling = NullValueHandling.Ignore)]
    public int? Scale;
    [JsonProperty(PropertyName = "unit", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public string Unit;
    [JsonProperty(PropertyName = "format", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public string Format;
    [JsonProperty(PropertyName = "notifyChanges", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public bool NotifyChanges;
    [JsonProperty(PropertyName = "validators", NullValueHandling = NullValueHandling.Ignore)]
    public List<DynamicValidator> Validators;
    // Raw attribute text spliced onto the generated field / property.
    [JsonProperty(PropertyName = "customFieldAttributes", NullValueHandling = NullValueHandling.Ignore)]
    public string CustomFieldAttributes;
    [JsonProperty(PropertyName = "customPropertyAttributes", NullValueHandling = NullValueHandling.Ignore)]
    public string CustomPropertyAttributes;
}
/// <summary>Nullability of a dynamic property; OnlyInMemory means the
/// column is non-nullable but the CLR property allows null.</summary>
public enum IsNullable
{
    Yes,
    OnlyInMemory,
    No,
}
/// <summary>Unique-index setting of a dynamic property; YesAllowNull
/// permits multiple null values under the unique index.</summary>
public enum UniqueIndex
{
    No,
    Yes,
    YesAllowNull,
}
// Polymorphic JSON reader for DynamicValidator: reads the "type"
// discriminator, instantiates the matching validator subclass and
// populates it from the same JSON object. Read-only converter.
class DynamicValidatorConverter : JsonConverter
{
    public override bool CanConvert(Type objectType) => objectType == typeof(DynamicValidator);

    public override object? ReadJson(JsonReader reader, Type objectType, object? existingValue, JsonSerializer serializer)
    {
        JObject json = JObject.Load(reader);
        var discriminator = json.Property("type")!.Value.Value<string>();
        Type concreteType = DynamicValidator.GetDynamicValidatorType(discriminator);
        object result = Activator.CreateInstance(concreteType)!;
        serializer.Populate(json.CreateReader(), result);
        return result;
    }

    // Writing falls back to default serialization of the concrete type.
    public override bool CanWrite => false;

    public override void WriteJson(JsonWriter writer, object? value, JsonSerializer serializer)
    {
        throw new NotImplementedException();
    }
}
/// <summary>
/// Base class of validator descriptors stored in the JSON type definition.
/// Deserialized polymorphically by <see cref="DynamicValidatorConverter"/>
/// using the "type" discriminator; each subclass renders its attribute
/// arguments for code generation via <see cref="ExtraArguments"/>.
/// </summary>
[JsonConverter(typeof(DynamicValidatorConverter))]
public class DynamicValidator
{
    // Discriminator naming the concrete validator ("NotNull", "StringLength", ...).
    [JsonProperty(PropertyName = "type")]
    public string Type;
    // Maps a discriminator value to its validator class; unknown values
    // fall back to the plain base DynamicValidator.
    public static Type GetDynamicValidatorType(string type)
    {
        switch (type)
        {
            case "NotNull": return typeof(NotNull);
            case "StringLength": return typeof(StringLength);
            case "Decimals": return typeof(Decimals);
            case "NumberIs": return typeof(NumberIs);
            case "CountIs": return typeof(CountIs);
            case "NumberBetween": return typeof(NumberBetween);
            case "DateTimePrecision": return typeof(DateTimePrecision);
            case "TimeSpanPrecision": return typeof(TimeSpanPrecision);
            case "StringCase": return typeof(StringCase);
            default: return typeof(DynamicValidator);
        }
    }
    // Extra arguments for the generated validator attribute; null means
    // the attribute takes none.
    public virtual string? ExtraArguments()
    {
        return null;
    }
    // Renders a value as a C# literal for code generation. decimal is
    // widened to double first — presumably so CSharpRenderer emits a plain
    // numeric literal (TODO confirm).
    string Value(object obj)
    {
        if (obj is decimal)
            obj = (double)(decimal)obj;
        return CSharpRenderer.Value(obj);
    }
    public class NotNull : DynamicValidator
    {
        [JsonProperty(PropertyName = "disabled", DefaultValueHandling = DefaultValueHandling.Ignore)]
        public bool Disabled;
        public override string? ExtraArguments()
        {
            return new string?[]
            {
                Disabled ? "Disabled=true" : null,
            }.NotNull().ToString(", ");
        }
    }
    public class StringLength : DynamicValidator
    {
        [JsonProperty(PropertyName = "multiLine", DefaultValueHandling = DefaultValueHandling.Ignore)]
        public bool MultiLine;
        [JsonProperty(PropertyName = "min", NullValueHandling = NullValueHandling.Ignore)]
        public int? Min;
        [JsonProperty(PropertyName = "max", NullValueHandling = NullValueHandling.Ignore)]
        public int? Max;
        [JsonProperty(PropertyName = "allowLeadingSpaces", NullValueHandling = NullValueHandling.Ignore)]
        public bool? AllowLeadingSpaces;
        [JsonProperty(PropertyName = "allowTrailingSpaces", NullValueHandling = NullValueHandling.Ignore)]
        public bool? AllowTrailingSpaces;
        public override string? ExtraArguments()
        {
            // Only the explicitly-set options are emitted, comma-separated.
            return new string?[]
            {
                MultiLine ? "MultiLine=true" : null,
                Min.HasValue ? "Min=" + Value(Min.Value) : null,
                Max.HasValue ? "Max=" + Value(Max.Value) : null,
                AllowLeadingSpaces.HasValue ? "AllowLeadingSpaces=" + Value(AllowLeadingSpaces.Value) : null,
                AllowTrailingSpaces.HasValue ? "AllowTrailingSpaces=" + Value(AllowTrailingSpaces.Value) : null,
            }.NotNull().ToString(", ");
        }
    }
    public class Decimals : DynamicValidator
    {
        [JsonProperty(PropertyName = "decimalPlaces")]
        public int DecimalPlaces;
        public override string? ExtraArguments()
        {
            return Value(DecimalPlaces);
        }
    }
    public class NumberIs : DynamicValidator
    {
        [JsonProperty(PropertyName = "comparisonType")]
        public ComparisonType ComparisonType;
        [JsonProperty(PropertyName = "number")]
        public decimal Number;
        public override string? ExtraArguments()
        {
            return Value(ComparisonType) + ", " + Value(Number);
        }
    }
    public class CountIs : DynamicValidator
    {
        [JsonProperty(PropertyName = "comparisonType")]
        public ComparisonType ComparisonType;
        [JsonProperty(PropertyName = "number")]
        public decimal Number;
        public override string? ExtraArguments()
        {
            return Value(ComparisonType) + ", " + Value(Number);
        }
    }
    public class NumberBetween : DynamicValidator
    {
        [JsonProperty(PropertyName = "min")]
        public decimal Min;
        [JsonProperty(PropertyName = "max")]
        public decimal Max;
        public override string? ExtraArguments()
        {
            return Value(Min) + ", " + Value(Max);
        }
    }
    public class DateTimePrecision : DynamicValidator
    {
        [JsonProperty(PropertyName = "precision")]
        public Signum.Utilities.DateTimePrecision Precision;
        public override string? ExtraArguments()
        {
            return Value(Precision);
        }
    }
    public class TimeSpanPrecision : DynamicValidator
    {
        // Note: reuses DateTimePrecision as the precision enum.
        [JsonProperty(PropertyName = "precision")]
        public Signum.Utilities.DateTimePrecision Precision;
        public override string? ExtraArguments()
        {
            return Value(Precision);
        }
    }
    public class StringCase : DynamicValidator
    {
        [JsonProperty(PropertyName = "textCase")]
        public Signum.Entities.StringCase TextCase;
        public override string? ExtraArguments()
        {
            return Value(TextCase);
        }
    }
}
}
| |
using System;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Metadata;
using Microsoft.EntityFrameworkCore.Migrations;
using Decorator.Data;
namespace Decorator.Data.Migrations
{
// EF Core model snapshot for ApplicationDbContext: captures the model
// shape (the standard ASP.NET Core Identity tables plus ApplicationUser)
// so migrations can diff against it.
// NOTE(review): this class is normally regenerated by the EF tooling;
// comments added here will be lost on the next `ef migrations` run.
[DbContext(typeof(ApplicationDbContext))]
partial class ApplicationDbContextModelSnapshot : ModelSnapshot
{
    protected override void BuildModel(ModelBuilder modelBuilder)
    {
        modelBuilder
            .HasAnnotation("ProductVersion", "1.0.0-rc2-20901");
        // Identity role store.
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityRole", b =>
            {
                b.Property<string>("Id");
                b.Property<string>("ConcurrencyStamp")
                    .IsConcurrencyToken();
                b.Property<string>("Name")
                    .HasAnnotation("MaxLength", 256);
                b.Property<string>("NormalizedName")
                    .HasAnnotation("MaxLength", 256);
                b.HasKey("Id");
                b.HasIndex("NormalizedName")
                    .HasName("RoleNameIndex");
                b.ToTable("AspNetRoles");
            });
        // Claims attached to roles.
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityRoleClaim<string>", b =>
            {
                b.Property<int>("Id")
                    .ValueGeneratedOnAdd();
                b.Property<string>("ClaimType");
                b.Property<string>("ClaimValue");
                b.Property<string>("RoleId")
                    .IsRequired();
                b.HasKey("Id");
                b.HasIndex("RoleId");
                b.ToTable("AspNetRoleClaims");
            });
        // Claims attached to users.
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityUserClaim<string>", b =>
            {
                b.Property<int>("Id")
                    .ValueGeneratedOnAdd();
                b.Property<string>("ClaimType");
                b.Property<string>("ClaimValue");
                b.Property<string>("UserId")
                    .IsRequired();
                b.HasKey("Id");
                b.HasIndex("UserId");
                b.ToTable("AspNetUserClaims");
            });
        // External login providers linked to users.
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityUserLogin<string>", b =>
            {
                b.Property<string>("LoginProvider");
                b.Property<string>("ProviderKey");
                b.Property<string>("ProviderDisplayName");
                b.Property<string>("UserId")
                    .IsRequired();
                b.HasKey("LoginProvider", "ProviderKey");
                b.HasIndex("UserId");
                b.ToTable("AspNetUserLogins");
            });
        // User/role join table.
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityUserRole<string>", b =>
            {
                b.Property<string>("UserId");
                b.Property<string>("RoleId");
                b.HasKey("UserId", "RoleId");
                b.HasIndex("RoleId");
                b.HasIndex("UserId");
                b.ToTable("AspNetUserRoles");
            });
        // Per-user tokens (e.g. 2FA recovery codes).
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityUserToken<string>", b =>
            {
                b.Property<string>("UserId");
                b.Property<string>("LoginProvider");
                b.Property<string>("Name");
                b.Property<string>("Value");
                b.HasKey("UserId", "LoginProvider", "Name");
                b.ToTable("AspNetUserTokens");
            });
        // Application's user entity (extends IdentityUser).
        modelBuilder.Entity("Decorator.Models.ApplicationUser", b =>
            {
                b.Property<string>("Id");
                b.Property<int>("AccessFailedCount");
                b.Property<string>("ConcurrencyStamp")
                    .IsConcurrencyToken();
                b.Property<string>("Email")
                    .HasAnnotation("MaxLength", 256);
                b.Property<bool>("EmailConfirmed");
                b.Property<bool>("LockoutEnabled");
                b.Property<DateTimeOffset?>("LockoutEnd");
                b.Property<string>("NormalizedEmail")
                    .HasAnnotation("MaxLength", 256);
                b.Property<string>("NormalizedUserName")
                    .HasAnnotation("MaxLength", 256);
                b.Property<string>("PasswordHash");
                b.Property<string>("PhoneNumber");
                b.Property<bool>("PhoneNumberConfirmed");
                b.Property<string>("SecurityStamp");
                b.Property<bool>("TwoFactorEnabled");
                b.Property<string>("UserName")
                    .HasAnnotation("MaxLength", 256);
                b.HasKey("Id");
                b.HasIndex("NormalizedEmail")
                    .HasName("EmailIndex");
                b.HasIndex("NormalizedUserName")
                    .HasName("UserNameIndex");
                b.ToTable("AspNetUsers");
            });
        // Foreign-key relationships (all cascade-delete).
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityRoleClaim<string>", b =>
            {
                b.HasOne("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityRole")
                    .WithMany()
                    .HasForeignKey("RoleId")
                    .OnDelete(DeleteBehavior.Cascade);
            });
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityUserClaim<string>", b =>
            {
                b.HasOne("Decorator.Models.ApplicationUser")
                    .WithMany()
                    .HasForeignKey("UserId")
                    .OnDelete(DeleteBehavior.Cascade);
            });
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityUserLogin<string>", b =>
            {
                b.HasOne("Decorator.Models.ApplicationUser")
                    .WithMany()
                    .HasForeignKey("UserId")
                    .OnDelete(DeleteBehavior.Cascade);
            });
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityUserRole<string>", b =>
            {
                b.HasOne("Microsoft.AspNetCore.Identity.EntityFrameworkCore.IdentityRole")
                    .WithMany()
                    .HasForeignKey("RoleId")
                    .OnDelete(DeleteBehavior.Cascade);
                b.HasOne("Decorator.Models.ApplicationUser")
                    .WithMany()
                    .HasForeignKey("UserId")
                    .OnDelete(DeleteBehavior.Cascade);
            });
    }
}
}
| |
// This file was created automatically, do not modify the contents of this file.
// ReSharper disable InvalidXmlDocComment
// ReSharper disable InconsistentNaming
// ReSharper disable CheckNamespace
// ReSharper disable MemberCanBePrivate.Global
using System;
using System.Runtime.InteropServices;
// Source file C:\Program Files\Epic Games\UE_4.22\Engine\Source\Runtime\Engine\Classes\GameFramework\HUD.h:36
namespace UnrealEngine
{
[ManageType("ManageHUD")]
public partial class ManageHUD : AHUD, IManageWrapper
{
public ManageHUD(IntPtr adress)
: base(adress)
{
}
#region DLLInmport
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_DrawHUD(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_DrawSafeZoneOverlay(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_HandleBugScreenShot(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_NextDebugTarget(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_NotifyBindPostProcessEffects(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_OnLostFocusPause(IntPtr self, bool bEnable);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_PostRender(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_PreviousDebugTarget(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_ShowDebugInfo(IntPtr self, float yL, float yPos);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_ShowHUD(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_BeginPlay(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_ClearCrossLevelReferences(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_Destroyed(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_ForceNetRelevant(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_ForceNetUpdate(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_GatherCurrentMovement(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_InvalidateLightingCacheDetailed(IntPtr self, bool bTranslationOnly);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_K2_DestroyActor(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_LifeSpanExpired(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_MarkComponentsAsPendingKill(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_NotifyActorBeginCursorOver(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_NotifyActorEndCursorOver(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_OnRep_AttachmentReplication(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_OnRep_Instigator(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_OnRep_Owner(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_OnRep_ReplicatedMovement(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_OnRep_ReplicateMovement(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_OnReplicationPausedChanged(IntPtr self, bool bIsReplicationPaused);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_OutsideWorldBounds(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_PostActorCreated(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_PostInitializeComponents(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_PostNetInit(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_PostNetReceiveLocationAndRotation(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_PostNetReceivePhysicState(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_PostNetReceiveRole(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_PostRegisterAllComponents(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_PostUnregisterAllComponents(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_PreInitializeComponents(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_PreRegisterAllComponents(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_PrestreamTextures(IntPtr self, float seconds, bool bEnableStreaming, int cinematicTextureGroups);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_RegisterActorTickFunctions(IntPtr self, bool bRegister);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_RegisterAllComponents(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_ReregisterAllComponents(IntPtr self);
[DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
private static extern void E__Supper__AHUD_RerunConstructionScripts(IntPtr self);
        // P/Invoke entry points into the native UnrealDotNet runtime. Each extern maps onto
        // the corresponding native implementation on the C++ side; the IntPtr 'self'
        // parameter is the wrapped native object pointer of this HUD instance.
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_Reset(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_RewindForReplay(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_SetActorHiddenInGame(IntPtr self, bool bNewHidden);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_SetLifeSpan(IntPtr self, float inLifespan);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_SetReplicateMovement(IntPtr self, bool bInReplicateMovement);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_TearOff(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_TeleportSucceeded(IntPtr self, bool bIsATest);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_Tick(IntPtr self, float deltaSeconds);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_TornOff(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_UnregisterAllComponents(IntPtr self, bool bForReregister);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_BeginDestroy(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_FinishDestroy(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_MarkAsEditorOnlySubobject(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_PostCDOContruct(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_PostEditImport(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_PostInitProperties(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_PostLoad(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_PostNetReceive(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_PostRepNotifies(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_PostSaveRoot(IntPtr self, bool bCleanupIsRequired);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_PreDestroyFromReplication(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_PreNetReceive(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_ShutdownAfterError(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_CreateCluster(IntPtr self);
        [DllImport(NativeManager.UnrealDotNetDll, CallingConvention = CallingConvention.Cdecl)]
        private static extern void E__Supper__AHUD_OnClusterMarkedAsPendingKill(IntPtr self);
        #endregion
        #region Methods
        /// <summary>
        /// The Main Draw loop for the hud. Gets called before any messaging. Should be subclassed
        /// </summary>
        public override void DrawHUD()
            => E__Supper__AHUD_DrawHUD(this);
        /// <summary>
        /// Draw the safe zone debugging overlay when enabled
        /// </summary>
        public override void DrawSafeZoneOverlay()
            => E__Supper__AHUD_DrawSafeZoneOverlay(this);
        /// <summary>
        /// Gives the HUD a chance to display project-specific data when taking a "bug" screen shot.
        /// </summary>
        public override void HandleBugScreenShot()
            => E__Supper__AHUD_HandleBugScreenShot(this);
        public override void NextDebugTarget()
            => E__Supper__AHUD_NextDebugTarget(this);
        /// <summary>
        /// Called in PostInitializeComponents or when the postprocessing chain has changed (this can happen because the world properties can define their own chain and that one is set late).
        /// </summary>
        public override void NotifyBindPostProcessEffects()
            => E__Supper__AHUD_NotifyBindPostProcessEffects(this);
        /// <summary>
        /// Pauses or unpauses the game due to main window's focus being lost.
        /// </summary>
        /// <param name="bEnable">tells whether to enable or disable the pause state</param>
        public override void OnLostFocusPause(bool bEnable)
            => E__Supper__AHUD_OnLostFocusPause(this, bEnable);
        /// <summary>
        /// PostRender is the main draw loop.
        /// </summary>
        public override void PostRender()
            => E__Supper__AHUD_PostRender(this);
        public override void PreviousDebugTarget()
            => E__Supper__AHUD_PreviousDebugTarget(this);
        /// <summary>
        /// Entry point for basic debug rendering on the HUD. Activated and controlled via the "showdebug" console command.
        /// <para>Can be overridden to display custom debug per-game. </para>
        /// </summary>
        public override void ShowDebugInfo(float yL, float yPos)
            => E__Supper__AHUD_ShowDebugInfo(this, yL, yPos);
        public override void ShowHUD()
            => E__Supper__AHUD_ShowHUD(this);
        /// <summary>
        /// Overridable native event for when play begins for this actor.
        /// </summary>
        protected override void BeginPlay()
            => E__Supper__AHUD_BeginPlay(this);
        /// <summary>
        /// Do anything needed to clear out cross level references; Called from ULevel::PreSave
        /// </summary>
        public override void ClearCrossLevelReferences()
            => E__Supper__AHUD_ClearCrossLevelReferences(this);
        /// <summary>
        /// Called when this actor is explicitly being destroyed during gameplay or in the editor, not called during level streaming or gameplay ending
        /// </summary>
        public override void Destroyed()
            => E__Supper__AHUD_Destroyed(this);
        /// <summary>
        /// Forces this actor to be net relevant if it is not already by default
        /// </summary>
        public override void ForceNetRelevant()
            => E__Supper__AHUD_ForceNetRelevant(this);
        /// <summary>
        /// Force actor to be updated to clients/demo net drivers
        /// </summary>
        public override void ForceNetUpdate()
            => E__Supper__AHUD_ForceNetUpdate(this);
        /// <summary>
        /// Fills ReplicatedMovement property
        /// </summary>
        public override void GatherCurrentMovement()
            => E__Supper__AHUD_GatherCurrentMovement(this);
        /// <summary>
        /// Invalidates anything produced by the last lighting build.
        /// </summary>
        public override void InvalidateLightingCacheDetailed(bool bTranslationOnly)
            => E__Supper__AHUD_InvalidateLightingCacheDetailed(this, bTranslationOnly);
        /// <summary>
        /// Destroy the actor
        /// </summary>
        public override void DestroyActor()
            => E__Supper__AHUD_K2_DestroyActor(this);
        /// <summary>
        /// Called when the lifespan of an actor expires (if it has one).
        /// </summary>
        public override void LifeSpanExpired()
            => E__Supper__AHUD_LifeSpanExpired(this);
        /// <summary>
        /// Called to mark all components as pending kill when the actor is being destroyed
        /// </summary>
        public override void MarkComponentsAsPendingKill()
            => E__Supper__AHUD_MarkComponentsAsPendingKill(this);
        /// <summary>
        /// Event when this actor has the mouse moved over it with the clickable interface.
        /// </summary>
        public override void NotifyActorBeginCursorOver()
            => E__Supper__AHUD_NotifyActorBeginCursorOver(this);
        /// <summary>
        /// Event when this actor has the mouse moved off of it with the clickable interface.
        /// </summary>
        public override void NotifyActorEndCursorOver()
            => E__Supper__AHUD_NotifyActorEndCursorOver(this);
        public override void OnRep_AttachmentReplication()
            => E__Supper__AHUD_OnRep_AttachmentReplication(this);
        public override void OnRep_Instigator()
            => E__Supper__AHUD_OnRep_Instigator(this);
        protected override void OnRep_Owner()
            => E__Supper__AHUD_OnRep_Owner(this);
        public override void OnRep_ReplicatedMovement()
            => E__Supper__AHUD_OnRep_ReplicatedMovement(this);
        public override void OnRep_ReplicateMovement()
            => E__Supper__AHUD_OnRep_ReplicateMovement(this);
        /// <summary>
        /// Called on the client when the replication paused value is changed
        /// </summary>
        public override void OnReplicationPausedChanged(bool bIsReplicationPaused)
            => E__Supper__AHUD_OnReplicationPausedChanged(this, bIsReplicationPaused);
        /// <summary>
        /// Called when the Actor is outside the hard limit on world bounds
        /// </summary>
        public override void OutsideWorldBounds()
            => E__Supper__AHUD_OutsideWorldBounds(this);
        /// <summary>
        /// Called when an actor is done spawning into the world (from UWorld::SpawnActor), both in the editor and during gameplay
        /// <para>For actors with a root component, the location and rotation will have already been set. </para>
        /// This is called before calling construction scripts, but after native components have been created
        /// </summary>
        public override void PostActorCreated()
            => E__Supper__AHUD_PostActorCreated(this);
        /// <summary>
        /// Allow actors to initialize themselves on the C++ side after all of their components have been initialized, only called during gameplay
        /// </summary>
        public override void PostInitializeComponents()
            => E__Supper__AHUD_PostInitializeComponents(this);
        /// <summary>
        /// Always called immediately after spawning and reading in replicated properties
        /// </summary>
        public override void PostNetInit()
            => E__Supper__AHUD_PostNetInit(this);
        /// <summary>
        /// Update location and rotation from ReplicatedMovement. Not called for simulated physics!
        /// </summary>
        public override void PostNetReceiveLocationAndRotation()
            => E__Supper__AHUD_PostNetReceiveLocationAndRotation(this);
        /// <summary>
        /// Update and smooth simulated physic state, replaces PostNetReceiveLocation() and PostNetReceiveVelocity()
        /// </summary>
        public override void PostNetReceivePhysicState()
            => E__Supper__AHUD_PostNetReceivePhysicState(this);
        /// <summary>
        /// Always called immediately after a new Role is received from the remote.
        /// </summary>
        public override void PostNetReceiveRole()
            => E__Supper__AHUD_PostNetReceiveRole(this);
        /// <summary>
        /// Called after all the components in the Components array are registered, called both in editor and during gameplay
        /// </summary>
        public override void PostRegisterAllComponents()
            => E__Supper__AHUD_PostRegisterAllComponents(this);
        /// <summary>
        /// Called after all currently registered components are cleared
        /// </summary>
        public override void PostUnregisterAllComponents()
            => E__Supper__AHUD_PostUnregisterAllComponents(this);
        /// <summary>
        /// Called right before components are initialized, only called during gameplay
        /// </summary>
        public override void PreInitializeComponents()
            => E__Supper__AHUD_PreInitializeComponents(this);
        /// <summary>
        /// Called before all the components in the Components array are registered, called both in editor and during gameplay
        /// </summary>
        public override void PreRegisterAllComponents()
            => E__Supper__AHUD_PreRegisterAllComponents(this);
        /// <summary>
        /// Calls PrestreamTextures() for all the actor's meshcomponents.
        /// </summary>
        /// <param name="seconds">Number of seconds to force all mip-levels to be resident</param>
        /// <param name="bEnableStreaming">Whether to start (true) or stop (false) streaming</param>
        /// <param name="cinematicTextureGroups">Bitfield indicating which texture groups that use extra high-resolution mips</param>
        public override void PrestreamTextures(float seconds, bool bEnableStreaming, int cinematicTextureGroups)
            => E__Supper__AHUD_PrestreamTextures(this, seconds, bEnableStreaming, cinematicTextureGroups);
        /// <summary>
        /// Virtual call chain to register all tick functions for the actor class hierarchy
        /// </summary>
        /// <param name="bRegister">true to register, false, to unregister</param>
        protected override void RegisterActorTickFunctions(bool bRegister)
            => E__Supper__AHUD_RegisterActorTickFunctions(this, bRegister);
        /// <summary>
        /// Ensure that all the components in the Components array are registered
        /// </summary>
        public override void RegisterAllComponents()
            => E__Supper__AHUD_RegisterAllComponents(this);
        /// <summary>
        /// Will reregister all components on this actor. Does a lot of work - should only really be used in editor, generally use UpdateComponentTransforms or MarkComponentsRenderStateDirty.
        /// </summary>
        public override void ReregisterAllComponents()
            => E__Supper__AHUD_ReregisterAllComponents(this);
        /// <summary>
        /// Rerun construction scripts, destroying all autogenerated components; will attempt to preserve the root component location.
        /// </summary>
        public override void RerunConstructionScripts()
            => E__Supper__AHUD_RerunConstructionScripts(this);
        /// <summary>
        /// Reset actor to initial state - used when restarting level without reloading.
        /// </summary>
        public override void Reset()
            => E__Supper__AHUD_Reset(this);
        /// <summary>
        /// Called on the actor before checkpoint data is applied during a replay.
        /// <para>Only called if bReplayRewindable is set. </para>
        /// </summary>
        public override void RewindForReplay()
            => E__Supper__AHUD_RewindForReplay(this);
        /// <summary>
        /// Sets the actor to be hidden in the game
        /// </summary>
        /// <param name="bNewHidden">Whether or not to hide the actor and all its components</param>
        public override void SetActorHiddenInGame(bool bNewHidden)
            => E__Supper__AHUD_SetActorHiddenInGame(this, bNewHidden);
        /// <summary>
        /// Set the lifespan of this actor. When it expires the object will be destroyed. If requested lifespan is 0, the timer is cleared and the actor will not be destroyed.
        /// </summary>
        public override void SetLifeSpan(float inLifespan)
            => E__Supper__AHUD_SetLifeSpan(this, inLifespan);
        /// <summary>
        /// Set whether this actor's movement replicates to network clients.
        /// </summary>
        /// <param name="bInReplicateMovement">Whether this Actor's movement replicates to clients.</param>
        public override void SetReplicateMovement(bool bInReplicateMovement)
            => E__Supper__AHUD_SetReplicateMovement(this, bInReplicateMovement);
        /// <summary>
        /// Networking - Server - TearOff this actor to stop replication to clients. Will set bTearOff to true.
        /// </summary>
        public override void TearOff()
            => E__Supper__AHUD_TearOff(this);
        /// <summary>
        /// Called from TeleportTo() when teleport succeeds
        /// </summary>
        public override void TeleportSucceeded(bool bIsATest)
            => E__Supper__AHUD_TeleportSucceeded(this, bIsATest);
        /// <summary>
        /// Function called every frame on this Actor. Override this function to implement custom logic to be executed every frame.
        /// <para>Note that Tick is disabled by default, and you will need to check PrimaryActorTick.bCanEverTick is set to true to enable it. </para>
        /// </summary>
        /// <param name="deltaSeconds">Game time elapsed during last frame modified by the time dilation</param>
        public override void Tick(float deltaSeconds)
            => E__Supper__AHUD_Tick(this, deltaSeconds);
        /// <summary>
        /// Networking - called on client when actor is torn off (bTearOff==true), meaning it's no longer replicated to clients.
        /// <para>@see bTearOff </para>
        /// </summary>
        public override void TornOff()
            => E__Supper__AHUD_TornOff(this);
        /// <summary>
        /// Unregister all currently registered components
        /// </summary>
        /// <param name="bForReregister">If true, RegisterAllComponents will be called immediately after this so some slow operations can be avoided</param>
        public override void UnregisterAllComponents(bool bForReregister)
            => E__Supper__AHUD_UnregisterAllComponents(this, bForReregister);
        /// <summary>
        /// Called before destroying the object. This is called immediately upon deciding to destroy the object, to allow the object to begin an
        /// <para>asynchronous cleanup process. </para>
        /// </summary>
        public override void BeginDestroy()
            => E__Supper__AHUD_BeginDestroy(this);
        /// <summary>
        /// Called to finish destroying the object. After UObject::FinishDestroy is called, the object's memory should no longer be accessed.
        /// <para>@warning Because properties are destroyed here, Super::FinishDestroy() should always be called at the end of your child class's FinishDestroy() method, rather than at the beginning. </para>
        /// </summary>
        public override void FinishDestroy()
            => E__Supper__AHUD_FinishDestroy(this);
        /// <summary>
        /// Called during subobject creation to mark this component as editor only, which causes it to get stripped in packaged builds
        /// </summary>
        public override void MarkAsEditorOnlySubobject()
            => E__Supper__AHUD_MarkAsEditorOnlySubobject(this);
        /// <summary>
        /// Called after the C++ constructor has run on the CDO for a class. This is an obscure routine used to deal with the recursion
        /// <para>in the construction of the default materials </para>
        /// </summary>
        public override void PostCDOContruct()
            => E__Supper__AHUD_PostCDOContruct(this);
        /// <summary>
        /// Called after importing property values for this object (paste, duplicate or .t3d import)
        /// <para>Allow the object to perform any cleanup for properties which shouldn't be duplicated or </para>
        /// are unsupported by the script serialization
        /// </summary>
        public override void PostEditImport()
            => E__Supper__AHUD_PostEditImport(this);
        /// <summary>
        /// Called after the C++ constructor and after the properties have been initialized, including those loaded from config.
        /// <para>This is called before any serialization or other setup has happened. </para>
        /// </summary>
        public override void PostInitProperties()
            => E__Supper__AHUD_PostInitProperties(this);
        /// <summary>
        /// Do any object-specific cleanup required immediately after loading an object.
        /// <para>This is not called for newly-created objects, and by default will always execute on the game thread. </para>
        /// </summary>
        public override void PostLoad()
            => E__Supper__AHUD_PostLoad(this);
        /// <summary>
        /// Called right after receiving a bunch
        /// </summary>
        public override void PostNetReceive()
            => E__Supper__AHUD_PostNetReceive(this);
        /// <summary>
        /// Called right after calling all OnRep notifies (called even when there are no notifies)
        /// </summary>
        public override void PostRepNotifies()
            => E__Supper__AHUD_PostRepNotifies(this);
        /// <summary>
        /// Called from within SavePackage on the passed in base/root object.
        /// <para>This function is called after the package has been saved and can perform cleanup. </para>
        /// </summary>
        /// <param name="bCleanupIsRequired">Whether PreSaveRoot dirtied state that needs to be cleaned up</param>
        public override void PostSaveRoot(bool bCleanupIsRequired)
            => E__Supper__AHUD_PostSaveRoot(this, bCleanupIsRequired);
        /// <summary>
        /// Called right before being marked for destruction due to network replication
        /// </summary>
        public override void PreDestroyFromReplication()
            => E__Supper__AHUD_PreDestroyFromReplication(this);
        /// <summary>
        /// Called right before receiving a bunch
        /// </summary>
        public override void PreNetReceive()
            => E__Supper__AHUD_PreNetReceive(this);
        /// <summary>
        /// After a critical error, perform any mission-critical cleanup, such as restoring the video mode or releasing hardware resources.
        /// </summary>
        public override void ShutdownAfterError()
            => E__Supper__AHUD_ShutdownAfterError(this);
        /// <summary>
        /// Called after PostLoad to create UObject cluster
        /// </summary>
        public override void CreateCluster()
            => E__Supper__AHUD_CreateCluster(this);
        /// <summary>
        /// Called during Garbage Collection to perform additional cleanup when the cluster is about to be destroyed due to PendingKill flag being set on it.
        /// </summary>
        public override void OnClusterMarkedAsPendingKill()
            => E__Supper__AHUD_OnClusterMarkedAsPendingKill(this);
        #endregion
public static implicit operator IntPtr(ManageHUD self)
{
return self?.NativePointer ?? IntPtr.Zero;
}
public static implicit operator ManageHUD(ObjectPointerDescription PtrDesc)
{
return NativeManager.GetWrapper<ManageHUD>(PtrDesc);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Reflection;
namespace System.Runtime.Serialization
{
    /// <summary>
    /// Deserialization context used when plain shared-contract behavior is not enough:
    /// it adds support for serialization surrogate providers, preserved object references,
    /// and the shared-type mode of NetDataContractSerializer (which throws
    /// PlatformNotSupportedException on this platform).
    /// </summary>
    internal class XmlObjectSerializerReadContextComplex : XmlObjectSerializerReadContext
    {
        // Set from DataContractSerializer.PreserveObjectReferences; consulted by GetArraySize().
        private bool _preserveObjectReferences;
        // SharedContract when created by DataContractSerializer; other values route into the
        // shared-type code paths below.
        private SerializationMode _mode;
        // Optional provider that substitutes surrogate types during deserialization; may be null.
        private ISerializationSurrogateProvider _serializationSurrogateProvider;
        // Constructor used for DataContractSerializer (shared-contract mode).
        internal XmlObjectSerializerReadContextComplex(DataContractSerializer serializer, DataContract rootTypeDataContract, DataContractResolver dataContractResolver)
            : base(serializer, rootTypeDataContract, dataContractResolver)
        {
            _mode = SerializationMode.SharedContract;
            _preserveObjectReferences = serializer.PreserveObjectReferences;
            _serializationSurrogateProvider = serializer.SerializationSurrogateProvider;
        }
        // Constructor used for other XmlObjectSerializer-derived serializers; leaves _mode at its
        // default and sets no surrogate provider.
        internal XmlObjectSerializerReadContextComplex(XmlObjectSerializer serializer, int maxItemsInObjectGraph, StreamingContext streamingContext, bool ignoreExtensionDataObject)
            : base(serializer, maxItemsInObjectGraph, streamingContext, ignoreExtensionDataObject)
        {
        }
        internal override SerializationMode Mode
        {
            get { return _mode; }
        }
        // Deserializes a value whose declared type is identified by a pre-computed type ID and
        // runtime type handle; dispatches on mode and surrogate availability.
        internal override object InternalDeserialize(XmlReaderDelegator xmlReader, int declaredTypeID, RuntimeTypeHandle declaredTypeHandle, string name, string ns)
        {
            if (_mode == SerializationMode.SharedContract)
            {
                if (_serializationSurrogateProvider == null)
                    return base.InternalDeserialize(xmlReader, declaredTypeID, declaredTypeHandle, name, ns);
                else
                    return InternalDeserializeWithSurrogate(xmlReader, Type.GetTypeFromHandle(declaredTypeHandle), null /*surrogateDataContract*/, name, ns);
            }
            else
            {
                return InternalDeserializeInSharedTypeMode(xmlReader, declaredTypeID, Type.GetTypeFromHandle(declaredTypeHandle), name, ns);
            }
        }
        // Deserializes a value whose declared type is known only as a Type instance (no type ID).
        internal override object InternalDeserialize(XmlReaderDelegator xmlReader, Type declaredType, string name, string ns)
        {
            if (_mode == SerializationMode.SharedContract)
            {
                if (_serializationSurrogateProvider == null)
                    return base.InternalDeserialize(xmlReader, declaredType, name, ns);
                else
                    return InternalDeserializeWithSurrogate(xmlReader, declaredType, null /*surrogateDataContract*/, name, ns);
            }
            else
            {
                return InternalDeserializeInSharedTypeMode(xmlReader, -1, declaredType, name, ns);
            }
        }
        // Deserializes a value for which a DataContract has already been resolved by the caller.
        internal override object InternalDeserialize(XmlReaderDelegator xmlReader, Type declaredType, DataContract dataContract, string name, string ns)
        {
            if (_mode == SerializationMode.SharedContract)
            {
                if (_serializationSurrogateProvider == null)
                    return base.InternalDeserialize(xmlReader, declaredType, dataContract, name, ns);
                else
                    return InternalDeserializeWithSurrogate(xmlReader, declaredType, dataContract, name, ns);
            }
            else
            {
                return InternalDeserializeInSharedTypeMode(xmlReader, -1, declaredType, name, ns);
            }
        }
        // Shared-type deserialization: resolves the data contract from the CLR assembly/type
        // names carried in the XML attributes, throwing descriptive serialization exceptions
        // when either name is missing or cannot be resolved.
        private object InternalDeserializeInSharedTypeMode(XmlReaderDelegator xmlReader, int declaredTypeID, Type declaredType, string name, string ns)
        {
            object retObj = null;
            if (TryHandleNullOrRef(xmlReader, declaredType, name, ns, ref retObj))
                return retObj;
            DataContract dataContract;
            string assemblyName = attributes.ClrAssembly;
            string typeName = attributes.ClrType;
            if (assemblyName != null && typeName != null)
            {
                Assembly assembly;
                Type type;
                dataContract = ResolveDataContractInSharedTypeMode(assemblyName, typeName, out assembly, out type);
                if (dataContract == null)
                {
                    if (assembly == null)
                        throw XmlObjectSerializer.CreateSerializationException(SR.Format(SR.AssemblyNotFound, assemblyName));
                    if (type == null)
                        throw XmlObjectSerializer.CreateSerializationException(SR.Format(SR.ClrTypeNotFound, assembly.FullName, typeName));
                }
                //Array covariance is not supported in XSD. If declared type is array, data is sent in format of base array
                if (declaredType != null && declaredType.IsArray)
                    dataContract = (declaredTypeID < 0) ? GetDataContract(declaredType) : GetDataContract(declaredTypeID, declaredType.TypeHandle);
            }
            else
            {
                // Only one of the two CLR identity attributes (or neither) was present; report
                // which required attribute is missing, falling back to the declared type when possible.
                if (assemblyName != null)
                    throw XmlObjectSerializer.CreateSerializationException(XmlObjectSerializer.TryAddLineInfo(xmlReader, SR.Format(SR.AttributeNotFound, Globals.SerializationNamespace, Globals.ClrTypeLocalName, xmlReader.NodeType, xmlReader.NamespaceURI, xmlReader.LocalName)));
                else if (typeName != null)
                    throw XmlObjectSerializer.CreateSerializationException(XmlObjectSerializer.TryAddLineInfo(xmlReader, SR.Format(SR.AttributeNotFound, Globals.SerializationNamespace, Globals.ClrAssemblyLocalName, xmlReader.NodeType, xmlReader.NamespaceURI, xmlReader.LocalName)));
                else if (declaredType == null)
                    throw XmlObjectSerializer.CreateSerializationException(XmlObjectSerializer.TryAddLineInfo(xmlReader, SR.Format(SR.AttributeNotFound, Globals.SerializationNamespace, Globals.ClrTypeLocalName, xmlReader.NodeType, xmlReader.NamespaceURI, xmlReader.LocalName)));
                dataContract = (declaredTypeID < 0) ? GetDataContract(declaredType) : GetDataContract(declaredTypeID, declaredType.TypeHandle);
            }
            return ReadDataContractValue(dataContract, xmlReader);
        }
        // Deserializes via the surrogate provider: reads the surrogate representation, then maps
        // it back to the declared type and swaps the mapped object into the object-id table.
        private object InternalDeserializeWithSurrogate(XmlReaderDelegator xmlReader, Type declaredType, DataContract surrogateDataContract, string name, string ns)
        {
            DataContract dataContract = surrogateDataContract ??
                GetDataContract(DataContractSurrogateCaller.GetDataContractType(_serializationSurrogateProvider, declaredType));
            if (this.IsGetOnlyCollection && dataContract.UnderlyingType != declaredType)
            {
                throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidDataContractException(SR.Format(SR.SurrogatesWithGetOnlyCollectionsNotSupportedSerDeser, DataContract.GetClrTypeFullName(declaredType))));
            }
            ReadAttributes(xmlReader);
            string objectId = GetObjectId();
            object oldObj = InternalDeserialize(xmlReader, name, ns, declaredType, ref dataContract);
            object obj = DataContractSurrogateCaller.GetDeserializedObject(_serializationSurrogateProvider, oldObj, dataContract.UnderlyingType, declaredType);
            // ensure later references by id resolve to the mapped object, not the surrogate
            ReplaceDeserializedObject(objectId, oldObj, obj);
            return obj;
        }
        private Type ResolveDataContractTypeInSharedTypeMode(string assemblyName, string typeName, out Assembly assembly)
        {
            // The method is used only when _mode == SerializationMode.SharedType.
            // _mode is set to SerializationMode.SharedType only when the context is for NetDataContractSerializer.
            throw new PlatformNotSupportedException(SR.PlatformNotSupported_NetDataContractSerializer);
        }
        // Resolves a contract from CLR assembly/type names; returns null when the type cannot be
        // resolved (currently always unreachable past the PlatformNotSupportedException above).
        private DataContract ResolveDataContractInSharedTypeMode(string assemblyName, string typeName, out Assembly assembly, out Type type)
        {
            type = ResolveDataContractTypeInSharedTypeMode(assemblyName, typeName, out assembly);
            if (type != null)
            {
                return GetDataContract(type);
            }
            return null;
        }
        // In shared-contract mode defers to the base resolver; in shared-type mode resolves from
        // the CLR identity attributes when both are present, otherwise returns null.
        protected override DataContract ResolveDataContractFromTypeName()
        {
            if (_mode == SerializationMode.SharedContract)
            {
                return base.ResolveDataContractFromTypeName();
            }
            else
            {
                if (attributes.ClrAssembly != null && attributes.ClrType != null)
                {
                    Assembly assembly;
                    Type type;
                    return ResolveDataContractInSharedTypeMode(attributes.ClrAssembly, attributes.ClrType, out assembly, out type);
                }
            }
            return null;
        }
        // With a surrogate provider present, serializability is checked against the surrogate
        // type (using the element type for arrays); otherwise defers to the base implementation.
        internal override void CheckIfTypeSerializable(Type memberType, bool isMemberTypeSerializable)
        {
            if (_serializationSurrogateProvider != null)
            {
                while (memberType.IsArray)
                    memberType = memberType.GetElementType();
                memberType = DataContractSurrogateCaller.GetDataContractType(_serializationSurrogateProvider, memberType);
                if (!DataContract.IsTypeSerializable(memberType))
                    throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidDataContractException(SR.Format(SR.TypeNotSerializable, memberType)));
                return;
            }
            base.CheckIfTypeSerializable(memberType, isMemberTypeSerializable);
        }
        // Maps a declared type to its surrogate type (after unwrapping Nullable<T>); surrogate
        // substitution is rejected for get-only collections.
        internal override Type GetSurrogatedType(Type type)
        {
            if (_serializationSurrogateProvider == null)
            {
                return base.GetSurrogatedType(type);
            }
            else
            {
                type = DataContract.UnwrapNullableType(type);
                Type surrogateType = DataContractSerializer.GetSurrogatedType(_serializationSurrogateProvider, type);
                if (this.IsGetOnlyCollection && surrogateType != type)
                {
                    throw System.Runtime.Serialization.DiagnosticUtility.ExceptionUtility.ThrowHelperError(new InvalidDataContractException(SR.Format(SR.SurrogatesWithGetOnlyCollectionsNotSupportedSerDeser,
                        DataContract.GetClrTypeFullName(type))));
                }
                else
                {
                    return surrogateType;
                }
            }
        }
#if USE_REFEMIT
        public override int GetArraySize()
#else
        internal override int GetArraySize()
#endif
        {
            // The array size read from the attributes is only honored when object references
            // are preserved; otherwise -1 signals "size unknown".
            return _preserveObjectReferences ? attributes.ArraySZSize : -1;
        }
    }
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.CodeAnalysis.Versions;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.SolutionCrawler
{
internal partial class SolutionCrawlerRegistrationService
{
private partial class WorkCoordinator
{
            // minimum back-off delay (milliseconds) for the worker queues
            private const int MinimumDelayInMS = 50;
            private readonly Registration _registration;
            private readonly LogAggregator _logAggregator;
            private readonly IAsynchronousOperationListener _listener;
            private readonly IOptionService _optionService;
            // signalled from Shutdown to cancel all outstanding queued work
            private readonly CancellationTokenSource _shutdownNotificationSource;
            private readonly CancellationToken _shutdownToken;
            private readonly SimpleTaskQueue _eventProcessingQueue;
            // points to processor task
            private readonly IncrementalAnalyzerProcessor _documentAndProjectWorkerProcessor;
            private readonly SemanticChangeProcessor _semanticChangeProcessor;
            /// <summary>
            /// Wires up the coordinator: creates the event/worker queues, the incremental
            /// analyzer processor and the semantic change processor, then subscribes to
            /// workspace and option-change events.
            /// </summary>
            public WorkCoordinator(
                 IAsynchronousOperationListener listener,
                 IEnumerable<Lazy<IIncrementalAnalyzerProvider, IncrementalAnalyzerProviderMetadata>> analyzerProviders,
                 Registration registration)
            {
                _logAggregator = new LogAggregator();
                _registration = registration;
                _listener = listener;
                _optionService = _registration.GetService<IOptionService>();
                // event and worker queues
                _shutdownNotificationSource = new CancellationTokenSource();
                _shutdownToken = _shutdownNotificationSource.Token;
                _eventProcessingQueue = new SimpleTaskQueue(TaskScheduler.Default);
                // back-off timings are read from internal options so they can be tuned
                var activeFileBackOffTimeSpanInMS = _optionService.GetOption(InternalSolutionCrawlerOptions.ActiveFileWorkerBackOffTimeSpanInMS);
                var allFilesWorkerBackOffTimeSpanInMS = _optionService.GetOption(InternalSolutionCrawlerOptions.AllFilesWorkerBackOffTimeSpanInMS);
                var entireProjectWorkerBackOffTimeSpanInMS = _optionService.GetOption(InternalSolutionCrawlerOptions.EntireProjectWorkerBackOffTimeSpanInMS);
                _documentAndProjectWorkerProcessor = new IncrementalAnalyzerProcessor(
                    listener, analyzerProviders, _registration,
                    activeFileBackOffTimeSpanInMS, allFilesWorkerBackOffTimeSpanInMS, entireProjectWorkerBackOffTimeSpanInMS, _shutdownToken);
                var semanticBackOffTimeSpanInMS = _optionService.GetOption(InternalSolutionCrawlerOptions.SemanticChangeBackOffTimeSpanInMS);
                var projectBackOffTimeSpanInMS = _optionService.GetOption(InternalSolutionCrawlerOptions.ProjectPropagationBackOffTimeSpanInMS);
                _semanticChangeProcessor = new SemanticChangeProcessor(listener, _registration, _documentAndProjectWorkerProcessor, semanticBackOffTimeSpanInMS, projectBackOffTimeSpanInMS, _shutdownToken);
                // if option is on
                if (_optionService.GetOption(InternalSolutionCrawlerOptions.SolutionCrawler))
                {
                    _registration.Workspace.WorkspaceChanged += OnWorkspaceChanged;
                    _registration.Workspace.DocumentOpened += OnDocumentOpened;
                    _registration.Workspace.DocumentClosed += OnDocumentClosed;
                }
                // subscribe to option changed event after all required fields are set
                // otherwise, we can get null exception when running OnOptionChanged handler
                _optionService.OptionChanged += OnOptionChanged;
            }
            // identifies this coordinator instance in crawler logging
            public int CorrelationId => _registration.CorrelationId;
public void AddAnalyzer(IIncrementalAnalyzer analyzer, bool highPriorityForActiveFile)
{
// add analyzer
_documentAndProjectWorkerProcessor.AddAnalyzer(analyzer, highPriorityForActiveFile);
// and ask to re-analyze whole solution for the given analyzer
var set = _registration.CurrentSolution.Projects.SelectMany(p => p.DocumentIds).ToSet();
Reanalyze(analyzer, set);
}
            /// <summary>
            /// Tears the coordinator down: unsubscribes from all events, cancels pending
            /// work and, when <paramref name="blockingShutdown"/> is true, waits up to
            /// five seconds for the processor tasks to drain.
            /// </summary>
            public void Shutdown(bool blockingShutdown)
            {
                _optionService.OptionChanged -= OnOptionChanged;
                // detach from the workspace
                _registration.Workspace.WorkspaceChanged -= OnWorkspaceChanged;
                _registration.Workspace.DocumentOpened -= OnDocumentOpened;
                _registration.Workspace.DocumentClosed -= OnDocumentClosed;
                // cancel any pending blocks
                _shutdownNotificationSource.Cancel();
                _documentAndProjectWorkerProcessor.Shutdown();
                SolutionCrawlerLogger.LogWorkCoordinatorShutdown(CorrelationId, _logAggregator);
                if (blockingShutdown)
                {
                    var shutdownTask = Task.WhenAll(
                        _eventProcessingQueue.LastScheduledTask,
                        _documentAndProjectWorkerProcessor.AsyncProcessorTask,
                        _semanticChangeProcessor.AsyncProcessorTask);
                    // bounded wait so shutdown can never hang indefinitely
                    shutdownTask.Wait(TimeSpan.FromSeconds(5));
                    if (!shutdownTask.IsCompleted)
                    {
                        SolutionCrawlerLogger.LogWorkCoordinatorShutdownTimeout(CorrelationId);
                    }
                }
            }
private void OnOptionChanged(object sender, OptionChangedEventArgs e)
{
    if (e.Option != InternalSolutionCrawlerOptions.SolutionCrawler)
    {
        // Not the crawler on/off switch: let individual analyzers decide
        // whether this option change warrants re-analysis.
        ReanalyzeOnOptionChange(sender, e);
        return;
    }

    // The crawler itself got toggled: attach or detach the workspace listeners.
    var enabled = (bool)e.Value;
    var workspace = _registration.Workspace;

    if (enabled)
    {
        workspace.WorkspaceChanged += OnWorkspaceChanged;
        workspace.DocumentOpened += OnDocumentOpened;
        workspace.DocumentClosed += OnDocumentClosed;
    }
    else
    {
        workspace.WorkspaceChanged -= OnWorkspaceChanged;
        workspace.DocumentOpened -= OnDocumentOpened;
        workspace.DocumentClosed -= OnDocumentClosed;
    }

    SolutionCrawlerLogger.LogOptionChanged(CorrelationId, enabled);
}
private void ReanalyzeOnOptionChange(object sender, OptionChangedEventArgs e)
{
    // Set of every document id in the current solution, built lazily and only
    // if at least one analyzer actually asks for re-analysis.
    ISet<DocumentId> allDocuments = null;

    foreach (var analyzer in _documentAndProjectWorkerProcessor.Analyzers)
    {
        if (!analyzer.NeedsReanalysisOnOptionChanged(sender, e))
        {
            continue;
        }

        if (allDocuments == null)
        {
            allDocuments = _registration.CurrentSolution.Projects.SelectMany(p => p.DocumentIds).ToSet();
        }

        Reanalyze(analyzer, allDocuments);
    }
}
/// <summary>
/// Schedules a re-analysis of the given documents by the given analyzer.
/// </summary>
public void Reanalyze(IIncrementalAnalyzer analyzer, ISet<DocumentId> documentIds, bool highPriority = false)
{
    var asyncToken = _listener.BeginAsyncOperation("Reanalyze");
    var scheduled = _eventProcessingQueue.ScheduleTask(
        () => EnqueueWorkItemAsync(analyzer, documentIds, highPriority), _shutdownToken);
    scheduled.CompletesAsyncOperation(asyncToken);

    // Only bulk requests (fix-all, suppress-all, option changes, ...) are worth
    // logging; single-document requests happen routinely (e.g. venus typing).
    if (documentIds?.Count > 1)
    {
        SolutionCrawlerLogger.LogReanalyze(CorrelationId, analyzer, documentIds, highPriority);
    }
}
// Workspace event entry point: dispatches the change and swallows cancellations
// that came from our own shutdown token, rethrowing everything else.
private void OnWorkspaceChanged(object sender, WorkspaceChangeEventArgs args)
{
// guard us from cancellation
try
{
ProcessEvents(args, _listener.BeginAsyncOperation("OnWorkspaceChanged"));
}
catch (OperationCanceledException oce)
{
if (NotOurShutdownToken(oce))
{
// NOTE(review): NotOurShutdownToken currently compares with '==';
// verify its polarity matches its name before relying on this rethrow.
throw;
}
// it is our cancellation, ignore
}
catch (AggregateException ae)
{
ae = ae.Flatten();
// If we had a mix of exceptions, don't eat it
// (the Cast below is safe: the first Any already rethrew if any inner
// exception was not an OperationCanceledException)
if (ae.InnerExceptions.Any(e => !(e is OperationCanceledException)) ||
ae.InnerExceptions.Cast<OperationCanceledException>().Any(NotOurShutdownToken))
{
// We had a cancellation with a different token, so don't eat it
throw;
}
// it is our cancellation, ignore
}
}
/// <summary>
/// Returns true when the given cancellation did NOT originate from this
/// coordinator's own shutdown token, i.e. it is someone else's cancellation
/// and must be propagated rather than swallowed.
/// </summary>
private bool NotOurShutdownToken(OperationCanceledException oce)
{
    // Bug fix: this used '==', which made the method return true exactly when
    // the token WAS our shutdown token - the inverse of its name. Callers use
    // it to decide whether to rethrow, so the inverted check let our own
    // shutdown cancellation escape while foreign cancellations were eaten.
    return oce.CancellationToken != _shutdownToken;
}
// Routes a workspace change to the document-, project- or solution-level handler.
private void ProcessEvents(WorkspaceChangeEventArgs args, IAsyncToken asyncToken)
{
    SolutionCrawlerLogger.LogWorkspaceEvent(_logAggregator, (int)args.Kind);

    // TODO: add telemetry that records how long processing an event takes (max, min, average, etc.)
    var kind = args.Kind;
    switch (kind)
    {
        // Document-level events, including additional (non-source) documents.
        case WorkspaceChangeKind.DocumentAdded:
        case WorkspaceChangeKind.DocumentReloaded:
        case WorkspaceChangeKind.DocumentChanged:
        case WorkspaceChangeKind.DocumentRemoved:
        case WorkspaceChangeKind.AdditionalDocumentAdded:
        case WorkspaceChangeKind.AdditionalDocumentRemoved:
        case WorkspaceChangeKind.AdditionalDocumentChanged:
        case WorkspaceChangeKind.AdditionalDocumentReloaded:
            ProcessDocumentEvent(args, asyncToken);
            break;

        // Project-level events.
        case WorkspaceChangeKind.ProjectAdded:
        case WorkspaceChangeKind.ProjectChanged:
        case WorkspaceChangeKind.ProjectReloaded:
        case WorkspaceChangeKind.ProjectRemoved:
            ProcessProjectEvent(args, asyncToken);
            break;

        // Solution-level events.
        case WorkspaceChangeKind.SolutionAdded:
        case WorkspaceChangeKind.SolutionChanged:
        case WorkspaceChangeKind.SolutionReloaded:
        case WorkspaceChangeKind.SolutionRemoved:
        case WorkspaceChangeKind.SolutionCleared:
            ProcessSolutionEvent(args, asyncToken);
            break;

        default:
            throw ExceptionUtilities.UnexpectedValue(kind);
    }
}
private void OnDocumentOpened(object sender, DocumentEventArgs e)
{
    // Schedule the enqueue on the serial event queue; the async token keeps the
    // operation listener aware of the outstanding work.
    var asyncToken = _listener.BeginAsyncOperation("OnDocumentOpened");
    var scheduled = _eventProcessingQueue.ScheduleTask(
        () => EnqueueWorkItemAsync(e.Document, InvocationReasons.DocumentOpened), _shutdownToken);
    scheduled.CompletesAsyncOperation(asyncToken);
}
private void OnDocumentClosed(object sender, DocumentEventArgs e)
{
    // Same scheduling pattern as OnDocumentOpened, with the "closed" reason.
    var asyncToken = _listener.BeginAsyncOperation("OnDocumentClosed");
    var scheduled = _eventProcessingQueue.ScheduleTask(
        () => EnqueueWorkItemAsync(e.Document, InvocationReasons.DocumentClosed), _shutdownToken);
    scheduled.CompletesAsyncOperation(asyncToken);
}
// Enqueues work for a single document-level workspace change.
private void ProcessDocumentEvent(WorkspaceChangeEventArgs e, IAsyncToken asyncToken)
{
switch (e.Kind)
{
case WorkspaceChangeKind.DocumentAdded:
// an added document only exists in the new snapshot
EnqueueEvent(e.NewSolution, e.DocumentId, InvocationReasons.DocumentAdded, asyncToken);
break;
case WorkspaceChangeKind.DocumentRemoved:
// a removed document only exists in the old snapshot
EnqueueEvent(e.OldSolution, e.DocumentId, InvocationReasons.DocumentRemoved, asyncToken);
break;
case WorkspaceChangeKind.DocumentReloaded:
case WorkspaceChangeKind.DocumentChanged:
// content change: old and new snapshots are diffed to narrow down the work
EnqueueEvent(e.OldSolution, e.NewSolution, e.DocumentId, asyncToken);
break;
case WorkspaceChangeKind.AdditionalDocumentAdded:
case WorkspaceChangeKind.AdditionalDocumentRemoved:
case WorkspaceChangeKind.AdditionalDocumentChanged:
case WorkspaceChangeKind.AdditionalDocumentReloaded:
// If an additional file has changed we need to reanalyze the entire project.
EnqueueEvent(e.NewSolution, e.ProjectId, InvocationReasons.AdditionalDocumentChanged, asyncToken);
break;
default:
throw ExceptionUtilities.UnexpectedValue(e.Kind);
}
}
// Enqueues work for a project-level workspace change.
private void ProcessProjectEvent(WorkspaceChangeEventArgs e, IAsyncToken asyncToken)
{
switch (e.Kind)
{
case WorkspaceChangeKind.ProjectAdded:
// preload semantic versions, then treat every document as added
OnProjectAdded(e.NewSolution.GetProject(e.ProjectId));
EnqueueEvent(e.NewSolution, e.ProjectId, InvocationReasons.DocumentAdded, asyncToken);
break;
case WorkspaceChangeKind.ProjectRemoved:
// the removed project only exists in the old snapshot
EnqueueEvent(e.OldSolution, e.ProjectId, InvocationReasons.DocumentRemoved, asyncToken);
break;
case WorkspaceChangeKind.ProjectChanged:
case WorkspaceChangeKind.ProjectReloaded:
// diff the old and new snapshots to narrow down the work
EnqueueEvent(e.OldSolution, e.NewSolution, e.ProjectId, asyncToken);
break;
default:
throw ExceptionUtilities.UnexpectedValue(e.Kind);
}
}
// Enqueues work for a solution-level workspace change.
private void ProcessSolutionEvent(WorkspaceChangeEventArgs e, IAsyncToken asyncToken)
{
switch (e.Kind)
{
case WorkspaceChangeKind.SolutionAdded:
// preload semantic versions, then treat every document as added
OnSolutionAdded(e.NewSolution);
EnqueueEvent(e.NewSolution, InvocationReasons.DocumentAdded, asyncToken);
break;
case WorkspaceChangeKind.SolutionRemoved:
EnqueueEvent(e.OldSolution, InvocationReasons.SolutionRemoved, asyncToken);
break;
case WorkspaceChangeKind.SolutionCleared:
EnqueueEvent(e.OldSolution, InvocationReasons.DocumentRemoved, asyncToken);
break;
case WorkspaceChangeKind.SolutionChanged:
case WorkspaceChangeKind.SolutionReloaded:
// diff the old and new snapshots to narrow down the work
EnqueueEvent(e.OldSolution, e.NewSolution, asyncToken);
break;
default:
throw ExceptionUtilities.UnexpectedValue(e.Kind);
}
}
private void OnSolutionAdded(Solution solution)
{
    var asyncToken = _listener.BeginAsyncOperation("OnSolutionAdded");
    _eventProcessingQueue.ScheduleTask(() =>
    {
        // Preload semantic version information for the new solution, if the
        // workspace provides a tracking service.
        var trackingService = solution.Workspace.Services.GetService<ISemanticVersionTrackingService>();
        trackingService?.LoadInitialSemanticVersions(solution);
    }, _shutdownToken).CompletesAsyncOperation(asyncToken);
}
private void OnProjectAdded(Project project)
{
    var asyncToken = _listener.BeginAsyncOperation("OnProjectAdded");
    _eventProcessingQueue.ScheduleTask(() =>
    {
        // Preload semantic version information for the new project, if the
        // workspace provides a tracking service.
        var trackingService = project.Solution.Workspace.Services.GetService<ISemanticVersionTrackingService>();
        trackingService?.LoadInitialSemanticVersions(project);
    }, _shutdownToken).CompletesAsyncOperation(asyncToken);
}
// Solution changed/reloaded: diff the two snapshots on the serial queue.
private void EnqueueEvent(Solution oldSolution, Solution newSolution, IAsyncToken asyncToken)
{
    var scheduled = _eventProcessingQueue.ScheduleTask(
        () => EnqueueWorkItemAsync(oldSolution, newSolution), _shutdownToken);
    scheduled.CompletesAsyncOperation(asyncToken);
}
// Whole-solution event: enqueue every project of the snapshot with the given reasons.
private void EnqueueEvent(Solution solution, InvocationReasons invocationReasons, IAsyncToken asyncToken)
{
    var scheduled = _eventProcessingQueue.ScheduleTask(
        () => EnqueueWorkItemForSolutionAsync(solution, invocationReasons), _shutdownToken);
    scheduled.CompletesAsyncOperation(asyncToken);
}
// Project changed/reloaded: diff the project between the two snapshots.
private void EnqueueEvent(Solution oldSolution, Solution newSolution, ProjectId projectId, IAsyncToken asyncToken)
{
    var scheduled = _eventProcessingQueue.ScheduleTask(
        () => EnqueueWorkItemAfterDiffAsync(oldSolution, newSolution, projectId), _shutdownToken);
    scheduled.CompletesAsyncOperation(asyncToken);
}
// Project-level event: enqueue every document of the project with the given reasons.
private void EnqueueEvent(Solution solution, ProjectId projectId, InvocationReasons invocationReasons, IAsyncToken asyncToken)
{
    var scheduled = _eventProcessingQueue.ScheduleTask(
        () => EnqueueWorkItemForProjectAsync(solution, projectId, invocationReasons), _shutdownToken);
    scheduled.CompletesAsyncOperation(asyncToken);
}
// Document-level event: enqueue the single document with the given reasons.
private void EnqueueEvent(Solution solution, DocumentId documentId, InvocationReasons invocationReasons, IAsyncToken asyncToken)
{
    var scheduled = _eventProcessingQueue.ScheduleTask(
        () => EnqueueWorkItemForDocumentAsync(solution, documentId, invocationReasons), _shutdownToken);
    scheduled.CompletesAsyncOperation(asyncToken);
}
// Document changed: the special case — diff the document between snapshots.
private void EnqueueEvent(Solution oldSolution, Solution newSolution, DocumentId documentId, IAsyncToken asyncToken)
{
    var scheduled = _eventProcessingQueue.ScheduleTask(
        () => EnqueueWorkItemAfterDiffAsync(oldSolution, newSolution, documentId), _shutdownToken);
    scheduled.CompletesAsyncOperation(asyncToken);
}
// Creates and enqueues a work item for a single document; optionally narrows
// the work down to the changed member.
private async Task EnqueueWorkItemAsync(Document document, InvocationReasons invocationReasons, SyntaxNode changedMember = null)
{
    // Bail out immediately if we are shutting down.
    _shutdownToken.ThrowIfCancellationRequested();

    var priorityService = document.GetLanguageService<IWorkCoordinatorPriorityService>();
    var lowPriority = priorityService != null && await priorityService.IsLowPriorityAsync(document, _shutdownToken).ConfigureAwait(false);

    var memberPath = GetSyntaxPath(changedMember);

    // Calls to this method are serialized and only this method does the writing.
    _documentAndProjectWorkerProcessor.Enqueue(
        new WorkItem(document.Id, document.Project.Language, invocationReasons,
            lowPriority, memberPath, _listener.BeginAsyncOperation("WorkItem")));

    // For semantic changes, also feed the semantic-change planner. We hand it the
    // Document itself so the snapshot stays alive for the p2p dependency-graph
    // computation later (this can keep the solution alive a bit longer than usual).
    if (invocationReasons.Contains(PredefinedInvocationReasons.SemanticChanged))
    {
        _semanticChangeProcessor.Enqueue(document, memberPath);
    }
}
// Builds a SyntaxPath for the changed member, or null when there is none.
// A syntax path is somewhat expensive (created on every keystroke), but it is
// the only way we have to track a node between two different trees, even
// incrementally parsed ones.
private SyntaxPath GetSyntaxPath(SyntaxNode changedMember)
{
    return changedMember == null ? null : new SyntaxPath(changedMember);
}
// Enqueues every document of the given project with the same reasons.
private async Task EnqueueWorkItemAsync(Project project, InvocationReasons invocationReasons)
{
    foreach (var id in project.DocumentIds)
    {
        await EnqueueWorkItemAsync(project.GetDocument(id), invocationReasons).ConfigureAwait(false);
    }
}
// Enqueues re-analysis work items, targeted at a specific analyzer, for the
// given documents in the current solution snapshot.
private async Task EnqueueWorkItemAsync(IIncrementalAnalyzer analyzer, IEnumerable<DocumentId> documentIds, bool highPriority)
{
    var solution = _registration.CurrentSolution;

    // The reason is the same for every document, so compute it once up front.
    var reasons = highPriority ? InvocationReasons.ReanalyzeHighPriority : InvocationReasons.Reanalyze;

    foreach (var documentId in documentIds)
    {
        var document = solution.GetDocument(documentId);
        if (document == null)
        {
            // The document may have been removed since the request was made.
            continue;
        }

        var priorityService = document.GetLanguageService<IWorkCoordinatorPriorityService>();
        var lowPriority = priorityService != null && await priorityService.IsLowPriorityAsync(document, _shutdownToken).ConfigureAwait(false);

        _documentAndProjectWorkerProcessor.Enqueue(
            new WorkItem(documentId, document.Project.Language, reasons,
                lowPriority, analyzer, _listener.BeginAsyncOperation("WorkItem")));
    }
}
// Diffs two solution snapshots and enqueues work for added, changed and
// removed projects.
private async Task EnqueueWorkItemAsync(Solution oldSolution, Solution newSolution)
{
    var changes = newSolution.GetChanges(oldSolution);

    // TODO: Async version for GetXXX methods?
    foreach (var added in changes.GetAddedProjects())
    {
        await EnqueueWorkItemAsync(added, InvocationReasons.DocumentAdded).ConfigureAwait(false);
    }

    foreach (var projectChanges in changes.GetProjectChanges())
    {
        await EnqueueWorkItemAsync(projectChanges).ConfigureAwait(false);
    }

    foreach (var removed in changes.GetRemovedProjects())
    {
        await EnqueueWorkItemAsync(removed, InvocationReasons.DocumentRemoved).ConfigureAwait(false);
    }
}
// Enqueues work for everything that changed within one project.
private async Task EnqueueWorkItemAsync(ProjectChanges projectChanges)
{
    // Project-wide configuration changes (parse options, references, ...) first.
    await EnqueueProjectConfigurationChangeWorkItemAsync(projectChanges).ConfigureAwait(false);

    foreach (var id in projectChanges.GetAddedDocuments())
    {
        await EnqueueWorkItemAsync(projectChanges.NewProject.GetDocument(id), InvocationReasons.DocumentAdded).ConfigureAwait(false);
    }

    foreach (var id in projectChanges.GetChangedDocuments())
    {
        // Changed documents are diffed old-vs-new to narrow down the work.
        await EnqueueWorkItemAsync(projectChanges.OldProject.GetDocument(id), projectChanges.NewProject.GetDocument(id)).ConfigureAwait(false);
    }

    foreach (var id in projectChanges.GetRemovedDocuments())
    {
        await EnqueueWorkItemAsync(projectChanges.OldProject.GetDocument(id), InvocationReasons.DocumentRemoved).ConfigureAwait(false);
    }
}
// Detects project-level configuration changes (parse options, references,
// compilation options, name, analyzer options) and enqueues the whole project
// with the appropriate reasons when any are found.
private async Task EnqueueProjectConfigurationChangeWorkItemAsync(ProjectChanges projectChanges)
{
    var oldProject = projectChanges.OldProject;
    var newProject = projectChanges.NewProject;

    // TODO: why do solution changes return Project not ProjectId, while ProjectChanges returns DocumentId not Document?
    var reasons = InvocationReasons.Empty;

    if (!object.Equals(oldProject.ParseOptions, newProject.ParseOptions))
    {
        reasons = reasons.With(InvocationReasons.ProjectParseOptionChanged);
    }

    var referencesChanged =
        projectChanges.GetAddedMetadataReferences().Any() ||
        projectChanges.GetAddedProjectReferences().Any() ||
        projectChanges.GetAddedAnalyzerReferences().Any() ||
        projectChanges.GetRemovedMetadataReferences().Any() ||
        projectChanges.GetRemovedProjectReferences().Any() ||
        projectChanges.GetRemovedAnalyzerReferences().Any();

    var settingsChanged =
        !object.Equals(oldProject.CompilationOptions, newProject.CompilationOptions) ||
        !object.Equals(oldProject.AssemblyName, newProject.AssemblyName) ||
        !object.Equals(oldProject.Name, newProject.Name) ||
        !object.Equals(oldProject.AnalyzerOptions, newProject.AnalyzerOptions);

    if (referencesChanged || settingsChanged)
    {
        reasons = reasons.With(InvocationReasons.ProjectConfigurationChanged);
    }

    if (!reasons.IsEmpty)
    {
        await EnqueueWorkItemAsync(newProject, reasons).ConfigureAwait(false);
    }
}
// Diffs old and new versions of a document and enqueues the narrowed change.
private async Task EnqueueWorkItemAsync(Document oldDocument, Document newDocument)
{
    var differenceService = newDocument.GetLanguageService<IDocumentDifferenceService>();
    if (differenceService != null)
    {
        // The language supports syntactic diffing: narrow the change down to
        // the affected member when possible.
        var difference = await differenceService.GetDifferenceAsync(oldDocument, newDocument, _shutdownToken).ConfigureAwait(false);
        if (difference != null)
        {
            await EnqueueWorkItemAsync(newDocument, difference.ChangeType, difference.ChangedMember).ConfigureAwait(false);
        }

        return;
    }

    // Languages that don't use a Roslyn syntax tree don't export a document
    // difference service; treat the whole document as changed.
    await EnqueueWorkItemAsync(newDocument, InvocationReasons.DocumentChanged).ConfigureAwait(false);
}
// Resolves the document in the given snapshot and enqueues it.
private Task EnqueueWorkItemForDocumentAsync(Solution solution, DocumentId documentId, InvocationReasons invocationReasons)
{
    return EnqueueWorkItemAsync(solution.GetDocument(documentId), invocationReasons);
}
// Resolves the project in the given snapshot and enqueues all its documents.
private Task EnqueueWorkItemForProjectAsync(Solution solution, ProjectId projectId, InvocationReasons invocationReasons)
{
    return EnqueueWorkItemAsync(solution.GetProject(projectId), invocationReasons);
}
// Fans out: enqueues every project of the solution with the same reasons.
private async Task EnqueueWorkItemForSolutionAsync(Solution solution, InvocationReasons invocationReasons)
{
foreach (var projectId in solution.ProjectIds)
{
await EnqueueWorkItemForProjectAsync(solution, projectId, invocationReasons).ConfigureAwait(false);
}
}
// Computes the project-level diff between the two snapshots and enqueues it.
private async Task EnqueueWorkItemAfterDiffAsync(Solution oldSolution, Solution newSolution, ProjectId projectId)
{
    var changes = newSolution.GetProject(projectId).GetChanges(oldSolution.GetProject(projectId));
    await EnqueueWorkItemAsync(changes).ConfigureAwait(false);
}
// Resolves the same document in both snapshots and diffs them.
private async Task EnqueueWorkItemAfterDiffAsync(Solution oldSolution, Solution newSolution, DocumentId documentId)
{
    var projectId = documentId.ProjectId;
    await EnqueueWorkItemAsync(
        oldSolution.GetProject(projectId).GetDocument(documentId),
        newSolution.GetProject(projectId).GetDocument(documentId)).ConfigureAwait(false);
}
// Test-only helper: builds a work item for every document in the current
// solution and waits for the given workers to drain them all.
internal void WaitUntilCompletion_ForTestingPurposesOnly(ImmutableArray<IIncrementalAnalyzer> workers)
{
    var solution = _registration.CurrentSolution;
    var items = solution.Projects
        .SelectMany(p => p.Documents)
        .Select(d => new WorkItem(d.Id, d.Project.Language, InvocationReasons.DocumentAdded, false, EmptyAsyncToken.Instance))
        .ToList();

    _documentAndProjectWorkerProcessor.WaitUntilCompletion_ForTestingPurposesOnly(workers, items);
}
// Test-only helper: blocks until the document/project worker queue is idle.
internal void WaitUntilCompletion_ForTestingPurposesOnly()
{
_documentAndProjectWorkerProcessor.WaitUntilCompletion_ForTestingPurposesOnly();
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Runtime.InteropServices;
internal static partial class Interop
{
/// <summary>Common Unix errno error codes.</summary>
internal enum Error
{
// These values were defined in src/Native/System.Native/fxerrno.h
//
// They compare against values obtained via Interop.Sys.GetLastError() not Marshal.GetLastWin32Error()
// which obtains the raw errno that varies between unixes. The strong typing as an enum is meant to
// prevent confusing the two. Casting to or from int is suspect. Use GetLastErrorInfo() if you need to
// correlate these to the underlying platform values or obtain the corresponding error message.
//
// NOTE(review): the numbering below is sparse (e.g. 0x1002B and 0x10035-0x10036
// are skipped) - presumably codes not surfaced by the PAL; confirm against fxerrno.h.
SUCCESS = 0,
E2BIG = 0x10001, // Argument list too long.
EACCES = 0x10002, // Permission denied.
EADDRINUSE = 0x10003, // Address in use.
EADDRNOTAVAIL = 0x10004, // Address not available.
EAFNOSUPPORT = 0x10005, // Address family not supported.
EAGAIN = 0x10006, // Resource unavailable, try again (same value as EWOULDBLOCK),
EALREADY = 0x10007, // Connection already in progress.
EBADF = 0x10008, // Bad file descriptor.
EBADMSG = 0x10009, // Bad message.
EBUSY = 0x1000A, // Device or resource busy.
ECANCELED = 0x1000B, // Operation canceled.
ECHILD = 0x1000C, // No child processes.
ECONNABORTED = 0x1000D, // Connection aborted.
ECONNREFUSED = 0x1000E, // Connection refused.
ECONNRESET = 0x1000F, // Connection reset.
EDEADLK = 0x10010, // Resource deadlock would occur.
EDESTADDRREQ = 0x10011, // Destination address required.
EDOM = 0x10012, // Mathematics argument out of domain of function.
EDQUOT = 0x10013, // Reserved.
EEXIST = 0x10014, // File exists.
EFAULT = 0x10015, // Bad address.
EFBIG = 0x10016, // File too large.
EHOSTUNREACH = 0x10017, // Host is unreachable.
EIDRM = 0x10018, // Identifier removed.
EILSEQ = 0x10019, // Illegal byte sequence.
EINPROGRESS = 0x1001A, // Operation in progress.
EINTR = 0x1001B, // Interrupted function.
EINVAL = 0x1001C, // Invalid argument.
EIO = 0x1001D, // I/O error.
EISCONN = 0x1001E, // Socket is connected.
EISDIR = 0x1001F, // Is a directory.
ELOOP = 0x10020, // Too many levels of symbolic links.
EMFILE = 0x10021, // File descriptor value too large.
EMLINK = 0x10022, // Too many links.
EMSGSIZE = 0x10023, // Message too large.
EMULTIHOP = 0x10024, // Reserved.
ENAMETOOLONG = 0x10025, // Filename too long.
ENETDOWN = 0x10026, // Network is down.
ENETRESET = 0x10027, // Connection aborted by network.
ENETUNREACH = 0x10028, // Network unreachable.
ENFILE = 0x10029, // Too many files open in system.
ENOBUFS = 0x1002A, // No buffer space available.
ENODEV = 0x1002C, // No such device.
ENOENT = 0x1002D, // No such file or directory.
ENOEXEC = 0x1002E, // Executable file format error.
ENOLCK = 0x1002F, // No locks available.
ENOLINK = 0x10030, // Reserved.
ENOMEM = 0x10031, // Not enough space.
ENOMSG = 0x10032, // No message of the desired type.
ENOPROTOOPT = 0x10033, // Protocol not available.
ENOSPC = 0x10034, // No space left on device.
ENOSYS = 0x10037, // Function not supported.
ENOTCONN = 0x10038, // The socket is not connected.
ENOTDIR = 0x10039, // Not a directory or a symbolic link to a directory.
ENOTEMPTY = 0x1003A, // Directory not empty.
ENOTRECOVERABLE = 0x1003B, // State not recoverable.
ENOTSOCK = 0x1003C, // Not a socket.
ENOTSUP = 0x1003D, // Not supported (same value as EOPNOTSUP).
ENOTTY = 0x1003E, // Inappropriate I/O control operation.
ENXIO = 0x1003F, // No such device or address.
EOVERFLOW = 0x10040, // Value too large to be stored in data type.
EOWNERDEAD = 0x10041, // Previous owner died.
EPERM = 0x10042, // Operation not permitted.
EPIPE = 0x10043, // Broken pipe.
EPROTO = 0x10044, // Protocol error.
EPROTONOSUPPORT = 0x10045, // Protocol not supported.
EPROTOTYPE = 0x10046, // Protocol wrong type for socket.
ERANGE = 0x10047, // Result too large.
EROFS = 0x10048, // Read-only file system.
ESPIPE = 0x10049, // Invalid seek.
ESRCH = 0x1004A, // No such process.
ESTALE = 0x1004B, // Reserved.
ETIMEDOUT = 0x1004D, // Connection timed out.
ETXTBSY = 0x1004E, // Text file busy.
EXDEV = 0x1004F, // Cross-device link.
ESOCKTNOSUPPORT = 0x1005E, // Socket type not supported.
EPFNOSUPPORT = 0x10060, // Protocol family not supported.
ESHUTDOWN = 0x1006C, // Socket shutdown.
EHOSTDOWN = 0x10070, // Host is down.
ENODATA = 0x10071, // No data available.
// Custom Error codes to track errors beyond kernel interface.
EHOSTNOTFOUND = 0x20001, // Name lookup failed
// POSIX permits these to have the same value and we make them always equal so
// that CoreFX cannot introduce a dependency on distinguishing between them that
// would not work on all platforms.
EOPNOTSUPP = ENOTSUP, // Operation not supported on socket.
EWOULDBLOCK = EAGAIN, // Operation would block.
}
// Represents a platform-agnostic Error and underlying platform-specific errno
// Represents a platform-agnostic Error and underlying platform-specific errno
internal struct ErrorInfo
{
private Error _error;
private int _rawErrno; // -1 == not yet derived from _error (see RawErrno)
// Construct from a raw platform errno: convert eagerly to the PAL error.
internal ErrorInfo(int errno)
{
_error = Interop.Sys.ConvertErrorPlatformToPal(errno);
_rawErrno = errno;
}
// Construct from a PAL error; the platform errno is derived lazily on demand.
internal ErrorInfo(Error error)
{
_error = error;
_rawErrno = -1;
}
// The platform-agnostic error code.
internal Error Error
{
get { return _error; }
}
// The platform-specific errno, converting from the PAL error on first access.
// NOTE(review): ErrorInfo is a mutable struct, so the memoized value only
// persists when this getter runs on a writable instance; copies recompute.
internal int RawErrno
{
get { return _rawErrno == -1 ? (_rawErrno = Interop.Sys.ConvertErrorPalToPlatform(_error)) : _rawErrno; }
}
// Human-readable message for the underlying platform errno.
internal string GetErrorMessage()
{
return Interop.Sys.StrError(RawErrno);
}
public override string ToString()
{
return $"RawErrno: {RawErrno} Error: {Error} GetErrorMessage: {GetErrorMessage()}"; // No localization required; text is member names used for debugging purposes
}
}
internal partial class Sys
{
// Last PAL error for the preceding p/invoke.
// NOTE(review): Marshal.GetLastWin32Error() here is expected to carry the
// shim's errno captured by the runtime (SetLastError) - confirm the interop
// methods involved are declared with SetLastError = true.
internal static Error GetLastError()
{
return ConvertErrorPlatformToPal(Marshal.GetLastWin32Error());
}
// Same as GetLastError but also retains the raw platform errno.
internal static ErrorInfo GetLastErrorInfo()
{
return new ErrorInfo(Marshal.GetLastWin32Error());
}
// Returns the message for a raw platform errno via the native strerror_r shim.
internal static unsafe string StrError(int platformErrno)
{
int maxBufferLength = 1024; // should be long enough for most any UNIX error
byte* buffer = stackalloc byte[maxBufferLength];
byte* message = StrErrorR(platformErrno, buffer, maxBufferLength);
if (message == null)
{
// This means the buffer was not large enough, but still contains
// as much of the error message as possible and is guaranteed to
// be null-terminated. We're not currently resizing/retrying because
// maxBufferLength is large enough in practice, but we could do
// so here in the future if necessary.
message = buffer;
}
return Marshal.PtrToStringAnsi((IntPtr)message)!;
}
// Native conversions between raw platform errno values and PAL Error codes.
[DllImport(Libraries.SystemNative, EntryPoint = "SystemNative_ConvertErrorPlatformToPal")]
internal static extern Error ConvertErrorPlatformToPal(int platformErrno);
[DllImport(Libraries.SystemNative, EntryPoint = "SystemNative_ConvertErrorPalToPlatform")]
internal static extern int ConvertErrorPalToPlatform(Error error);
// Writes the message for platformErrno into buffer; returns null when the
// buffer was too small (see StrError above for how that case is handled).
[DllImport(Libraries.SystemNative, EntryPoint = "SystemNative_StrErrorR")]
private static extern unsafe byte* StrErrorR(int platformErrno, byte* buffer, int bufferSize);
}
}
// NOTE: extension method can't be nested inside Interop class.
internal static class InteropErrorExtensions
{
    /// <summary>
    /// Shorthand for synthesizing an <see cref="Interop.ErrorInfo"/> from an
    /// <see cref="Interop.Error"/>: Interop.Error.EFAIL.Info() for brevity vs.
    /// new Interop.ErrorInfo(Interop.Error.EFAIL).
    /// </summary>
    /// <remarks>
    /// Errors originated from the system should be obtained via
    /// GetLastErrorInfo(), not GetLastError().Info(): the latter converts
    /// twice, which is not only inefficient but also lossy if we ever
    /// encounter a raw errno that has no equivalent in the Error enum.
    /// </remarks>
    public static Interop.ErrorInfo Info(this Interop.Error error) =>
        new Interop.ErrorInfo(error);
}
| |
/*
*
* (c) Copyright Ascensio System Limited 2010-2021
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Data;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using ASC.Common;
using ASC.Common.Caching;
using ASC.Common.Logging;
using ASC.Common.Security.Authentication;
using ASC.Common.Threading.Progress;
using ASC.Core;
using ASC.Core.Users;
using ASC.CRM.Core;
using ASC.CRM.Core.Dao;
using ASC.CRM.Core.Entities;
using ASC.Data.Storage;
using ASC.Web.Core.Files;
using ASC.Web.CRM.Core;
using ASC.Web.CRM.Resources;
using ASC.Web.CRM.Services.NotifyService;
using ASC.Web.Files.Utils;
using ASC.Web.Studio.Utility;
using Autofac;
using Ionic.Zip;
using Newtonsoft.Json.Linq;
namespace ASC.Web.CRM.Classes
{
// Thin wrapper over the shared cache used to publish export-to-csv progress
// and a cooperative cancellation flag between requests.
class ExportDataCache
{
public static readonly ICache Cache = AscCache.Default;
// Cache key under which the operation's progress snapshot is stored.
public static String GetStateCacheKey(string key)
{
return String.Format("{0}:crm:queue:exporttocsv", key);
}
// Cache key for the cooperative cancellation flag.
public static String GetCancelCacheKey(string key)
{
return String.Format("{0}:crm:queue:exporttocsv:cancel", key);
}
// Returns the last published progress snapshot, or null when absent/expired.
public static ExportDataOperation Get(string key)
{
return Cache.Get<ExportDataOperation>(GetStateCacheKey(key));
}
// Publishes a progress snapshot; entries expire after one minute.
public static void Insert(string key, ExportDataOperation data)
{
Cache.Insert(GetStateCacheKey(key), data, TimeSpan.FromMinutes(1));
}
// True when cancellation has been requested for the given key.
public static bool CheckCancelFlag(string key)
{
var fromCache = Cache.Get<String>(GetCancelCacheKey(key));
return !String.IsNullOrEmpty(fromCache);
}
// Requests cooperative cancellation of the export identified by key.
public static void SetCancelFlag(string key)
{
Cache.Insert(GetCancelCacheKey(key), "true", TimeSpan.FromMinutes(1));
}
// Clears both the progress snapshot and the cancel flag.
public static void ResetAll(string key)
{
Cache.Remove(GetStateCacheKey(key));
Cache.Remove(GetCancelCacheKey(key));
}
}
class ExportDataOperation : IProgressItem
{
#region Constructor
// Captures the ambient context (tenant, account, store) at creation time so
// RunJob can restore it later on a worker thread, and initializes the
// progress state to "queued".
public ExportDataOperation(FilterObject filterObject, string fileName)
{
_tenantId = TenantProvider.CurrentTenantID;
_author = SecurityContext.CurrentAccount;
_dataStore = Global.GetStore();
_notifyClient = NotifyClient.Instance;
_filterObject = filterObject;
_log = LogManager.GetLogger("ASC.CRM");
// Null filter means "export everything" (zip of csv files); a filter
// produces a single csv - see the FileName default below and RunJob.
Id = ExportToCsv.GetKey(filterObject != null);
Status = ProgressStatus.Queued;
Error = null;
Percentage = 0;
IsCompleted = false;
FileName = fileName ?? string.Format("{0}_{1}.{2}", CRMSettingResource.Export, DateTime.UtcNow.Ticks, filterObject == null ? "zip" : "csv");
FileUrl = null;
}
// Parameterless overload: full export (no filter) with a generated file name.
public ExportDataOperation()
: this(null, null)
{
}
#endregion
#region Members
// Ambient context captured in the constructor and restored in RunJob.
private readonly int _tenantId;
private readonly IAccount _author;
// Destination storage for the generated file.
private readonly IDataStore _dataStore;
private readonly NotifyClient _notifyClient;
// Null when exporting all data; otherwise describes the partial export.
private readonly FilterObject _filterObject;
private readonly ILog _log;
// Row counter accumulated by ExportAllData/ExportPartData; presumably drives
// Percentage - TODO confirm (usage is outside this chunk).
private int _totalCount;
#endregion
/// <summary>
/// Two export operations are the same operation when they carry the same
/// queue key (<see cref="Id"/>).
/// </summary>
public override bool Equals(object obj)
{
    var other = obj as ExportDataOperation;
    if (other == null) return false;

    // Bug fix: 'Id == other.Id' compared the boxed Id values by reference,
    // while GetHashCode() delegates to Id.GetHashCode() (value-based for the
    // string keys produced by ExportToCsv.GetKey). Compare by value so Equals
    // stays consistent with GetHashCode, e.g. for snapshots rebuilt from the
    // cache that no longer share the original string reference.
    return Equals(Id, other.Id);
}
// Hashes solely on the queue key; must stay consistent with Equals, which
// also compares only Id.
public override int GetHashCode()
{
return Id.GetHashCode();
}
/// <summary>
/// Creates a detached snapshot of the operation's progress state; used to
/// publish the current state to <see cref="ExportDataCache"/>.
/// </summary>
public object Clone()
{
    return new ExportDataOperation
    {
        Id = Id,
        Status = Status,
        Error = Error,
        Percentage = Percentage,
        IsCompleted = IsCompleted,
        FileName = FileName,
        FileUrl = FileUrl
    };
}
#region Property
// Queue key (string produced by ExportToCsv.GetKey).
public object Id { get; set; }
// ProgressStatus value (Queued/Started/Done/Failed), stored boxed.
public object Status { get; set; }
// Failure message (string) when the export failed; otherwise null.
public object Error { get; set; }
// Progress in the range 0..100 (see Complete).
public double Percentage { get; set; }
public bool IsCompleted { get; set; }
// Target file name, e.g. "Export_<ticks>.zip" or ".csv".
public string FileName { get; set; }
// Url of the produced file; null until set by the export (setter not visible here).
public string FileUrl { get; set; }
#endregion
#region Private Methods
// Trims the value and wraps it in double quotes, doubling any embedded quote
// characters (CSV-style escaping).
private static String WrapDoubleQuote(String value)
{
    var escaped = value.Trim().Replace("\"", "\"\"");
    return String.Concat("\"", escaped, "\"");
}
// Serializes a DataTable to CSV text: a header line of column captions
// followed by one line per row, with every cell trimmed, quoted and escaped
// via WrapDoubleQuote. Captions are emitted verbatim (unquoted), matching the
// original output format.
private static String DataTableToCsv(DataTable dataTable)
{
    var result = new StringBuilder();
    var columnsCount = dataTable.Columns.Count;

    // Header row.
    var captions = new String[columnsCount];
    for (var index = 0; index < columnsCount; index++)
    {
        captions[index] = dataTable.Columns[index].Caption;
    }
    result.Append(String.Join(",", captions));
    result.Append(Environment.NewLine);

    // Data rows.
    foreach (DataRow row in dataTable.Rows)
    {
        var cells = new String[columnsCount];
        for (var i = 0; i < columnsCount; i++)
        {
            cells[i] = WrapDoubleQuote(row[i].ToString());
        }
        result.Append(String.Join(",", cells));
        result.Append(Environment.NewLine);
    }

    return result.ToString();
}
#endregion
// Entry point executed by the progress queue: restores the captured tenant
// and account, runs the full or partial export, and always publishes a final
// state before clearing the cache entries.
public void RunJob()
{
try
{
Status = ProgressStatus.Started;
// Restore the ambient context captured in the constructor.
CoreContext.TenantManager.SetCurrentTenant(_tenantId);
SecurityContext.AuthenticateMe(_author);
using (var scope = DIHelper.Resolve())
{
var daoFactory = scope.Resolve<DaoFactory>();
// Run under the requesting user's culture so formatted values match.
var userCulture = CoreContext.UserManager.GetUsers(SecurityContext.CurrentAccount.ID).GetCulture();
System.Threading.Thread.CurrentThread.CurrentCulture = userCulture;
System.Threading.Thread.CurrentThread.CurrentUICulture = userCulture;
_log.Debug("Start Export Data");
// Publish the "started" snapshot before doing any work.
ExportDataCache.Insert((string)Id, (ExportDataOperation)Clone());
// Null filter = full export (zip); otherwise partial export (csv).
if (_filterObject == null)
ExportAllData(daoFactory);
else
ExportPartData(daoFactory);
}
Complete(100, ProgressStatus.Done, null);
_log.Debug("Export is completed");
}
catch (OperationCanceledException ocex)
{
// Presumably raised when ExportDataCache's cancel flag is observed by the
// export methods - TODO confirm (they are outside this chunk).
Complete(0, ProgressStatus.Failed, ocex.Message);
_log.Debug("Export is cancel");
}
catch (Exception ex)
{
Complete(0, ProgressStatus.Failed, ex.Message);
_log.Error(ex);
}
finally
{
// Always drop the cached progress snapshot and cancel flag.
ExportDataCache.ResetAll((string)Id);
}
}
// Records the terminal state (done/failed) and publishes the final snapshot
// to the cache so pollers can observe it.
private void Complete(double percentage, ProgressStatus status, object error)
{
IsCompleted = true;
Percentage = percentage;
Status = status;
Error = error;
ExportDataCache.Insert((string)Id, (ExportDataOperation)Clone());
}
/// <summary>
/// Full export: builds one CSV per CRM module (contacts, deals, cases, tasks,
/// history, products/services), packs them into a single zip, stores the zip
/// in the "export" area of the data store and notifies the author with a link.
/// </summary>
/// <param name="daoFactory">Factory providing DAO access for all modules.</param>
private void ExportAllData(DaoFactory daoFactory)
{
    using (var stream = TempStream.Create())
    {
        var contactDao = daoFactory.ContactDao;
        var contactInfoDao = daoFactory.ContactInfoDao;
        var dealDao = daoFactory.DealDao;
        var casesDao = daoFactory.CasesDao;
        var taskDao = daoFactory.TaskDao;
        var historyDao = daoFactory.RelationshipEventDao;
        var invoiceItemDao = daoFactory.InvoiceItemDao;

        // Total row count across all modules; the per-row Percentage
        // increments inside the Export*ToCsv methods are scaled by it.
        _totalCount += contactDao.GetAllContactsCount();
        _totalCount += dealDao.GetDealsCount();
        _totalCount += casesDao.GetCasesCount();
        _totalCount += taskDao.GetAllTasksCount();
        _totalCount += historyDao.GetAllItemsCount();
        _totalCount += invoiceItemDao.GetInvoiceItemsCount();

        using (var zipStream = new ZipOutputStream(stream, true))
        {
            // UTF-8 entry names so localized module names survive inside the zip.
            zipStream.AlternateEncoding = Encoding.UTF8;
            zipStream.AlternateEncodingUsage = ZipOption.Always;

            zipStream.PutNextEntry(CRMContactResource.Contacts + ".csv");
            var contactData = contactDao.GetAllContacts();

            // Index all contact-info rows by "contactId_infoType_category";
            // multiple values for the same slot are comma-joined.
            var contactInfos = new StringDictionary();
            contactInfoDao.GetAll()
                          .ForEach(item =>
                              {
                                  var contactInfoKey = String.Format("{0}_{1}_{2}", item.ContactID, (int)item.InfoType, item.Category);
                                  if (contactInfos.ContainsKey(contactInfoKey))
                                  {
                                      contactInfos[contactInfoKey] += "," + item.Data;
                                  }
                                  else
                                  {
                                      contactInfos.Add(contactInfoKey, item.Data);
                                  }
                              });

            using (var zipEntryData = new MemoryStream(Encoding.UTF8.GetBytes(ExportContactsToCsv(contactData, contactInfos, daoFactory))))
            {
                zipEntryData.CopyTo(zipStream);
            }

            zipStream.PutNextEntry(CRMCommonResource.DealModuleName + ".csv");
            var dealData = dealDao.GetAllDeals();

            using (var zipEntryData = new MemoryStream(Encoding.UTF8.GetBytes(ExportDealsToCsv(dealData, daoFactory))))
            {
                zipEntryData.CopyTo(zipStream);
            }

            zipStream.PutNextEntry(CRMCommonResource.CasesModuleName + ".csv");
            var casesData = casesDao.GetAllCases();

            using (var zipEntryData = new MemoryStream(Encoding.UTF8.GetBytes(ExportCasesToCsv(casesData, daoFactory))))
            {
                zipEntryData.CopyTo(zipStream);
            }

            zipStream.PutNextEntry(CRMCommonResource.TaskModuleName + ".csv");
            var taskData = taskDao.GetAllTasks();

            using (var zipEntryData = new MemoryStream(Encoding.UTF8.GetBytes(ExportTasksToCsv(taskData, daoFactory))))
            {
                zipEntryData.CopyTo(zipStream);
            }

            zipStream.PutNextEntry(CRMCommonResource.History + ".csv");
            var historyData = historyDao.GetAllItems();

            using (var zipEntryData = new MemoryStream(Encoding.UTF8.GetBytes(ExportHistoryToCsv(historyData, daoFactory))))
            {
                zipEntryData.CopyTo(zipStream);
            }

            zipStream.PutNextEntry(CRMCommonResource.ProductsAndServices + ".csv");
            var invoiceItemData = invoiceItemDao.GetAll();

            using (var zipEntryData = new MemoryStream(Encoding.UTF8.GetBytes(ExportInvoiceItemsToCsv(invoiceItemData, daoFactory))))
            {
                zipEntryData.CopyTo(zipStream);
            }

            zipStream.Flush();
            zipStream.Close();

            // Rewind so the Save below uploads from the beginning.
            stream.Position = 0;
        }

        // Each export replaces whatever was previously stored for this tenant.
        if (_dataStore.IsDirectory("export", string.Empty))
        {
            _dataStore.DeleteFiles("export", string.Empty, "*.*", true);
        }

        FileUrl = CommonLinkUtility.GetFullAbsolutePath(_dataStore.Save("export", FileName, stream).ToString());

        _notifyClient.SendAboutExportCompleted(_author.ID, FileName, FileUrl);
    }
}
/// <summary>
/// Partial export: turns the filtered item list into a single CSV and saves
/// it into the author's "My Documents"; sets <c>FileUrl</c> to the result.
/// </summary>
/// <param name="daoFactory">Factory providing DAO access.</param>
/// <exception cref="ArgumentException">
/// Thrown when the filter matches nothing, or when the item list is not one
/// of the supported entity types.
/// </exception>
private void ExportPartData(DaoFactory daoFactory)
{
    var items = _filterObject.GetItemsByFilter(daoFactory);

    string fileContent;

    _totalCount = items.Count;

    if (_totalCount == 0)
        throw new ArgumentException(CRMErrorsResource.ExportToCSVDataEmpty);

    // Dispatch on the concrete list type produced by the filter.
    if (items is List<Contact>)
    {
        var contactInfoDao = daoFactory.ContactInfoDao;

        var contacts = (List<Contact>)items;

        // Index contact-info rows of the selected contacts by
        // "contactId_infoType_category"; duplicates are comma-joined.
        var contactInfos = new StringDictionary();

        contactInfoDao.GetAll(contacts.Select(item => item.ID).ToArray())
                      .ForEach(item =>
                          {
                              var contactInfoKey = String.Format("{0}_{1}_{2}", item.ContactID,
                                                                 (int)item.InfoType,
                                                                 item.Category);
                              if (contactInfos.ContainsKey(contactInfoKey))
                                  contactInfos[contactInfoKey] += "," + item.Data;
                              else
                                  contactInfos.Add(contactInfoKey, item.Data);
                          });

        fileContent = ExportContactsToCsv(contacts, contactInfos, daoFactory);
    }
    else if (items is List<Deal>)
    {
        fileContent = ExportDealsToCsv((List<Deal>)items, daoFactory);
    }
    else if (items is List<ASC.CRM.Core.Entities.Cases>)
    {
        fileContent = ExportCasesToCsv((List<ASC.CRM.Core.Entities.Cases>)items, daoFactory);
    }
    else if (items is List<RelationshipEvent>)
    {
        fileContent = ExportHistoryToCsv((List<RelationshipEvent>)items, daoFactory);
    }
    else if (items is List<Task>)
    {
        fileContent = ExportTasksToCsv((List<Task>)items, daoFactory);
    }
    else if (items is List<InvoiceItem>)
    {
        fileContent = ExportInvoiceItemsToCsv((List<InvoiceItem>)items, daoFactory);
    }
    else
        throw new ArgumentException();

    FileUrl = SaveCsvFileInMyDocument(FileName, fileContent);
}
/// <summary>
/// Serializes contacts to CSV: fixed company/person columns, one column per
/// (contact-info type, category) — address types expand into one column per
/// address part — plus one column per custom field. Supports cooperative
/// cancellation via the cancel flag in <c>ExportDataCache</c>.
/// </summary>
/// <param name="contacts">Contacts to export.</param>
/// <param name="contactInfos">Contact info indexed by "contactId_infoType_category"; multiple values comma-joined.</param>
/// <param name="daoFactory">DAO access for list items, tags, custom fields and company lookup.</param>
/// <returns>CSV content produced by <c>DataTableToCsv</c>.</returns>
/// <exception cref="OperationCanceledException">Thrown when a cancel flag is set for this operation.</exception>
private String ExportContactsToCsv(IReadOnlyCollection<Contact> contacts, StringDictionary contactInfos, DaoFactory daoFactory)
{
    var key = (string)Id;
    var listItemDao = daoFactory.ListItemDao;
    var tagDao = daoFactory.TagDao;
    var customFieldDao = daoFactory.CustomFieldDao;
    var contactDao = daoFactory.ContactDao;

    var dataTable = new DataTable();

    // Fixed columns shared by companies and persons; row values below must be
    // appended in exactly this order.
    dataTable.Columns.AddRange(new[]
        {
            new DataColumn
                {
                    Caption = CRMCommonResource.TypeCompanyOrPerson,
                    ColumnName = "company/person"
                },
            new DataColumn
                {
                    Caption = CRMContactResource.FirstName,
                    ColumnName = "firstname"
                },
            new DataColumn
                {
                    Caption = CRMContactResource.LastName,
                    ColumnName = "lastname"
                },
            new DataColumn
                {
                    Caption = CRMContactResource.CompanyName,
                    ColumnName = "companyname"
                },
            new DataColumn
                {
                    Caption = CRMContactResource.JobTitle,
                    ColumnName = "jobtitle"
                },
            new DataColumn
                {
                    Caption = CRMContactResource.About,
                    ColumnName = "about"
                },
            new DataColumn
                {
                    Caption = CRMContactResource.ContactStage,
                    ColumnName = "contact_stage"
                },
            new DataColumn
                {
                    Caption = CRMContactResource.ContactType,
                    ColumnName = "contact_type"
                },
            new DataColumn
                {
                    Caption = CRMContactResource.ContactTagList,
                    ColumnName = "contact_tag_list"
                }
        });

    // One column per (info type, category); addresses expand per AddressPart.
    foreach (ContactInfoType infoTypeEnum in Enum.GetValues(typeof(ContactInfoType)))
        foreach (Enum categoryEnum in Enum.GetValues(ContactInfo.GetCategory(infoTypeEnum)))
        {
            var localTitle = String.Format("{1} ({0})", categoryEnum.ToLocalizedString().ToLower(), infoTypeEnum.ToLocalizedString());

            if (infoTypeEnum == ContactInfoType.Address)
                dataTable.Columns.AddRange((from AddressPart addressPartEnum in Enum.GetValues(typeof(AddressPart))
                                            select new DataColumn
                                                {
                                                    Caption = String.Format(localTitle + " {0}", addressPartEnum.ToLocalizedString().ToLower()),
                                                    ColumnName = String.Format("contactInfo_{0}_{1}_{2}", (int)infoTypeEnum, categoryEnum, (int)addressPartEnum)
                                                }).ToArray());
            else
                dataTable.Columns.Add(new DataColumn
                    {
                        Caption = localTitle,
                        ColumnName = String.Format("contactInfo_{0}_{1}", (int)infoTypeEnum, categoryEnum)
                    });
        }

    // Union of company and person custom-field descriptions, de-duplicated by ID.
    var fieldsDescription = customFieldDao.GetFieldsDescription(EntityType.Company);

    customFieldDao.GetFieldsDescription(EntityType.Person).ForEach(item =>
        {
            var alreadyContains = fieldsDescription.Any(field => field.ID == item.ID);

            if (!alreadyContains)
                fieldsDescription.Add(item);
        });

    // Heading-type fields are visual separators, not data — skip them.
    fieldsDescription.ForEach(
        item =>
            {
                if (item.FieldType == CustomFieldType.Heading) return;

                dataTable.Columns.Add(
                    new DataColumn
                        {
                            Caption = item.Label,
                            ColumnName = "customField_" + item.ID
                        }
                    );
            });

    // Custom-field values of the exported contacts, grouped per contact ID.
    var companyCustomFields = contacts.Where(x => x is Company).Any() ? customFieldDao.GetEntityFields(EntityType.Company, contacts.Where(x => x is Company).Select(x => x.ID).ToArray()) : new List<CustomField>();
    var personCustomFields = contacts.Where(x => x is Person).Any() ? customFieldDao.GetEntityFields(EntityType.Person, contacts.Where(x => x is Person).Select(x => x.ID).ToArray()) : new List<CustomField>();
    var customFields = companyCustomFields.Union(personCustomFields);

    var customFieldEntity = customFields
        .GroupBy(x => x.EntityID)
        .ToDictionary(x => x.Key, x => x.ToList());

    var tags = tagDao.GetEntitiesTags(EntityType.Contact);

    foreach (var contact in contacts)
    {
        // Cooperative cancellation: the flag is set by ExportToCsv.Cancel.
        if (ExportDataCache.CheckCancelFlag(key))
        {
            ExportDataCache.ResetAll(key);

            throw new OperationCanceledException();
        }

        // Publish progress once per row.
        ExportDataCache.Insert(key, (ExportDataOperation)Clone());

        Percentage += 1.0 * 100 / _totalCount;

        var isCompany = contact is Company;
        var compPersType = (isCompany) ? CRMContactResource.Company : CRMContactResource.Person;

        var contactTags = String.Empty;

        if (tags.ContainsKey(contact.ID))
            contactTags = String.Join(",", tags[contact.ID].OrderBy(x => x));

        String firstName;
        String lastName;
        String companyName;
        String title;

        if (contact is Company)
        {
            firstName = String.Empty;
            lastName = String.Empty;
            title = String.Empty;
            companyName = ((Company)contact).CompanyName;
        }
        else
        {
            var people = (Person)contact;

            firstName = people.FirstName;
            lastName = people.LastName;
            title = people.JobTitle;
            companyName = String.Empty;

            // Prefer the company from the exported set; fall back to a DB lookup.
            if (people.CompanyID > 0)
            {
                var personCompany = contacts.SingleOrDefault(item => item.ID == people.CompanyID) ??
                                    contactDao.GetByID(people.CompanyID);

                if (personCompany != null)
                    companyName = personCompany.GetTitle();
            }
        }

        var contactStatus = String.Empty;

        if (contact.StatusID > 0)
        {
            var listItem = listItemDao.GetByID(contact.StatusID);

            if (listItem != null)
                contactStatus = listItem.Title;
        }

        var contactType = String.Empty;

        if (contact.ContactTypeID > 0)
        {
            var listItem = listItemDao.GetByID(contact.ContactTypeID);

            if (listItem != null)
                contactType = listItem.Title;
        }

        // Fixed columns first, in declaration order.
        var dataRowItems = new List<String>
            {
                compPersType,
                firstName,
                lastName,
                companyName,
                title,
                contact.About,
                contactStatus,
                contactType,
                contactTags
            };

        // Then the contact-info columns, iterated in the same order as the
        // column-declaration loop above.
        foreach (ContactInfoType infoTypeEnum in Enum.GetValues(typeof(ContactInfoType)))
            foreach (Enum categoryEnum in Enum.GetValues(ContactInfo.GetCategory(infoTypeEnum)))
            {
                var contactInfoKey = String.Format("{0}_{1}_{2}", contact.ID,
                                                   (int)infoTypeEnum,
                                                   Convert.ToInt32(categoryEnum));

                var columnValue = "";

                if (contactInfos.ContainsKey(contactInfoKey))
                    columnValue = contactInfos[contactInfoKey];

                if (infoTypeEnum == ContactInfoType.Address)
                {
                    if (!String.IsNullOrEmpty(columnValue))
                    {
                        // Address values are stored as JSON objects; join each
                        // part across all addresses in the slot.
                        var addresses = JArray.Parse(String.Concat("[", columnValue, "]"));

                        dataRowItems.AddRange((from AddressPart addressPartEnum in Enum.GetValues(typeof(AddressPart))
                                               select String.Join(",", addresses.Select(item => (String)item.SelectToken(addressPartEnum.ToString().ToLower())).ToArray())).ToArray());
                    }
                    else
                    {
                        // NOTE(review): hard-codes five empty cells — assumes
                        // AddressPart has exactly five members; verify.
                        dataRowItems.AddRange(new[] { "", "", "", "", "" });
                    }
                }
                else
                {
                    dataRowItems.Add(columnValue);
                }
            }

        var dataRow = dataTable.Rows.Add(dataRowItems.ToArray());

        // Custom-field values are assigned by column name rather than position.
        if (customFieldEntity.ContainsKey(contact.ID))
            customFieldEntity[contact.ID].ForEach(item => dataRow["customField_" + item.ID] = item.Value);
    }

    return DataTableToCsv(dataTable);
}
/// <summary>
/// Serializes deals (opportunities) to CSV: fixed deal columns plus one
/// column per custom field. Supports cooperative cancellation via the cancel
/// flag in <c>ExportDataCache</c>.
/// </summary>
/// <param name="deals">Deals to export.</param>
/// <param name="daoFactory">DAO access for tags, custom fields, milestones and contacts.</param>
/// <returns>CSV content produced by <c>DataTableToCsv</c>.</returns>
/// <exception cref="OperationCanceledException">Thrown when a cancel flag is set for this operation.</exception>
/// <exception cref="ArgumentException">Thrown when a deal has an unknown BidType.</exception>
private String ExportDealsToCsv(IEnumerable<Deal> deals, DaoFactory daoFactory)
{
    var key = (string)Id;
    var tagDao = daoFactory.TagDao;
    var customFieldDao = daoFactory.CustomFieldDao;
    var dealMilestoneDao = daoFactory.DealMilestoneDao;
    var contactDao = daoFactory.ContactDao;

    var dataTable = new DataTable();

    // Fixed columns; row values below are appended in exactly this order.
    dataTable.Columns.AddRange(new[]
        {
            new DataColumn
                {
                    Caption = CRMDealResource.NameDeal,
                    ColumnName = "title"
                },
            new DataColumn
                {
                    Caption = CRMDealResource.ClientDeal,
                    ColumnName = "client_deal"
                },
            new DataColumn
                {
                    Caption = CRMDealResource.OtherMembersDeal,
                    ColumnName = "member"
                },
            new DataColumn
                {
                    Caption = CRMDealResource.DescriptionDeal,
                    ColumnName = "description"
                },
            new DataColumn
                {
                    Caption = CRMCommonResource.Currency,
                    ColumnName = "currency"
                },
            new DataColumn
                {
                    Caption = CRMDealResource.DealAmount,
                    ColumnName = "amount"
                },
            new DataColumn
                {
                    Caption = CRMDealResource.BidType,
                    ColumnName = "bid_type"
                },
            new DataColumn
                {
                    Caption = CRMDealResource.BidTypePeriod,
                    ColumnName = "bid_type_period"
                },
            new DataColumn
                {
                    Caption = CRMJSResource.ExpectedCloseDate,
                    ColumnName = "expected_close_date"
                },
            new DataColumn
                {
                    Caption = CRMJSResource.ActualCloseDate,
                    ColumnName = "actual_close_date"
                },
            new DataColumn
                {
                    Caption = CRMDealResource.ResponsibleDeal,
                    ColumnName = "responsible_deal"
                },
            new DataColumn
                {
                    Caption = CRMDealResource.CurrentDealMilestone,
                    ColumnName = "current_deal_milestone"
                },
            new DataColumn
                {
                    Caption = CRMDealResource.DealMilestoneType,
                    ColumnName = "deal_milestone_type"
                },
            new DataColumn
                {
                    Caption = (CRMDealResource.ProbabilityOfWinning + " %"),
                    ColumnName = "probability_of_winning"
                },
            new DataColumn
                {
                    Caption = (CRMDealResource.DealTagList),
                    ColumnName = "tag_list"
                }
        });

    // One column per opportunity custom field (headings are separators — skipped).
    customFieldDao.GetFieldsDescription(EntityType.Opportunity).ForEach(
        item =>
            {
                if (item.FieldType == CustomFieldType.Heading) return;

                dataTable.Columns.Add(new DataColumn
                    {
                        Caption = item.Label,
                        ColumnName = "customField_" + item.ID
                    });
            });

    // Custom-field values of the exported deals, grouped per deal ID.
    var customFieldEntity = customFieldDao.GetEntityFields(EntityType.Opportunity, deals.Select(x => x.ID).ToArray())
                                          .GroupBy(x => x.EntityID)
                                          .ToDictionary(x => x.Key, x => x.ToList());

    var tags = tagDao.GetEntitiesTags(EntityType.Opportunity);

    foreach (var deal in deals)
    {
        // Cooperative cancellation: the flag is set by ExportToCsv.Cancel.
        if (ExportDataCache.CheckCancelFlag(key))
        {
            ExportDataCache.ResetAll(key);

            throw new OperationCanceledException();
        }

        // Publish progress once per row.
        ExportDataCache.Insert(key, (ExportDataOperation)Clone());

        Percentage += 1.0 * 100 / _totalCount;

        var contactTags = String.Empty;

        if (tags.ContainsKey(deal.ID))
            contactTags = String.Join(",", tags[deal.ID].OrderBy(x => x));

        // Localized bid-type label; unknown values are a programming error.
        String bidType;

        switch (deal.BidType)
        {
            case BidType.FixedBid:
                bidType = CRMDealResource.BidType_FixedBid;
                break;
            case BidType.PerDay:
                bidType = CRMDealResource.BidType_PerDay;
                break;
            case BidType.PerHour:
                bidType = CRMDealResource.BidType_PerHour;
                break;
            case BidType.PerMonth:
                bidType = CRMDealResource.BidType_PerMonth;
                break;
            case BidType.PerWeek:
                bidType = CRMDealResource.BidType_PerWeek;
                break;
            case BidType.PerYear:
                bidType = CRMDealResource.BidType_PerYear;
                break;
            default:
                throw new ArgumentException();
        }

        var currentDealMilestone = dealMilestoneDao.GetByID(deal.DealMilestoneID);
        var currentDealMilestoneStatus = currentDealMilestone.Status.ToLocalizedString();

        var contactTitle = String.Empty;

        if (deal.ContactID != 0)
            contactTitle = contactDao.GetByID(deal.ContactID).GetTitle();

        // Other participants (excluding the primary client contact).
        var members = string.Empty;
        var dealMembersIDs = daoFactory.DealDao.GetMembers(deal.ID).Where(id => id != deal.ContactID);

        if (dealMembersIDs.Any())
        {
            var dealMembers = daoFactory.ContactDao.GetContacts(dealMembersIDs.ToArray());

            members = string.Join(",", dealMembers.Select(member => member.GetTitle()));
        }

        var dataRow = dataTable.Rows.Add(new object[]
            {
                deal.Title,
                contactTitle,
                members,
                deal.Description,
                deal.BidCurrency,
                deal.BidValue.ToString(CultureInfo.InvariantCulture),
                bidType,
                deal.PerPeriodValue == 0 ? "" : deal.PerPeriodValue.ToString(CultureInfo.InvariantCulture),
                deal.ExpectedCloseDate.Date == DateTime.MinValue.Date ? "" : deal.ExpectedCloseDate.ToString(DateTimeExtension.DateFormatPattern),
                deal.ActualCloseDate.Date == DateTime.MinValue.Date ? "" : deal.ActualCloseDate.ToString(DateTimeExtension.DateFormatPattern),
                CoreContext.UserManager.GetUsers(deal.ResponsibleID).DisplayUserName(false),
                currentDealMilestone.Title,
                currentDealMilestoneStatus,
                deal.DealMilestoneProbability.ToString(CultureInfo.InvariantCulture),
                contactTags
            });

        // Custom-field values are assigned by column name rather than position.
        if (customFieldEntity.ContainsKey(deal.ID))
            customFieldEntity[deal.ID].ForEach(item => dataRow["customField_" + item.ID] = item.Value);
    }

    return DataTableToCsv(dataTable);
}
/// <summary>
/// Serializes cases to CSV: title, tag list, and one column per case custom
/// field. Supports cooperative cancellation via the cancel flag in
/// <c>ExportDataCache</c>.
/// </summary>
/// <param name="cases">Cases to export.</param>
/// <param name="daoFactory">DAO access for tags and custom fields.</param>
/// <returns>CSV content produced by <c>DataTableToCsv</c>.</returns>
/// <exception cref="OperationCanceledException">Thrown when a cancel flag is set for this operation.</exception>
private String ExportCasesToCsv(IEnumerable<ASC.CRM.Core.Entities.Cases> cases, DaoFactory daoFactory)
{
    var key = (string)Id;
    var tagDao = daoFactory.TagDao;
    var customFieldDao = daoFactory.CustomFieldDao;

    var dataTable = new DataTable();

    // Fixed columns. Note: the "tag_list" column previously passed the
    // localized caption to the DataColumn constructor (setting ColumnName)
    // only for the initializer to overwrite it — removed the redundant
    // argument so both columns are built the same way.
    dataTable.Columns.AddRange(new[]
        {
            new DataColumn
                {
                    Caption = CRMCasesResource.CaseTitle,
                    ColumnName = "title"
                },
            new DataColumn
                {
                    Caption = CRMCasesResource.CasesTagList,
                    ColumnName = "tag_list"
                }
        });

    // One column per case custom field (headings are separators — skipped).
    customFieldDao.GetFieldsDescription(EntityType.Case).ForEach(
        item =>
            {
                if (item.FieldType == CustomFieldType.Heading) return;

                dataTable.Columns.Add(new DataColumn
                    {
                        Caption = item.Label,
                        ColumnName = "customField_" + item.ID
                    });
            });

    // Custom-field values of the exported cases, grouped per case ID.
    var customFieldEntity = customFieldDao.GetEntityFields(EntityType.Case, cases.Select(x => x.ID).ToArray())
                                          .GroupBy(x => x.EntityID)
                                          .ToDictionary(x => x.Key, x => x.ToList());

    var tags = tagDao.GetEntitiesTags(EntityType.Case);

    foreach (var item in cases)
    {
        // Cooperative cancellation: the flag is set by ExportToCsv.Cancel.
        if (ExportDataCache.CheckCancelFlag(key))
        {
            ExportDataCache.ResetAll(key);

            throw new OperationCanceledException();
        }

        // Publish progress once per row.
        ExportDataCache.Insert(key, (ExportDataOperation)Clone());

        Percentage += 1.0 * 100 / _totalCount;

        var contactTags = String.Empty;

        if (tags.ContainsKey(item.ID))
            contactTags = String.Join(",", tags[item.ID].OrderBy(x => x));

        var dataRow = dataTable.Rows.Add(new object[]
            {
                item.Title,
                contactTags
            });

        // Custom-field values are assigned by column name rather than position.
        if (customFieldEntity.ContainsKey(item.ID))
            customFieldEntity[item.ID].ForEach(row => dataRow["customField_" + row.ID] = row.Value);
    }

    return DataTableToCsv(dataTable);
}
/// <summary>
/// Serializes relationship events (history) to CSV: content, category,
/// related contact, related entity (case/deal), author and creation date.
/// Supports cooperative cancellation via the cancel flag in
/// <c>ExportDataCache</c>.
/// </summary>
/// <param name="events">History events to export.</param>
/// <param name="daoFactory">DAO access for list items, deals, cases and contacts.</param>
/// <returns>CSV content produced by <c>DataTableToCsv</c>.</returns>
/// <exception cref="OperationCanceledException">Thrown when a cancel flag is set for this operation.</exception>
private String ExportHistoryToCsv(IEnumerable<RelationshipEvent> events, DaoFactory daoFactory)
{
    var key = (string)Id;
    var listItemDao = daoFactory.ListItemDao;
    var dealDao = daoFactory.DealDao;
    var casesDao = daoFactory.CasesDao;
    var contactDao = daoFactory.ContactDao;

    var dataTable = new DataTable();

    // Fixed columns; row values below are appended in exactly this order.
    dataTable.Columns.AddRange(new[]
        {
            new DataColumn
                {
                    Caption = (CRMContactResource.Content),
                    ColumnName = "content"
                },
            new DataColumn
                {
                    Caption = (CRMCommonResource.Category),
                    ColumnName = "category"
                },
            new DataColumn
                {
                    Caption = (CRMContactResource.ContactTitle),
                    ColumnName = "contact_title"
                },
            new DataColumn
                {
                    Caption = (CRMContactResource.RelativeEntity),
                    ColumnName = "relative_entity"
                },
            new DataColumn
                {
                    Caption = (CRMCommonResource.Author),
                    ColumnName = "author"
                },
            new DataColumn
                {
                    Caption = (CRMCommonResource.CreateDate),
                    ColumnName = "create_date"
                }
        });

    foreach (var item in events)
    {
        // Cooperative cancellation: the flag is set by ExportToCsv.Cancel.
        if (ExportDataCache.CheckCancelFlag(key))
        {
            ExportDataCache.ResetAll(key);

            throw new OperationCanceledException();
        }

        // Publish progress once per row.
        ExportDataCache.Insert(key, (ExportDataOperation)Clone());

        Percentage += 1.0 * 100 / _totalCount;

        // "Case: <title>" or "Deal: <title>" for the entity the event belongs to.
        var entityTitle = String.Empty;

        if (item.EntityID > 0)
            switch (item.EntityType)
            {
                case EntityType.Case:
                    var casesObj = casesDao.GetByID(item.EntityID);

                    if (casesObj != null)
                        entityTitle = String.Format("{0}: {1}", CRMCasesResource.Case,
                                                    casesObj.Title);
                    break;
                case EntityType.Opportunity:
                    var dealObj = dealDao.GetByID(item.EntityID);

                    if (dealObj != null)
                        entityTitle = String.Format("{0}: {1}", CRMDealResource.Deal,
                                                    dealObj.Title);
                    break;
            }

        var contactTitle = String.Empty;

        if (item.ContactID > 0)
        {
            var contactObj = contactDao.GetByID(item.ContactID);

            if (contactObj != null)
                contactTitle = contactObj.GetTitle();
        }

        // Positive CategoryID => user-defined list item; otherwise a system
        // category. NOTE(review): the else-if branches only match when
        // CategoryID <= 0 — presumably the HistoryCategorySystem values are
        // non-positive; confirm against the enum definition.
        var categoryTitle = String.Empty;

        if (item.CategoryID > 0)
        {
            var categoryObj = listItemDao.GetByID(item.CategoryID);

            if (categoryObj != null)
                categoryTitle = categoryObj.Title;
        }
        else if (item.CategoryID == (int)HistoryCategorySystem.TaskClosed)
            categoryTitle = HistoryCategorySystem.TaskClosed.ToLocalizedString();
        else if (item.CategoryID == (int)HistoryCategorySystem.FilesUpload)
            categoryTitle = HistoryCategorySystem.FilesUpload.ToLocalizedString();
        else if (item.CategoryID == (int)HistoryCategorySystem.MailMessage)
            categoryTitle = HistoryCategorySystem.MailMessage.ToLocalizedString();

        dataTable.Rows.Add(new object[]
            {
                item.Content,
                categoryTitle,
                contactTitle,
                entityTitle,
                CoreContext.UserManager.GetUsers(item.CreateBy).DisplayUserName(false),
                item.CreateOn.ToShortString()
            });
    }

    return DataTableToCsv(dataTable);
}
/// <summary>
/// Serializes tasks to CSV: title, description, due date, responsible,
/// contact, status, category, related entity and alert value. Supports
/// cooperative cancellation via the cancel flag in <c>ExportDataCache</c>.
/// </summary>
/// <param name="tasks">Tasks to export.</param>
/// <param name="daoFactory">DAO access for list items, deals, cases and contacts.</param>
/// <returns>CSV content produced by <c>DataTableToCsv</c>.</returns>
/// <exception cref="OperationCanceledException">Thrown when a cancel flag is set for this operation.</exception>
private String ExportTasksToCsv(IEnumerable<Task> tasks, DaoFactory daoFactory)
{
    var key = (string)Id;
    var listItemDao = daoFactory.ListItemDao;
    var dealDao = daoFactory.DealDao;
    var casesDao = daoFactory.CasesDao;
    var contactDao = daoFactory.ContactDao;

    var dataTable = new DataTable();

    // Fixed columns; row values below are appended in exactly this order.
    dataTable.Columns.AddRange(new[]
        {
            new DataColumn
                {
                    Caption = (CRMTaskResource.TaskTitle),
                    ColumnName = "title"
                },
            new DataColumn
                {
                    Caption = (CRMTaskResource.Description),
                    ColumnName = "description"
                },
            new DataColumn
                {
                    Caption = (CRMTaskResource.DueDate),
                    ColumnName = "due_date"
                },
            new DataColumn
                {
                    Caption = (CRMTaskResource.Responsible),
                    ColumnName = "responsible"
                },
            new DataColumn
                {
                    Caption = (CRMContactResource.ContactTitle),
                    ColumnName = "contact_title"
                },
            new DataColumn
                {
                    Caption = (CRMTaskResource.TaskStatus),
                    ColumnName = "task_status"
                },
            new DataColumn
                {
                    Caption = (CRMTaskResource.TaskCategory),
                    ColumnName = "task_category"
                },
            new DataColumn
                {
                    Caption = (CRMContactResource.RelativeEntity),
                    ColumnName = "relative_entity"
                },
            new DataColumn
                {
                    Caption = (CRMCommonResource.Alert),
                    ColumnName = "alert_value"
                }
        });

    foreach (var item in tasks)
    {
        // Cooperative cancellation: the flag is set by ExportToCsv.Cancel.
        if (ExportDataCache.CheckCancelFlag(key))
        {
            ExportDataCache.ResetAll(key);

            throw new OperationCanceledException();
        }

        // Publish progress once per row.
        ExportDataCache.Insert(key, (ExportDataOperation)Clone());

        Percentage += 1.0 * 100 / _totalCount;

        // "Case: <title>" or "Deal: <title>" for the entity the task belongs to.
        var entityTitle = String.Empty;

        if (item.EntityID > 0)
            switch (item.EntityType)
            {
                case EntityType.Case:
                    var caseObj = casesDao.GetByID(item.EntityID);

                    if (caseObj != null)
                        entityTitle = String.Format("{0}: {1}", CRMCasesResource.Case, caseObj.Title);
                    break;
                case EntityType.Opportunity:
                    var dealObj = dealDao.GetByID(item.EntityID);

                    if (dealObj != null)
                        entityTitle = String.Format("{0}: {1}", CRMDealResource.Deal, dealObj.Title);
                    break;
            }

        var contactTitle = String.Empty;

        if (item.ContactID > 0)
        {
            var contact = contactDao.GetByID(item.ContactID);

            if (contact != null)
                contactTitle = contact.GetTitle();
        }

        // Fixed: the category lookup previously dereferenced
        // listItemDao.GetByID(...).Title without a null check, so a task
        // pointing at a deleted category crashed the whole export with a
        // NullReferenceException. Guard it like every other lookup here.
        var categoryTitle = String.Empty;

        var categoryObj = listItemDao.GetByID(item.CategoryID);

        if (categoryObj != null)
            categoryTitle = categoryObj.Title;

        dataTable.Rows.Add(new object[]
            {
                item.Title,
                item.Description,
                item.DeadLine == DateTime.MinValue
                    ? ""
                    : item.DeadLine.ToShortString(),
                CoreContext.UserManager.GetUsers(item.ResponsibleID).DisplayUserName(false),
                contactTitle,
                item.IsClosed
                    ? CRMTaskResource.TaskStatus_Closed
                    : CRMTaskResource.TaskStatus_Open,
                categoryTitle,
                entityTitle,
                item.AlertValue.ToString(CultureInfo.InvariantCulture)
            });
    }

    return DataTableToCsv(dataTable);
}
/// <summary>
/// Serializes invoice items (products/services) to CSV, including resolved
/// names/rates of their two optional taxes. Supports cooperative cancellation
/// via the cancel flag in <c>ExportDataCache</c>.
/// </summary>
/// <param name="invoiceItems">Invoice items to export.</param>
/// <param name="daoFactory">DAO access for invoice taxes.</param>
/// <returns>CSV content produced by <c>DataTableToCsv</c>.</returns>
/// <exception cref="OperationCanceledException">Thrown when a cancel flag is set for this operation.</exception>
private String ExportInvoiceItemsToCsv(IEnumerable<InvoiceItem> invoiceItems, DaoFactory daoFactory)
{
    var key = (string)Id;
    // All taxes are loaded once up front and matched in memory per item.
    var taxes = daoFactory.InvoiceTaxDao.GetAll();
    var dataTable = new DataTable();

    // Fixed columns; row values below are appended in exactly this order.
    dataTable.Columns.AddRange(new[]
        {
            new DataColumn
                {
                    Caption = (CRMInvoiceResource.InvoiceItemName),
                    ColumnName = "title"
                },
            new DataColumn
                {
                    Caption = (CRMSettingResource.Description),
                    ColumnName = "description"
                },
            new DataColumn
                {
                    Caption = (CRMInvoiceResource.StockKeepingUnit),
                    ColumnName = "sku"
                },
            new DataColumn
                {
                    Caption = (CRMInvoiceResource.InvoiceItemPrice),
                    ColumnName = "price"
                },
            new DataColumn
                {
                    Caption = (CRMInvoiceResource.FormInvoiceItemStockQuantity),
                    ColumnName = "stock_quantity"
                },
            new DataColumn
                {
                    Caption = (CRMInvoiceResource.TrackInventory),
                    ColumnName = "track_inventory"
                },
            new DataColumn
                {
                    Caption = (CRMInvoiceResource.Currency),
                    ColumnName = "currency"
                },
            new DataColumn
                {
                    Caption = (CRMInvoiceResource.InvoiceTax1Name),
                    ColumnName = "tax1_name"
                },
            new DataColumn
                {
                    Caption = (CRMInvoiceResource.InvoiceTax1Rate),
                    ColumnName = "tax1_rate"
                },
            new DataColumn
                {
                    Caption = (CRMInvoiceResource.InvoiceTax2Name),
                    ColumnName = "tax2_name"
                },
            new DataColumn
                {
                    Caption = (CRMInvoiceResource.InvoiceTax2Rate),
                    ColumnName = "tax2_rate"
                }
        });

    foreach (var item in invoiceItems)
    {
        // Cooperative cancellation: the flag is set by ExportToCsv.Cancel.
        if (ExportDataCache.CheckCancelFlag(key))
        {
            ExportDataCache.ResetAll(key);

            throw new OperationCanceledException();
        }

        // Publish progress once per row.
        ExportDataCache.Insert(key, (ExportDataOperation)Clone());

        Percentage += 1.0 * 100 / _totalCount;

        // Resolve optional taxes; zero ID means "no tax assigned".
        var tax1 = item.InvoiceTax1ID != 0 ? taxes.Find(t => t.ID == item.InvoiceTax1ID) : null;
        var tax2 = item.InvoiceTax2ID != 0 ? taxes.Find(t => t.ID == item.InvoiceTax2ID) : null;

        dataTable.Rows.Add(new object[]
            {
                item.Title,
                item.Description,
                item.StockKeepingUnit,
                item.Price.ToString(CultureInfo.InvariantCulture),
                item.StockQuantity.ToString(CultureInfo.InvariantCulture),
                item.TrackInventory.ToString(),
                item.Currency,
                tax1 != null ? tax1.Name : "",
                tax1 != null ? tax1.Rate.ToString(CultureInfo.InvariantCulture) : "",
                tax2 != null ? tax2.Name : "",
                tax2 != null ? tax2.Rate.ToString(CultureInfo.InvariantCulture) : ""
            });
    }

    return DataTableToCsv(dataTable);
}
/// <summary>
/// Uploads the CSV text into the current user's "My Documents" folder and
/// returns a link to it: the web-editor URL (with CSV delimiter/code-page
/// options) when the file can be viewed or edited online, otherwise a plain
/// download URL.
/// </summary>
/// <param name="title">File name to store the document under.</param>
/// <param name="data">CSV content to upload (encoded as UTF-8).</param>
/// <returns>URL of the stored file.</returns>
private static String SaveCsvFileInMyDocument(String title, String data)
{
    string fileUrl;

    using (var dataStream = new MemoryStream(Encoding.UTF8.GetBytes(data)))
    {
        var file = FileUploader.Exec(Files.Classes.Global.FolderMy.ToString(), title, dataStream.Length, dataStream, true);

        var openableInEditor = FileUtility.CanWebView(title) || FileUtility.CanWebEdit(title);

        if (openableInEditor)
        {
            // Tell the online editor how to parse the CSV.
            var editorOptions = string.Format("&options={{\"delimiter\":{0},\"codePage\":{1}}}",
                                              (int)FileUtility.CsvDelimiter.Comma,
                                              Encoding.UTF8.CodePage);

            fileUrl = FilesLinkUtility.GetFileWebEditorUrl((int)file.ID) + editorOptions;
        }
        else
        {
            fileUrl = FilesLinkUtility.GetFileDownloadUrl((int)file.ID);
        }
    }

    return fileUrl;
}
}
/// <summary>
/// Static facade over the export queue: one export operation per key (tenant
/// for a full export, tenant+user for a partial one), with status lookup and
/// cooperative cancellation.
/// </summary>
public class ExportToCsv
{
    #region Members

    private static readonly object Locker = new object();

    private static readonly ProgressQueue Queue = new ProgressQueue(Global.GetQueueWorkerCount("export"), Global.GetQueueWaitInterval("export"), true);

    #endregion

    #region Public Methods

    /// <summary>
    /// Returns the current state of the export: the live queue item when one
    /// is running, otherwise the last snapshot published to the cache.
    /// </summary>
    public static IProgressItem GetStatus(bool partialDataExport)
    {
        var key = GetKey(partialDataExport);

        return Queue.GetStatus(key) ?? ExportDataCache.Get(key);
    }

    /// <summary>
    /// Starts a new export unless one with the same key is already running or
    /// has a cached result; returns the operation describing its progress.
    /// </summary>
    public static IProgressItem Start(FilterObject filterObject, string fileName)
    {
        lock (Locker)
        {
            var key = GetKey(filterObject != null);

            var operation = Queue.GetStatus(key);

            if (operation == null)
            {
                // A finished (cached) operation wins over starting a new one.
                var cached = ExportDataCache.Get(key);
                if (cached != null)
                    return cached;

                ExportDataCache.ResetAll(key);

                operation = new ExportDataOperation(filterObject, fileName);
                Queue.Add(operation);
            }

            if (!Queue.IsStarted)
                Queue.Start(x => x.RunJob());

            return operation;
        }
    }

    /// <summary>
    /// Cancels the export with the given key: removes it from the queue if it
    /// has not started yet and raises the cancel flag for a running one.
    /// </summary>
    public static void Cancel(bool partialDataExport)
    {
        lock (Locker)
        {
            var key = GetKey(partialDataExport);

            var queuedItem = Queue.GetItems().FirstOrDefault(item => (string)item.Id == key);

            if (queuedItem != null)
            {
                Queue.Remove(queuedItem);
            }

            // A running operation polls this flag between rows.
            ExportDataCache.SetCancelFlag(key);
        }
    }

    /// <summary>
    /// Builds the operation key: "tenantId_userId" for a partial export (one
    /// per user), "tenantId_&lt;empty guid&gt;" for a full export (one per tenant).
    /// </summary>
    public static string GetKey(bool partialDataExport)
    {
        var scope = partialDataExport ? SecurityContext.CurrentAccount.ID : Guid.Empty;

        return string.Format("{0}_{1}", TenantProvider.CurrentTenantID, scope);
    }

    /// <summary>
    /// Runs a partial export synchronously (no queue) and returns the URL of
    /// the produced file.
    /// </summary>
    public static String ExportItems(FilterObject filterObject, string fileName)
    {
        var operation = new ExportDataOperation(filterObject, fileName);

        operation.RunJob();

        return operation.FileUrl;
    }

    #endregion
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
namespace NRules.RuleModel.Builders
{
internal static class ElementValidator
{
/// <summary>
/// Convenience overload: validates that the exported declarations across the
/// given elements have unique names.
/// </summary>
public static void ValidateUniqueDeclarations(params RuleElement[] elements)
{
    IEnumerable<RuleElement> sequence = elements;
    ValidateUniqueDeclarations(sequence);
}
/// <summary>
/// Validates that no declaration name is exported more than once across the
/// given elements.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when duplicate declaration names are found.</exception>
public static void ValidateUniqueDeclarations(IEnumerable<RuleElement> elements)
{
    var duplicateNames =
        (from declaration in elements.SelectMany(element => element.Exports)
         group declaration by declaration.Name
         into byName
         where byName.Count() > 1
         select byName.Key).ToArray();

    if (duplicateNames.Length > 0)
    {
        var declarations = string.Join(",", duplicateNames);
        throw new InvalidOperationException($"Duplicate declarations. Declaration={declarations}");
    }
}
/// <summary>
/// Validates variable usage across a rule definition: every variable consumed
/// by the match conditions, filters and actions must be declared by the left
/// hand side or the dependency group, and match conditions must not reference
/// injected dependencies.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown on any undefined or illegal variable reference.</exception>
public static void ValidateRuleDefinition(RuleDefinition definition)
{
    // Everything a rule may legally reference: LHS pattern declarations
    // plus injected dependencies.
    var declared = definition.LeftHandSide.Exports
        .Concat(definition.DependencyGroup.Exports).ToArray();

    var missingInMatch = definition.LeftHandSide.Imports
        .Except(declared).ToArray();
    if (missingInMatch.Length > 0)
    {
        var variables = string.Join(",", missingInMatch.Select(x => x.Name));
        throw new InvalidOperationException($"Undefined variables in rule match conditions. Variables={variables}");
    }

    var missingInFilter = definition.FilterGroup.Imports
        .Except(declared).ToArray();
    if (missingInFilter.Length > 0)
    {
        var variables = string.Join(",", missingInFilter.Select(x => x.Name));
        throw new InvalidOperationException($"Undefined variables in rule filter. Variables={variables}");
    }

    var missingInActions = definition.RightHandSide.Imports
        .Except(declared).ToArray();
    if (missingInActions.Length > 0)
    {
        var variables = string.Join(",", missingInActions.Select(x => x.Name));
        throw new InvalidOperationException($"Undefined variables in rule actions. Variables={variables}");
    }

    // Dependencies are resolved at action time, so the match side must not
    // consume them.
    var dependencyUseInMatch = definition.LeftHandSide.Imports
        .Intersect(definition.DependencyGroup.Exports).ToArray();
    if (dependencyUseInMatch.Length > 0)
    {
        var variables = string.Join(",", dependencyUseInMatch.Select(x => x.Name));
        throw new InvalidOperationException($"Rule match conditions cannot reference injected dependencies. Variables={variables}");
    }
}
/// <summary>
/// Dispatches validation of an aggregate element to the routine matching its
/// aggregator name; unrecognized (custom) aggregator names are not validated.
/// </summary>
public static void ValidateAggregate(AggregateElement element)
{
    if (element.Name == AggregateElement.CollectName)
    {
        ValidateCollectAggregate(element);
    }
    else if (element.Name == AggregateElement.GroupByName)
    {
        ValidateGroupByAggregate(element);
    }
    else if (element.Name == AggregateElement.ProjectName)
    {
        ValidateProjectAggregate(element);
    }
    else if (element.Name == AggregateElement.FlattenName)
    {
        ValidateFlattenAggregate(element);
    }
}
/// <summary>
/// Validates a Collect aggregate: at most one key and one element selector,
/// selector parameters compatible with the source type, result type
/// compatible with the produced collection (a lookup when keyed, otherwise a
/// plain enumerable), and sort key selectors compatible with the source.
/// </summary>
/// <exception cref="ArgumentException">Thrown on any violated constraint.</exception>
public static void ValidateCollectAggregate(AggregateElement element)
{
    var sourceType = element.Source.ValueType;
    var resultType = element.ResultType;

    var keySelectors = element.Expressions.Find(AggregateElement.KeySelectorName).ToArray();
    if (keySelectors.Length > 1)
    {
        throw new ArgumentException(
            $"Collect aggregator can have no more than one key selector. Count={keySelectors.Length}");
    }
    foreach (var keySelector in keySelectors.Select(x => x.Expression))
    {
        if (keySelector.Parameters.Count == 0)
        {
            throw new ArgumentException(
                $"Collect key selector must have at least one parameter. KeySelector={keySelector}");
        }
        if (keySelector.Parameters[0].Type != sourceType)
        {
            throw new ArgumentException(
                "Collect key selector must have a parameter type that matches the aggregate source. " +
                $"KeySelector={keySelector}, ExpectedType={sourceType}, ActualType={keySelector.Parameters[0].Type}");
        }
    }

    var elementSelectors = element.Expressions.Find(AggregateElement.ElementSelectorName).ToArray();
    if (elementSelectors.Length > 1)
    {
        throw new ArgumentException(
            $"Collect aggregator can have no more than one element selector. Count={elementSelectors.Length}");
    }
    foreach (var elementSelector in elementSelectors.Select(x => x.Expression))
    {
        if (elementSelector.Parameters.Count == 0)
        {
            throw new ArgumentException(
                $"Collect element selector must have at least one parameter. KeySelector={elementSelector}");
        }
        if (elementSelector.Parameters[0].Type != sourceType)
        {
            throw new ArgumentException(
                "Collect element selector must have a parameter type that matches the aggregate source. " +
                $"ElementSelector={elementSelector}, ExpectedType={sourceType}, ActualType={elementSelector.Parameters[0].Type}");
        }
    }

    if (keySelectors.Length > 0)
    {
        // Fixed: previously elementSelectors[0] was indexed without checking
        // that an element selector exists, so a keyed Collect without one
        // failed with an IndexOutOfRangeException instead of a meaningful
        // validation error.
        if (elementSelectors.Length == 0)
        {
            throw new ArgumentException(
                "Collect aggregator with a key selector must also have an element selector.");
        }
        var expectedResultType = typeof(ILookup<,>).MakeGenericType(
            keySelectors[0].Expression.ReturnType, elementSelectors[0].Expression.ReturnType);
        if (!expectedResultType.IsAssignableFrom(resultType))
        {
            throw new ArgumentException(
                $"Collect result with grouping key must be a lookup collection. ExpectedType={expectedResultType}, ActualType={resultType}");
        }
    }
    else
    {
        var expectedResultType = typeof(IEnumerable<>).MakeGenericType(sourceType);
        if (!expectedResultType.IsAssignableFrom(resultType))
        {
            throw new ArgumentException(
                $"Collect result must be a collection of source elements. ExpectedType={expectedResultType}, ActualType={resultType}");
        }
    }

    // Ascending and descending sort key selectors share the same constraints.
    var sortKeySelectorsAscending = element.Expressions.Find(AggregateElement.KeySelectorAscendingName);
    var sortKeySelectorsDescending = element.Expressions.Find(AggregateElement.KeySelectorDescendingName);
    foreach (var sortKeySelector in sortKeySelectorsAscending.Concat(sortKeySelectorsDescending).Select(x => x.Expression))
    {
        if (sortKeySelector.Parameters.Count == 0)
        {
            throw new ArgumentException(
                $"Sort key selector must have at least one parameter. KeySelector={sortKeySelector}");
        }
        if (sortKeySelector.Parameters[0].Type != sourceType)
        {
            throw new ArgumentException(
                "Sort key selector must have a parameter type that matches the aggregate source. " +
                $"KeySelector={sortKeySelector}, ExpectedType={sourceType}, ActualType={sortKeySelector.Parameters[0].Type}");
        }
    }
}
/// <summary>
/// Validates a GroupBy aggregate: key and element selectors must take the
/// source type as their first parameter, and the resulting
/// IGrouping&lt;key, element&gt; must be assignable to the declared result type.
/// </summary>
/// <exception cref="ArgumentException">Thrown on any violated constraint.</exception>
public static void ValidateGroupByAggregate(AggregateElement element)
{
    var sourceType = element.Source.ValueType;
    var resultType = element.ResultType;

    var keyExpression = element.Expressions[AggregateElement.KeySelectorName].Expression;
    if (keyExpression.Parameters.Count == 0)
    {
        throw new ArgumentException(
            $"GroupBy key selector must have at least one parameter. KeySelector={keyExpression}");
    }
    if (keyExpression.Parameters[0].Type != sourceType)
    {
        throw new ArgumentException(
            "GroupBy key selector must have a parameter type that matches the aggregate source. " +
            $"KeySelector={keyExpression}, ExpectedType={sourceType}, ActualType={keyExpression.Parameters[0].Type}");
    }

    var elementExpression = element.Expressions[AggregateElement.ElementSelectorName].Expression;
    if (elementExpression.Parameters.Count == 0)
    {
        throw new ArgumentException(
            $"GroupBy element selector must have at least one parameter. ElementSelector={elementExpression}");
    }
    if (elementExpression.Parameters[0].Type != sourceType)
    {
        throw new ArgumentException(
            "GroupBy element selector must have a parameter type that matches the aggregate source. " +
            $"ElementSelector={elementExpression}, ExpectedType={sourceType}, ActualType={elementExpression.Parameters[0].Type}");
    }

    // The aggregation produces IGrouping<key, element>; the declared result
    // type must be able to receive it.
    var groupingType = typeof(IGrouping<,>).MakeGenericType(keyExpression.ReturnType, elementExpression.ReturnType);
    if (!resultType.IsAssignableFrom(groupingType))
    {
        throw new ArgumentException(
            "GroupBy key/element selectors must produce a grouping assignable to the aggregation result. " +
            $"ElementSelector={elementExpression}, ResultType={resultType}, GroupingType={groupingType}");
    }
}
/// <summary>
/// Validates a projection aggregate: the selector must take the aggregate
/// source type as its first parameter and return a value assignable to the
/// aggregate's declared result type.
/// </summary>
public static void ValidateProjectAggregate(AggregateElement element)
{
    var sourceType = element.Source.ValueType;
    var resultType = element.ResultType;

    var projection = element.Expressions[AggregateElement.SelectorName].Expression;
    var parameters = projection.Parameters;
    if (parameters.Count == 0)
        throw new ArgumentException(
            $"Projection selector must have at least one parameter. Selector={projection}");

    var firstParamType = parameters[0].Type;
    if (firstParamType != sourceType)
        throw new ArgumentException(
            $"Projection selector must have its first parameter type that matches the aggregate source. Selector={projection}, ExpectedType={sourceType}, ActualType={firstParamType}");

    if (!resultType.IsAssignableFrom(projection.ReturnType))
        throw new ArgumentException(
            $"Projection selector must produce a value assignable to the aggregation result. Selector={projection}, ResultType={resultType}, SelectorReturnType={projection.ReturnType}");
}
/// <summary>
/// Validates a flattening aggregate: the selector must take exactly one
/// parameter of the aggregate source type and return an IEnumerable of the
/// aggregate's declared result type.
/// </summary>
public static void ValidateFlattenAggregate(AggregateElement element)
{
    var sourceType = element.Source.ValueType;
    var resultType = element.ResultType;

    var flattener = element.Expressions[AggregateElement.SelectorName].Expression;
    if (flattener.Parameters.Count != 1)
        throw new ArgumentException(
            $"Flattening selector must have a single parameter. Selector={flattener}");

    var paramType = flattener.Parameters[0].Type;
    if (paramType != sourceType)
        throw new ArgumentException(
            $"Flattening selector must have a parameter type that matches the aggregate source. Selector={flattener}, ExpectedType={sourceType}, ActualType={paramType}");

    // The selector must yield a sequence whose elements fit the result type.
    var expectedCollection = typeof(IEnumerable<>).MakeGenericType(resultType);
    if (!expectedCollection.IsAssignableFrom(flattener.ReturnType))
        throw new ArgumentException(
            $"Flattening selector must produce a collection of values that are assignable to the aggregation result. Selector={flattener}, ResultType={resultType}, SelectorReturnType={flattener.ReturnType}");
}
/// <summary>
/// Validates a pattern element: when a source is present it must be either an
/// aggregate or a binding element.
/// </summary>
public static void ValidatePattern(PatternElement element)
{
    var source = element.Source;
    if (source == null)
        return;

    var kind = source.ElementType;
    if (kind != ElementType.Aggregate && kind != ElementType.Binding)
        throw new ArgumentException($"Invalid source element. ElementType={kind}");
}
/// <summary>
/// Validates a binding element: the expression's return value must be
/// assignable to the binding's declared result type.
/// </summary>
public static void ValidateBinding(BindingElement element)
{
    var declaredType = element.ResultType;
    var actualType = element.Expression.ReturnType;
    if (declaredType.IsAssignableFrom(actualType))
        return;
    throw new ArgumentException($"Binding expression not assignable to result type. ResultType={declaredType}, ExpressionResult={actualType}");
}
/// <summary>
/// Validates a group element: it must contain at least one child, and every
/// child must be a pattern or a logical combinator (And/Or/Not/Exists/ForAll).
/// </summary>
public static void ValidateGroup(GroupElement element)
{
    if (!element.ChildElements.Any())
        throw new InvalidOperationException("Group element requires at least one child element");

    foreach (var child in element.ChildElements)
    {
        var kind = child.ElementType;
        bool allowed =
            kind == ElementType.Pattern ||
            kind == ElementType.And ||
            kind == ElementType.Or ||
            kind == ElementType.Not ||
            kind == ElementType.Exists ||
            kind == ElementType.ForAll;
        if (!allowed)
            throw new ArgumentException($"Invalid element in the group. ElementType={kind}");
    }
}
/// <summary>
/// Validates an Exists element: its source must be a pattern, And group or Or group.
/// </summary>
public static void ValidateExists(ExistsElement element)
{
    var sourceKind = element.Source.ElementType;
    bool valid =
        sourceKind == ElementType.Pattern ||
        sourceKind == ElementType.And ||
        sourceKind == ElementType.Or;
    if (!valid)
        throw new ArgumentException($"Invalid source element. ElementType={sourceKind}");
}
/// <summary>
/// Validates a Not element: its source must be a pattern, And group or Or group.
/// </summary>
public static void ValidateNot(NotElement element)
{
    var sourceKind = element.Source.ElementType;
    bool valid =
        sourceKind == ElementType.Pattern ||
        sourceKind == ElementType.And ||
        sourceKind == ElementType.Or;
    if (!valid)
        throw new ArgumentException($"Invalid source element. ElementType={sourceKind}");
}
/// <summary>
/// Validates a ForAll element: at least one pattern must be supplied.
/// </summary>
public static void ValidateForAll(ForAllElement element)
{
    if (element.Patterns.Any())
        return;
    throw new InvalidOperationException("At least one FORALL pattern must be specified");
}
}
}
| |
/*
* REST API Documentation for the MOTI Hired Equipment Tracking System (HETS) Application
*
* The Hired Equipment Program is for owners/operators who have a dump truck, bulldozer, backhoe or other piece of equipment they want to hire out to the transportation ministry for day labour and emergency projects. The Hired Equipment Program distributes available work to local equipment owners. The program is based on seniority and is designed to deliver work to registered users fairly and efficiently through the development of local area call-out lists.
*
* OpenAPI spec version: v1
*
*
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Threading.Tasks;
using Xunit;
using HETSAPI;
using HETSAPI.Models;
using System.Reflection;
namespace HETSAPI.Test
{
/// <summary>
/// Unit tests for the <c>Owner</c> model.
/// Generated scaffolding: apart from the instance/Id checks, each property
/// test is a placeholder (asserts true) awaiting a real implementation.
/// </summary>
public class OwnerModelTests
{
    // Owner instance shared by every test in this class (rebuilt per test by xUnit).
    private Owner _instance;

    /// <summary>
    /// Creates the Owner instance exercised by the tests.
    /// </summary>
    public OwnerModelTests()
    {
        _instance = new Owner();
    }

    /// <summary>Verifies that the constructed object is an Owner.</summary>
    [Fact]
    public void OwnerInstanceTest() => Assert.IsType<Owner>(_instance);

    /// <summary>Verifies that the 'Id' property is an int.</summary>
    [Fact]
    public void IdTest() => Assert.IsType<int>(_instance.Id);

    /// <summary>Placeholder test for the 'OwnerCd' property.</summary>
    [Fact]
    public void OwnerCdTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'OwnerFirstName' property.</summary>
    [Fact]
    public void OwnerFirstNameTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'OwnerLastName' property.</summary>
    [Fact]
    public void OwnerLastNameTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'ContactPerson' property.</summary>
    [Fact]
    public void ContactPersonTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'LocalToArea' property.</summary>
    [Fact]
    public void LocalToAreaTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'MaintenanceContractor' property.</summary>
    [Fact]
    public void MaintenanceContractorTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'Comment' property.</summary>
    [Fact]
    public void CommentTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'WCBNum' property.</summary>
    [Fact]
    public void WCBNumTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'WCBExpiryDate' property.</summary>
    [Fact]
    public void WCBExpiryDateTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'CGLCompany' property.</summary>
    [Fact]
    public void CGLCompanyTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'CGLPolicy' property.</summary>
    [Fact]
    public void CGLPolicyTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'CGLStartDate' property.</summary>
    [Fact]
    public void CGLStartDateTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'CGLEndDate' property.</summary>
    [Fact]
    public void CGLEndDateTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'StatusCd' property.</summary>
    [Fact]
    public void StatusCdTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'ArchiveCd' property.</summary>
    [Fact]
    public void ArchiveCdTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'ArchiveReason' property.</summary>
    [Fact]
    public void ArchiveReasonTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'LocalArea' property.</summary>
    [Fact]
    public void LocalAreaTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'PrimaryContact' property.</summary>
    [Fact]
    public void PrimaryContactTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'Contacts' property.</summary>
    [Fact]
    public void ContactsTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'Notes' property.</summary>
    [Fact]
    public void NotesTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'Attachments' property.</summary>
    [Fact]
    public void AttachmentsTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'History' property.</summary>
    [Fact]
    public void HistoryTest() => Assert.True(true);

    /// <summary>Placeholder test for the 'EquipmentList' property.</summary>
    [Fact]
    public void EquipmentListTest() => Assert.True(true);
}
}
| |
/* ====================================================================
Copyright (C) 2004-2008 fyiReporting Software, LLC
This file is part of the fyiReporting RDL project.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For additional information, email info@fyireporting.com or visit
the website www.fyiReporting.com.
*/
using System;
using System.Xml;
using System.IO;
using System.Collections;
using System.Text;
using System.Drawing;
namespace fyiReporting.RdlDesign
{
/// <summary>
/// MatrixView: builds a simplified representation of the matrix so that it
/// can be drawn or hit tested in a simplified fashion.
/// </summary>
internal class MatrixView
{
    DesignXmlDraw _Draw;            // draw context used to resolve child nodes and sizes in the RDL XML
    XmlNode _MatrixNode;            // the Matrix element this view represents
    int _Rows;                      // total rows in the simplified view (headers + repeated data + subtotal rows)
    int _HeaderRows;                // rows occupied by the column groupings
    int _Columns;                   // total columns in the simplified view (headers + repeated data + subtotal columns)
    int _HeaderColumns;             // columns occupied by the row groupings
    float _Height;                  // total height of the view; sum of all row heights
    float _Width;                   // total width of the view; sum of all column widths
    MatrixItem[,] _MatrixView;      // the simplified matrix cells, indexed [row, column]
    string _ViewBuilt=null;         // null on success; otherwise the message of the exception thrown during BuildView
    /// <summary>
    /// Constructs the simplified view for the given Matrix node. Any exception
    /// raised while building (e.g. malformed matrix XML) is captured into
    /// _ViewBuilt rather than propagated, so the designer can keep running.
    /// </summary>
    internal MatrixView(DesignXmlDraw dxDraw, XmlNode matrix)
    {
        _Draw = dxDraw;
        _MatrixNode = matrix;
        try
        {
            BuildView();
        }
        catch (Exception e)
        {
            _ViewBuilt = e.Message;
        }
    }
    /// <summary>Cell accessor by [row, column].</summary>
    internal MatrixItem this[int row, int column]
    {
        get {return _MatrixView[row, column]; }
    }
    /// <summary>Total column count of the simplified view.</summary>
    internal int Columns
    {
        get {return _Columns;}
    }
    /// <summary>Total row count of the simplified view.</summary>
    internal int Rows
    {
        get {return _Rows;}
    }
    /// <summary>Number of leading columns occupied by row-grouping headers.</summary>
    internal int HeaderColumns
    {
        get {return _HeaderColumns;}
    }
    /// <summary>Number of leading rows occupied by column-grouping headers.</summary>
    internal int HeaderRows
    {
        get {return _HeaderRows;}
    }
    /// <summary>Total height of the view (sum of row heights).</summary>
    internal float Height
    {
        get {return _Height;}
    }
    /// <summary>Total width of the view (sum of column widths).</summary>
    internal float Width
    {
        get {return _Width;}
    }
    /// <summary>
    /// Builds the full view: size the array, then fill every cell and compute sizes.
    /// </summary>
    void BuildView()
    {
        CountRowColumns(); // get the total count of rows and columns
        _MatrixView = new MatrixItem[_Rows, _Columns]; // allocate the 2-dimensional array
        FillMatrix();
    }
    /// <summary>
    /// Computes the total row/column counts. Each row-grouping subtotal repeats
    /// the static rows once; each column-grouping subtotal repeats the static
    /// columns once. Header rows/columns come from the grouping counts.
    /// </summary>
    void CountRowColumns()
    {
        int mcc = CountMatrixColumns();
        int mrc = CountMatrixRows();
        int iColumnGroupings = this.CountColumnGroupings();
        int iRowGroupings = this.CountRowGroupings();
        _Rows = mrc + this.CountColumnGroupings() +
            CountRowGroupingSubtotals() * mrc;
        _Columns = mcc + iRowGroupings +
            CountColumnGroupingSubtotals() * mcc;
        _HeaderRows = iColumnGroupings;
        _HeaderColumns = iRowGroupings;
    }
    /// <summary>
    /// Populates the cells and then resolves heights/widths. Order matters:
    /// headers are placed first so FillMatrixCells only fills empty slots.
    /// </summary>
    void FillMatrix()
    {
        FillMatrixColumnGroupings();
        FillMatrixRowGroupings();
        FillMatrixCorner();
        FillMatrixCells();
        FillMatrixHeights();
        FillMatrixWidths();
        FillMatrixCornerHeightWidth();
    }
    /// <summary>
    /// Normalizes each row to the maximum cell height in that row and
    /// accumulates the total view height.
    /// </summary>
    void FillMatrixHeights()
    {
        // fill out the heights for each row
        this._Height = 0;
        for (int row=0; row < this.Rows; row++)
        {
            float height=0;
            for (int col= 0; col < this.Columns; col++)
            {
                MatrixItem mi = _MatrixView[row,col];
                height = Math.Max(height, mi.Height);
            }
            for (int col= 0; col < this.Columns; col++)
                _MatrixView[row,col].Height = height;
            this._Height += height;
        }
    }
    /// <summary>
    /// Normalizes each column to the maximum cell width in that column and
    /// accumulates the total view width.
    /// </summary>
    void FillMatrixWidths()
    {
        // fill out the widths for each column
        this._Width = 0;
        for (int col=0; col < this.Columns; col++)
        {
            float width=0;
            for (int row= 0; row < this.Rows; row++)
            {
                MatrixItem mi = _MatrixView[row,col];
                width = Math.Max(width, mi.Width);
            }
            for (int row= 0; row < this.Rows; row++)
                _MatrixView[row,col].Width = width;
            this._Width += width;
        }
    }
    /// <summary>
    /// Sizes the corner cell to span all header rows and header columns.
    /// NOTE(review): reads _MatrixView[row, 1] and _MatrixView[1, col]; this
    /// assumes at least two rows/columns exist whenever headers exist — confirm.
    /// </summary>
    void FillMatrixCornerHeightWidth()
    {
        if (this.Columns == 0 || this.Rows == 0)
            return;
        // set the height and width for the corner
        MatrixItem mi = _MatrixView[0,0];
        mi.Height = 0;
        for (int row=0; row < this._HeaderRows; row++)
            mi.Height += _MatrixView[row, 1].Height;
        mi.Width = 0;
        for (int col=0; col < this._HeaderColumns; col++)
            mi.Width += _MatrixView[1, col].Width;
    }
    /// <summary>
    /// Fills the data region of the view by tiling the static MatrixCells
    /// definitions (modulo row/column) into every slot not already taken by a
    /// header or subtotal, then replaces any remaining nulls with empty items.
    /// </summary>
    void FillMatrixCells()
    {
        // get a collection with the matrix cells
        int staticRows = this.CountMatrixRows();
        int staticCols = this.CountMatrixColumns();
        XmlNode[,] rc = new XmlNode[staticRows, staticCols];
        XmlNode mrows = DesignXmlDraw.FindNextInHierarchy(_MatrixNode, "MatrixRows");
        int ri=0;
        foreach (XmlNode mrow in mrows.ChildNodes)
        {
            int ci=0;
            XmlNode mcells = DesignXmlDraw.FindNextInHierarchy(mrow, "MatrixCells");
            foreach (XmlNode mcell in mcells.ChildNodes)
            {
                // obtain the matrix cell
                XmlNode repi = DesignXmlDraw.FindNextInHierarchy(mcell, "ReportItems");
                rc[ri,ci] = repi;
                ci++;
            }
            ri++;
        }
        // now fill out the rest of the matrix with empty entries
        MatrixItem mi;
        // Fill the middle (MatrixCells) with the contents of MatrixCells repeated
        for (int row=_HeaderRows; row < this.Rows; row++)
        {
            // rowcell selects which static row definition repeats into this view row
            int rowcell = staticRows == 0? 0: (row - _HeaderRows) % staticRows;
            int mcellCount=0;
            for (int col= _HeaderColumns; col < this.Columns; col++)
            {
                if (_MatrixView[row, col] == null)
                {
                    float width = GetMatrixColumnWidth(mcellCount);
                    float height = GetMatrixRowHeight(rowcell);
                    // NOTE(review): rc is already XmlNode[,]; the 'as' cast is redundant
                    XmlNode n = rc[rowcell, mcellCount++] as XmlNode;
                    if (mcellCount >= staticCols)
                        mcellCount=0; // wrap back to the first static column
                    mi = new MatrixItem(n);
                    mi.Width = width;
                    mi.Height = height;
                    _MatrixView[row, col] = mi;
                }
            }
        }
        // Make sure we have no null entries
        for (int row=0; row < this.Rows; row++)
        {
            for (int col= 0; col < this.Columns; col++)
            {
                if (_MatrixView[row, col] == null)
                {
                    mi = new MatrixItem(null);
                    _MatrixView[row, col] = mi;
                }
            }
        }
    }
    /// <summary>
    /// Places the optional Corner report items into cell [0,0].
    /// </summary>
    void FillMatrixCorner()
    {
        XmlNode corner = _Draw.GetNamedChildNode(_MatrixNode, "Corner");
        if (corner == null)
            return;
        XmlNode ris = DesignXmlDraw.FindNextInHierarchy(corner, "ReportItems");
        MatrixItem mi = new MatrixItem(ris);
        _MatrixView[0,0] = mi;
    }
    /// <summary>
    /// Returns the Width of the count'th MatrixColumn, or 0 when out of range.
    /// </summary>
    float GetMatrixColumnWidth(int count)
    {
        XmlNode mcs = DesignXmlDraw.FindNextInHierarchy(_MatrixNode, "MatrixColumns");
        foreach (XmlNode c in mcs.ChildNodes)
        {
            if (c.Name != "MatrixColumn")
                continue;
            if (count == 0)
                return _Draw.GetSize(c, "Width");
            count--;
        }
        return 0;
    }
    /// <summary>
    /// Fills the header rows from the ColumnGroupings: one view row per
    /// ColumnGrouping; static columns are laid out left to right, dynamic
    /// columns occupy the first data column with their subtotal (if any)
    /// placed from the right edge inward.
    /// </summary>
    void FillMatrixColumnGroupings()
    {
        XmlNode cGroupings = _Draw.GetNamedChildNode(_MatrixNode, "ColumnGroupings");
        if (cGroupings == null)
            return;
        int rows=0;
        int cols=this._HeaderColumns;
        MatrixItem mi;
        XmlNode ris; // work variable to hold reportitems
        int staticCols = this.CountMatrixColumns();
        int subTotalCols=DesignXmlDraw.CountChildren(cGroupings, "ColumnGrouping", "DynamicColumns", "Subtotal");
        foreach (XmlNode c in cGroupings.ChildNodes)
        {
            if (c.Name != "ColumnGrouping")
                continue;
            XmlNode scol = DesignXmlDraw.FindNextInHierarchy(c, "StaticColumns");
            if (scol != null)
            { // Static columns
                int ci=0;
                foreach (XmlNode sc in scol.ChildNodes)
                {
                    if (sc.Name != "StaticColumn")
                        continue;
                    ris = DesignXmlDraw.FindNextInHierarchy(sc, "ReportItems");
                    mi = new MatrixItem(ris);
                    mi.Height = _Draw.GetSize(c, "Height");
                    mi.Width = GetMatrixColumnWidth(ci);
                    _MatrixView[rows, _HeaderColumns+ci] = mi;
                    ci++;
                }
            }
            else
            { // Dynamic Columns
                ris = DesignXmlDraw.FindNextInHierarchy(c, "DynamicColumns", "ReportItems");
                mi = new MatrixItem(ris);
                mi.Height = _Draw.GetSize(c, "Height");
                mi.Width = GetMatrixColumnWidth(0);
                _MatrixView[rows, _HeaderColumns] = mi;
                XmlNode subtotal = DesignXmlDraw.FindNextInHierarchy(c, "DynamicColumns", "Subtotal");
                if (subtotal != null)
                {
                    ris = DesignXmlDraw.FindNextInHierarchy(subtotal, "ReportItems");
                    mi = new MatrixItem(ris);
                    mi.Height = _Draw.GetSize(c, "Height");
                    mi.Width = GetMatrixColumnWidth(0); // TODO this is wrong!! should be total of all static widths
                    // subtotal columns are placed right-to-left as subTotalCols decrements
                    _MatrixView[rows, _HeaderColumns+(staticCols-1)+subTotalCols] = mi;
                    subTotalCols--;
                }
            }
            rows++; // add a row per ColumnGrouping
        }
    }
    /// <summary>
    /// Returns the Height of the count'th MatrixRow, or 0 when out of range.
    /// </summary>
    float GetMatrixRowHeight(int count)
    {
        XmlNode mcs = DesignXmlDraw.FindNextInHierarchy(_MatrixNode, "MatrixRows");
        foreach (XmlNode c in mcs.ChildNodes)
        {
            if (c.Name != "MatrixRow")
                continue;
            if (count == 0)
                return _Draw.GetSize(c, "Height");
            count--;
        }
        return 0;
    }
    /// <summary>
    /// Fills the header columns from the RowGroupings: one view column per
    /// RowGrouping; static rows are laid out top to bottom, dynamic rows
    /// occupy the first data row with their subtotal (if any) placed from the
    /// bottom edge upward.
    /// </summary>
    void FillMatrixRowGroupings()
    {
        XmlNode rGroupings = _Draw.GetNamedChildNode(_MatrixNode, "RowGroupings");
        if (rGroupings == null)
            return;
        // Height of the first MatrixRow; used for dynamic row headers and subtotals
        float height = _Draw.GetSize(
            DesignXmlDraw.FindNextInHierarchy(_MatrixNode, "MatrixRows", "MatrixRow"),
            "Height");
        int cols = 0;
        int staticRows = this.CountMatrixRows();
        int subtotalrows= DesignXmlDraw.CountChildren(rGroupings, "RowGrouping", "DynamicRows", "Subtotal");
        MatrixItem mi;
        foreach (XmlNode c in rGroupings.ChildNodes)
        {
            if (c.Name != "RowGrouping")
                continue;
            XmlNode srow = DesignXmlDraw.FindNextInHierarchy(c, "StaticRows");
            if (srow != null)
            { // Static rows
                int ri=0;
                foreach (XmlNode sr in srow.ChildNodes)
                {
                    if (sr.Name != "StaticRow")
                        continue;
                    XmlNode ris = DesignXmlDraw.FindNextInHierarchy(sr, "ReportItems");
                    mi = new MatrixItem(ris);
                    mi.Width = _Draw.GetSize(c, "Width");
                    mi.Height = GetMatrixRowHeight(ri);
                    _MatrixView[_HeaderRows+ri, cols] = mi;
                    ri++;
                }
            }
            else
            {
                XmlNode ris = DesignXmlDraw.FindNextInHierarchy(c, "DynamicRows", "ReportItems");
                mi = new MatrixItem(ris);
                mi.Width = _Draw.GetSize(c, "Width");
                mi.Height = height;
                _MatrixView[_HeaderRows, cols] = mi;
                XmlNode subtotal = DesignXmlDraw.FindNextInHierarchy(c, "DynamicRows", "Subtotal");
                if (subtotal != null)
                {
                    ris = DesignXmlDraw.FindNextInHierarchy(subtotal, "ReportItems");
                    mi = new MatrixItem(ris);
                    mi.Width = _Draw.GetSize(c, "Width");
                    mi.Height = height;
                    _MatrixView[_HeaderRows+(staticRows-1)+subtotalrows, cols] = mi;
                    subtotalrows--; // these go backwards
                }
            }
            cols++; // add a column per RowGrouping
        }
    }
    /// <summary>
    /// Returns the count of static columns or 1
    /// </summary>
    /// <returns></returns>
    int CountMatrixColumns()
    {
        XmlNode cGroupings = _Draw.GetNamedChildNode(_MatrixNode, "ColumnGroupings");
        if (cGroupings == null)
            return 1; // 1 column
        // Get the number of static columns
        foreach (XmlNode c in cGroupings.ChildNodes)
        {
            if (c.Name != "ColumnGrouping")
                continue;
            XmlNode scol = DesignXmlDraw.FindNextInHierarchy(c, "StaticColumns");
            if (scol == null) // must be dynamic column
                continue;
            int ci=0;
            foreach (XmlNode sc in scol.ChildNodes)
            {
                if (sc.Name == "StaticColumn")
                    ci++;
            }
            return ci; // only one StaticColumns allowed in a column grouping
        }
        return 1; // 1 column
    }
    /// <summary>
    /// Returns the count of static rows or 1
    /// </summary>
    /// <returns></returns>
    int CountMatrixRows()
    {
        XmlNode rGroupings = _Draw.GetNamedChildNode(_MatrixNode, "RowGroupings");
        if (rGroupings == null)
            return 1; // 1 row
        // Get the number of static columns
        foreach (XmlNode c in rGroupings.ChildNodes)
        {
            if (c.Name != "RowGrouping")
                continue;
            XmlNode scol = DesignXmlDraw.FindNextInHierarchy(c, "StaticRows");
            if (scol == null) // must be dynamic column
                continue;
            int ci=0;
            foreach (XmlNode sc in scol.ChildNodes)
            {
                if (sc.Name == "StaticRow")
                    ci++;
            }
            return ci; // only one StaticRows allowed in a row grouping
        }
        return 1; // 1 row
    }
    /// <summary>
    /// Returns the count of ColumnGroupings
    /// </summary>
    /// <returns></returns>
    int CountColumnGroupings()
    {
        XmlNode cGroupings = _Draw.GetNamedChildNode(_MatrixNode, "ColumnGroupings");
        if (cGroupings == null)
            return 0;
        // Get the number of column groups
        int ci=0;
        foreach (XmlNode c in cGroupings.ChildNodes)
        {
            if (c.Name != "ColumnGrouping")
                continue;
            ci++;
        }
        return ci;
    }
    /// <summary>
    /// Returns the count of row grouping
    /// </summary>
    /// <returns></returns>
    int CountRowGroupings()
    {
        XmlNode rGroupings = _Draw.GetNamedChildNode(_MatrixNode, "RowGroupings");
        if (rGroupings == null)
            return 0; // 1 row
        // Get the number of row groupings
        int ri=0;
        foreach (XmlNode c in rGroupings.ChildNodes)
        {
            if (c.Name != "RowGrouping")
                continue;
            ri++;
        }
        return ri; // row groupings
    }
    /// <summary>
    /// Returns the count of ColumnGroupings with subtotals
    /// </summary>
    /// <returns></returns>
    int CountColumnGroupingSubtotals()
    {
        XmlNode cGroupings = _Draw.GetNamedChildNode(_MatrixNode, "ColumnGroupings");
        if (cGroupings == null)
            return 0;
        // Get the number of column groups with subtotals
        int ci=0;
        foreach (XmlNode c in cGroupings.ChildNodes)
        {
            if (c.Name != "ColumnGrouping")
                continue;
            XmlNode subtotal = DesignXmlDraw.FindNextInHierarchy(c, "DynamicColumns", "Subtotal");
            if (subtotal != null)
                ci++;
        }
        return ci;
    }
    /// <summary>
    /// Returns the count of row grouping subtotals
    /// </summary>
    /// <returns></returns>
    int CountRowGroupingSubtotals()
    {
        XmlNode rGroupings = _Draw.GetNamedChildNode(_MatrixNode, "RowGroupings");
        if (rGroupings == null)
            return 0; // 1 row
        // Get the number of row groupings
        int ri=0;
        foreach (XmlNode c in rGroupings.ChildNodes)
        {
            if (c.Name != "RowGrouping")
                continue;
            XmlNode subtotal = DesignXmlDraw.FindNextInHierarchy(c, "DynamicRows", "Subtotal");
            if (subtotal != null)
                ri++;
        }
        return ri; // row grouping subtotals
    }
}
/// <summary>
/// A single cell of the simplified matrix view: the ReportItems node it
/// displays (may be null for empty cells) plus its layout width and height.
/// </summary>
class MatrixItem
{
    internal MatrixItem(XmlNode ri)
    {
        ReportItem = ri;
    }

    /// <summary>ReportItems node rendered in this cell; null for an empty cell.</summary>
    internal XmlNode ReportItem { get; set; }

    /// <summary>Layout width of the cell.</summary>
    internal float Width { get; set; }

    /// <summary>Layout height of the cell.</summary>
    internal float Height { get; set; }
}
}
| |
using System;
using System.IO;
using System.Collections;
using System.Collections.Specialized;
using System.Drawing;
using System.Drawing.Imaging;
using SpiffLib;
namespace TemplateExtractor
{
/// <summary>
/// TemplateExtractor entry point: scans a template bitmap for tile-aligned
/// connected regions ("templates") and either saves each as a .png or adds
/// them to a template collection, optionally attaching terrain/color maps.
/// </summary>
class App
{
    static string gstrTemplateBitmap;   // -art: source bitmap containing the templates
    static string gstrTerrainBitmap;    // -ter: optional bitmap marking blocked terrain (red pixels)
    static string gstrColorsBitmap;     // -colors: optional bitmap giving per-half-tile terrain colors
    static string gstrTemplateNames;    // -n: optional file with one template name per line
    static string gstrOutputPrefix;     // prefix for generated names; NOTE(review): no CLI switch sets this, so it always defaults to the source bitmap's base name
    static string gstrTileCollection;   // -tc: output template collection (instead of .png files)
    static int gcxTile = 24, gcyTile = 24;  // tile size in pixels (-ts sets both, square tiles only)
    static BitmapData gbmd;             // locked bits of the source bitmap during scanning
    static Color gclrTransparent;       // transparent color, taken from pixel (0,0) of the source
    static int[,] gaCells;              // per-tile visit marker: -1 = empty/unvisited, 1 = claimed by a template
    /// <summary>
    /// The main entry point for the application.
    /// </summary>
    [STAThread]
    static unsafe int Main(string[] astrArgs)
    {
#if false
        // Dead debug harness for RGB<->HSL round-trip testing; disabled.
        while (true) {
            float h, s, l;
            float r, g, b;
            string str = Console.ReadLine();
            r = float.Parse(str);
            str = Console.ReadLine();
            g = float.Parse(str);
            str = Console.ReadLine();
            b = float.Parse(str);
            // r = .5f; g = .5f; b = 1.0f;
            Misc.RgbToHsl(r, g, b, &h, &s, &l);
            Console.WriteLine("r: {3}, g: {4}, b: {5} -> h: {0}, s: {1}, l: {2}", h, s, l, r, g, b);
            Misc.HslToRgb(h, s, l, &r, &g, &b);
            Console.WriteLine("r: {3}, g: {4}, b: {5} <- h: {0}, s: {1}, l: {2}", h, s, l, r, g, b);
        }
        return 0;
#endif
        // Command-line argument processing
        if (astrArgs.Length == 0) {
            PrintHelp();
            return 0;
        }
        for (int i = 0; i < astrArgs.Length; i++) {
            switch (astrArgs[i]) {
            case "-?":
                PrintHelp();
                return 0;
            case "-n":
                gstrTemplateNames = astrArgs[++i];
                break;
            case "-tc":
                gstrTileCollection = astrArgs[++i];
                break;
            case "-art":
                gstrTemplateBitmap = astrArgs[++i];
                break;
            case "-ter":
                gstrTerrainBitmap = astrArgs[++i];
                break;
            case "-colors":
                gstrColorsBitmap = astrArgs[++i];
                break;
            case "-ts":
                // tile size: one value sets both dimensions (square tiles)
                gcxTile = int.Parse(astrArgs[++i]);
                gcyTile = gcxTile;
                break;
            default:
                Console.WriteLine("Error: invalid argument '{0}'", astrArgs[i]);
                return 1;
            }
        }
        // Pure red marks blocked tiles in the terrain bitmap
        Color clrBlocked = Color.FromArgb(255, 0, 0);
        Bitmap bmTerrain = null;
        if (gstrTerrainBitmap != null)
            bmTerrain = new Bitmap(gstrTerrainBitmap);
        Bitmap bmColors = null;
        if (gstrColorsBitmap != null)
            bmColors = new Bitmap(gstrColorsBitmap);
        if (gstrTemplateBitmap == null) {
            Console.WriteLine("Error: A valid source bitmap must be specified");
            return -1;
        }
        if (gstrOutputPrefix == null)
            gstrOutputPrefix = Path.GetFileNameWithoutExtension(gstrTemplateBitmap);
        Console.WriteLine("Reading {0}", gstrTemplateBitmap);
        Bitmap bmFile = null;
        try {
            bmFile = new Bitmap(gstrTemplateBitmap);
        } catch {
            Console.WriteLine("Error: {0} is not a recognized bitmap file", gstrTemplateBitmap);
            return -1;
        }
        // Using DrawImage to copy subrectangles of a loaded bitmap doesn't work reliably
        // due to GDI+'s desire to scale based on Bitmap.Horizontal/VerticalResolution so
        // we must create a neutral resolution bitmap before doing any DrawImage'ing from it.
        // NOTE(review): format string below has no {0}; the extra argument is ignored.
        Console.WriteLine("Creating neutral resolution source bitmap", gstrTemplateBitmap);
        Bitmap bm = SpiffLib.Misc.NormalizeBitmap(bmFile);
        bmFile.Dispose();
        // The upper-left-most pixel defines the transparent color
        gclrTransparent = bm.GetPixel(0, 0);
        // Mark every tile cell unvisited before scanning
        int ctx = bm.Width / gcxTile;
        int cty = bm.Height / gcyTile;
        gaCells = new int[ctx, cty];
        for (int j = 0; j < cty; j++) {
            for (int i = 0; i < ctx; i++) {
                gaCells[i, j] = -1; // -1 = empty
            }
        }
        ArrayList alTemplates = new ArrayList();
        // NOTE(review): format string below has no {0}; the extra argument is ignored.
        Console.WriteLine("Scanning for templates", alTemplates.Count);
        // Lock down bits for speed
        Rectangle rc = new Rectangle(0, 0, bm.Width, bm.Height);
        gbmd = bm.LockBits(rc, ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);
        byte *pbBase = (byte *)gbmd.Scan0.ToPointer();
        // Scan tile corners (skipping a one-tile border) for non-transparent
        // pixels; each unvisited hit seeds a flood fill that collects one template.
        for (int y = gcyTile, ty = 1; y < bm.Height - gcyTile; y += gcyTile, ty++) {
            for (int x = gcxTile, tx = 1; x < bm.Width - gcxTile; x += gcxTile, tx++) {
                byte *pb = pbBase + y * gbmd.Stride + x * 3;
                Color clr = Color.FromArgb(pb[2], pb[1], pb[0]); // 24bpp is stored B,G,R
                if (clr != gclrTransparent && gaCells[tx, ty] == -1) {
                    ArrayList alTiles = new ArrayList();
                    FloodFill(pbBase, alTiles, tx, ty);
                    alTemplates.Add(alTiles);
                }
            }
        }
        bm.UnlockBits(gbmd);
        StreamReader stmrTemplateNames = null;
        if (gstrTemplateNames != null)
            stmrTemplateNames = new StreamReader(gstrTemplateNames);
        Console.WriteLine("Extracting {0} templates", alTemplates.Count);
        // NOTE(review): 'm' is presumably a namespace alias declared elsewhere in
        // the project (not visible in this file) — confirm before refactoring.
        m.DocManager.AddTemplate(new m.TemplateDocTemplate());
        m.TemplateDoc tmpd = (m.TemplateDoc)m.DocManager.NewDocument(typeof(m.TemplateDoc), new Object[] { new Size(gcxTile, gcyTile) });
        m.Template[] atmpl = new m.Template[alTemplates.Count];
        // Create a new bitmap for each template. Empty cells of the template are
        // filled with the transparent color.
        int cTiles = 0;
        int nTemplate = 0;
        foreach (ArrayList alTiles in alTemplates) {
            cTiles += alTiles.Count;
            // Found boundary of template
            int txMax = -1, tyMax = -1;
            int txMin = 99999, tyMin = 99999;
            foreach (Point pt in alTiles) {
                if (pt.X < txMin)
                    txMin = pt.X;
                if (pt.X > txMax)
                    txMax = pt.X;
                if (pt.Y < tyMin)
                    tyMin = pt.Y;
                if (pt.Y > tyMax)
                    tyMax = pt.Y;
            }
            // Copy the template's bounding box out of the source bitmap
            ctx = txMax + 1 - txMin;
            cty = tyMax + 1 - tyMin;
            int cx = ctx * gcxTile;
            int cy = cty * gcyTile;
            Bitmap bmTemplate = new Bitmap(cx, cy, PixelFormat.Format24bppRgb);
            Graphics gTemplate = Graphics.FromImage(bmTemplate);
            Rectangle rcSrc = new Rectangle(txMin * gcxTile, tyMin * gcyTile, cx, cy);
            gTemplate.DrawImage(bm, 0, 0, rcSrc, GraphicsUnit.Pixel);
            // Pick a name: from the names file if supplied, else a generated one
            string strT;
            if (gstrTemplateNames != null) {
                strT = stmrTemplateNames.ReadLine();
                if (strT == null) // end of template names reached
                    strT = String.Format("template{0:0#}", nTemplate);
            } else {
                strT = String.Format("{0}{1:0#}", gstrOutputPrefix, nTemplate);
            }
            if (gstrTileCollection == null) {
                // No collection requested: emit one .png per template
                bmTemplate.Save(strT + ".png", ImageFormat.Png);
                bmTemplate.Dispose();
            } else {
                // Build a Template object (which keeps bmTemplate) for the collection
                m.Template tmpl = new m.Template(tmpd, bmTemplate, strT);
                if (bmTerrain != null) {
                    // One terrain flag per tile, sampled at the tile's top-left pixel
                    tmpl.TerrainMap = new m.TerrainTypes[cty, ctx];
                    for (int j = 0; j < cty; j++) {
                        for (int i = 0; i < ctx; i++) {
                            if (bmTerrain.GetPixel((txMin + i) * gcxTile, (tyMin + j) * gcyTile) == clrBlocked)
                                tmpl.TerrainMap[j, i] = m.TerrainTypes.Blocked;
                            else
                                tmpl.TerrainMap[j, i] = m.TerrainTypes.Open;
                        }
                    }
                }
                if (bmColors != null) {
                    // Reference colors; index order must match the m.TerrainColors enum
                    Color[] aclr = new Color[4] {
                        Color.FromArgb(114, 167, 48), // grass
                        Color.FromArgb(81, 142, 118), // cliff
                        Color.FromArgb(1, 172, 254), // water
                        Color.FromArgb(174, 168, 99), // road
                    };
                    // Colors are sampled at half-tile resolution (2x2 per tile)
                    tmpl.TerrainColors = new m.TerrainColors[cty * 2, ctx * 2];
                    for (int yT = 0; yT < cty * 2; yT++) {
                        for (int xT = 0; xT < ctx * 2; xT++) {
                            Color clr = bmColors.GetPixel(txMin * gcxTile + xT * gcxTile / 2, tyMin * gcyTile + yT * gcyTile / 2);
                            m.TerrainColors tclr = m.TerrainColors.Grass; // default when no reference color matches
                            for (int i = 0; i < aclr.Length; i++) {
                                if (clr == aclr[i]) {
                                    tclr = (m.TerrainColors)i;
                                    break;
                                }
                            }
                            tmpl.TerrainColors[yT, xT] = tclr;
                        }
                    }
                }
                // A template named "background" (case-insensitive) becomes the background
                if (strT.ToLower() == "background")
                    tmpd.SetBackgroundTemplate(tmpl);
                atmpl[nTemplate] = tmpl;
            }
            gTemplate.Dispose();
            nTemplate++;
        }
        if (stmrTemplateNames != null)
            stmrTemplateNames.Close();
        if (gstrTileCollection != null) {
            Console.WriteLine("Writing templates to {0}", gstrTileCollection);
            tmpd.AddTemplates(atmpl);
            tmpd.SaveAs(gstrTileCollection);
        }
        Console.WriteLine("{0} tiles total in {1} templates ({2:###,###,###} bytes)", cTiles, alTemplates.Count, cTiles * gcxTile * gcyTile);
        return 0;
    }
    /// <summary>
    /// Recursive 8-connected flood fill over tile cells starting at (tx, ty):
    /// claims each non-transparent, unvisited cell in gaCells and appends its
    /// tile coordinate to alTiles. Transparency is tested at the cell's
    /// top-left pixel only.
    /// </summary>
    static unsafe private void FloodFill(byte *pbBase, ArrayList alTiles, int tx, int ty) {
        // Has this cell already been filled?
        if (gaCells[tx, ty] != -1)
            return;
        byte *pb = pbBase + (ty * gcyTile) * gbmd.Stride + (tx * gcxTile) * 3;
        Color clr = Color.FromArgb(pb[2], pb[1], pb[0]); // 24bpp is stored B,G,R
        if (clr == gclrTransparent)
            return;
        gaCells[tx, ty] = 1; // mark claimed before recursing to stop cycles
        alTiles.Add(new Point(tx, ty));
        // Recurse into all eight neighbors
        FloodFill(pbBase, alTiles, tx - 1, ty - 1);
        FloodFill(pbBase, alTiles, tx, ty - 1);
        FloodFill(pbBase, alTiles, tx + 1, ty - 1);
        FloodFill(pbBase, alTiles, tx - 1, ty);
        FloodFill(pbBase, alTiles, tx + 1, ty);
        FloodFill(pbBase, alTiles, tx - 1, ty + 1);
        FloodFill(pbBase, alTiles, tx, ty + 1);
        FloodFill(pbBase, alTiles, tx + 1, ty + 1);
    }
    /// <summary>
    /// Prints command-line usage to the console.
    /// </summary>
    static void PrintHelp() {
        Console.WriteLine(
            "Usage: TemplateExtractor -art <source bitmap> [-n names file] [-tc template collection] [-ter terrain bitmap]\n" +
            "-art source bitmap: bitmap file containing templates to be processed.\n" +
            "-n names file: file containing template names, one per line\n" +
            "-tc template collection: name of template collection to output (instead of .pngs)\n" +
            "-ter terrain bitmap: bitmap file containing terrain info to be processed.");
    }
}
}
| |
// Copyright (c) 2007-2017 ppy Pty Ltd <contact@ppy.sh>.
// Licensed under the MIT Licence - https://raw.githubusercontent.com/ppy/osu-framework/master/LICENCE
namespace osu.Framework.Input.Bindings
{
/// <summary>
/// A collection of keys, mouse and other controllers' buttons.
/// </summary>
public enum InputKey
{
/// <summary>
/// No key pressed.
/// </summary>
None = 0,
/// <summary>
/// The shift key.
/// </summary>
Shift = 1,
/// <summary>
/// The control key.
/// </summary>
Control = 3,
/// <summary>
/// The alt key.
/// </summary>
Alt = 5,
/// <summary>
/// The win key.
/// </summary>
Win = 7,
/// <summary>
/// The menu key.
/// </summary>
Menu = 9,
/// <summary>
/// The F1 key.
/// </summary>
F1 = 10,
/// <summary>
/// The F2 key.
/// </summary>
F2 = 11,
/// <summary>
/// The F3 key.
/// </summary>
F3 = 12,
/// <summary>
/// The F4 key.
/// </summary>
F4 = 13,
/// <summary>
/// The F5 key.
/// </summary>
F5 = 14,
/// <summary>
/// The F6 key.
/// </summary>
F6 = 15,
/// <summary>
/// The F7 key.
/// </summary>
F7 = 16,
/// <summary>
/// The F8 key.
/// </summary>
F8 = 17,
/// <summary>
/// The F9 key.
/// </summary>
F9 = 18,
/// <summary>
/// The F10 key.
/// </summary>
F10 = 19,
/// <summary>
/// The F11 key.
/// </summary>
F11 = 20,
/// <summary>
/// The F12 key.
/// </summary>
F12 = 21,
/// <summary>
/// The F13 key.
/// </summary>
F13 = 22,
/// <summary>
/// The F14 key.
/// </summary>
F14 = 23,
/// <summary>
/// The F15 key.
/// </summary>
F15 = 24,
/// <summary>
/// The F16 key.
/// </summary>
F16 = 25,
/// <summary>
/// The F17 key.
/// </summary>
F17 = 26,
/// <summary>
/// The F18 key.
/// </summary>
F18 = 27,
/// <summary>
/// The F19 key.
/// </summary>
F19 = 28,
/// <summary>
/// The F20 key.
/// </summary>
F20 = 29,
/// <summary>
/// The F21 key.
/// </summary>
F21 = 30,
/// <summary>
/// The F22 key.
/// </summary>
F22 = 31,
/// <summary>
/// The F23 key.
/// </summary>
F23 = 32,
/// <summary>
/// The F24 key.
/// </summary>
F24 = 33,
/// <summary>
/// The F25 key.
/// </summary>
F25 = 34,
/// <summary>
/// The F26 key.
/// </summary>
F26 = 35,
/// <summary>
/// The F27 key.
/// </summary>
F27 = 36,
/// <summary>
/// The F28 key.
/// </summary>
F28 = 37,
/// <summary>
/// The F29 key.
/// </summary>
F29 = 38,
/// <summary>
/// The F30 key.
/// </summary>
F30 = 39,
/// <summary>
/// The F31 key.
/// </summary>
F31 = 40,
/// <summary>
/// The F32 key.
/// </summary>
F32 = 41,
/// <summary>
/// The F33 key.
/// </summary>
F33 = 42,
/// <summary>
/// The F34 key.
/// </summary>
F34 = 43,
/// <summary>
/// The F35 key.
/// </summary>
F35 = 44,
/// <summary>
/// The up arrow key.
/// </summary>
Up = 45,
/// <summary>
/// The down arrow key.
/// </summary>
Down = 46,
/// <summary>
/// The left arrow key.
/// </summary>
Left = 47,
/// <summary>
/// The right arrow key.
/// </summary>
Right = 48,
/// <summary>
/// The enter key.
/// </summary>
Enter = 49,
/// <summary>
/// The escape key.
/// </summary>
Escape = 50,
/// <summary>
/// The space key.
/// </summary>
Space = 51,
/// <summary>
/// The tab key.
/// </summary>
Tab = 52,
/// <summary>
/// The backspace key.
/// </summary>
BackSpace = 53,
/// <summary>
/// The backspace key (equivalent to BackSpace).
/// </summary>
Back = 53,
/// <summary>
/// The insert key.
/// </summary>
Insert = 54,
/// <summary>
/// The delete key.
/// </summary>
Delete = 55,
/// <summary>
/// The page up key.
/// </summary>
PageUp = 56,
/// <summary>
/// The page down key.
/// </summary>
PageDown = 57,
/// <summary>
/// The home key.
/// </summary>
Home = 58,
/// <summary>
/// The end key.
/// </summary>
End = 59,
/// <summary>
/// The caps lock key.
/// </summary>
CapsLock = 60,
/// <summary>
/// The scroll lock key.
/// </summary>
ScrollLock = 61,
/// <summary>
/// The print screen key.
/// </summary>
PrintScreen = 62,
/// <summary>
/// The pause key.
/// </summary>
Pause = 63,
/// <summary>
/// The num lock key.
/// </summary>
NumLock = 64,
/// <summary>
/// The clear key (Keypad5 with NumLock disabled, on typical keyboards).
/// </summary>
Clear = 65,
/// <summary>
/// The sleep key.
/// </summary>
Sleep = 66,
/// <summary>
/// The keypad 0 key.
/// </summary>
Keypad0 = 67,
/// <summary>
/// The keypad 1 key.
/// </summary>
Keypad1 = 68,
/// <summary>
/// The keypad 2 key.
/// </summary>
Keypad2 = 69,
/// <summary>
/// The keypad 3 key.
/// </summary>
Keypad3 = 70,
/// <summary>
/// The keypad 4 key.
/// </summary>
Keypad4 = 71,
/// <summary>
/// The keypad 5 key.
/// </summary>
Keypad5 = 72,
/// <summary>
/// The keypad 6 key.
/// </summary>
Keypad6 = 73,
/// <summary>
/// The keypad 7 key.
/// </summary>
Keypad7 = 74,
/// <summary>
/// The keypad 8 key.
/// </summary>
Keypad8 = 75,
/// <summary>
/// The keypad 9 key.
/// </summary>
Keypad9 = 76,
/// <summary>
/// The keypad divide key.
/// </summary>
KeypadDivide = 77,
/// <summary>
/// The keypad multiply key.
/// </summary>
KeypadMultiply = 78,
/// <summary>
/// The keypad subtract key.
/// </summary>
KeypadSubtract = 79,
/// <summary>
/// The keypad minus key (equivalent to KeypadSubtract).
/// </summary>
KeypadMinus = 79,
/// <summary>
/// The keypad add key.
/// </summary>
KeypadAdd = 80,
/// <summary>
/// The keypad plus key (equivalent to KeypadAdd).
/// </summary>
KeypadPlus = 80,
/// <summary>
/// The keypad decimal key.
/// </summary>
KeypadDecimal = 81,
/// <summary>
/// The keypad period key (equivalent to KeypadDecimal).
/// </summary>
KeypadPeriod = 81,
/// <summary>
/// The keypad enter key.
/// </summary>
KeypadEnter = 82,
/// <summary>
/// The A key.
/// </summary>
A = 83,
/// <summary>
/// The B key.
/// </summary>
B = 84,
/// <summary>
/// The C key.
/// </summary>
C = 85,
/// <summary>
/// The D key.
/// </summary>
D = 86,
/// <summary>
/// The E key.
/// </summary>
E = 87,
/// <summary>
/// The F key.
/// </summary>
F = 88,
/// <summary>
/// The G key.
/// </summary>
G = 89,
/// <summary>
/// The H key.
/// </summary>
H = 90,
/// <summary>
/// The I key.
/// </summary>
I = 91,
/// <summary>
/// The J key.
/// </summary>
J = 92,
/// <summary>
/// The K key.
/// </summary>
K = 93,
/// <summary>
/// The L key.
/// </summary>
L = 94,
/// <summary>
/// The M key.
/// </summary>
M = 95,
/// <summary>
/// The N key.
/// </summary>
N = 96,
/// <summary>
/// The O key.
/// </summary>
O = 97,
/// <summary>
/// The P key.
/// </summary>
P = 98,
/// <summary>
/// The Q key.
/// </summary>
Q = 99,
/// <summary>
/// The R key.
/// </summary>
R = 100,
/// <summary>
/// The S key.
/// </summary>
S = 101,
/// <summary>
/// The T key.
/// </summary>
T = 102,
/// <summary>
/// The U key.
/// </summary>
U = 103,
/// <summary>
/// The V key.
/// </summary>
V = 104,
/// <summary>
/// The W key.
/// </summary>
W = 105,
/// <summary>
/// The X key.
/// </summary>
X = 106,
/// <summary>
/// The Y key.
/// </summary>
Y = 107,
/// <summary>
/// The Z key.
/// </summary>
Z = 108,
/// <summary>
/// The number 0 key.
/// </summary>
Number0 = 109,
/// <summary>
/// The number 1 key.
/// </summary>
Number1 = 110,
/// <summary>
/// The number 2 key.
/// </summary>
Number2 = 111,
/// <summary>
/// The number 3 key.
/// </summary>
Number3 = 112,
/// <summary>
/// The number 4 key.
/// </summary>
Number4 = 113,
/// <summary>
/// The number 5 key.
/// </summary>
Number5 = 114,
/// <summary>
/// The number 6 key.
/// </summary>
Number6 = 115,
/// <summary>
/// The number 7 key.
/// </summary>
Number7 = 116,
/// <summary>
/// The number 8 key.
/// </summary>
Number8 = 117,
/// <summary>
/// The number 9 key.
/// </summary>
Number9 = 118,
/// <summary>
/// The tilde key.
/// </summary>
Tilde = 119,
/// <summary>
/// The grave key (equivalent to Tilde).
/// </summary>
Grave = 119,
/// <summary>
/// The minus key.
/// </summary>
Minus = 120,
/// <summary>
/// The plus key.
/// </summary>
Plus = 121,
/// <summary>
/// The left bracket key.
/// </summary>
BracketLeft = 122,
/// <summary>
/// The left bracket key (equivalent to BracketLeft).
/// </summary>
LBracket = 122,
/// <summary>
/// The right bracket key.
/// </summary>
BracketRight = 123,
/// <summary>
/// The right bracket key (equivalent to BracketRight).
/// </summary>
RBracket = 123,
/// <summary>
/// The semicolon key.
/// </summary>
Semicolon = 124,
/// <summary>
/// The quote key.
/// </summary>
Quote = 125,
/// <summary>
/// The comma key.
/// </summary>
Comma = 126,
/// <summary>
/// The period key.
/// </summary>
Period = 127,
/// <summary>
/// The slash key.
/// </summary>
Slash = 128,
/// <summary>
/// The backslash key.
/// </summary>
BackSlash = 129,
/// <summary>
/// The secondary backslash key.
/// </summary>
NonUSBackSlash = 130,
/// <summary>
/// Indicates the last available keyboard key.
/// </summary>
LastKey = 131,
/// <summary>
/// Indicates the first available mouse button (equivalent to MouseLeft).
/// </summary>
FirstMouseButton = 132,
/// <summary>
/// The left mouse button.
/// </summary>
MouseLeft = 132,
/// <summary>
/// The middle mouse button.
/// </summary>
MouseMiddle = 133,
/// <summary>
/// The right mouse button.
/// </summary>
MouseRight = 134,
/// <summary>
/// The first extra mouse button.
/// </summary>
MouseButton1 = 135,
/// <summary>
/// The second extra mouse button.
/// </summary>
MouseButton2 = 136,
/// <summary>
/// The third extra mouse button.
/// </summary>
MouseButton3 = 137,
/// <summary>
/// The fourth extra mouse button.
/// </summary>
MouseButton4 = 138,
/// <summary>
/// The fifth extra mouse button.
/// </summary>
MouseButton5 = 139,
/// <summary>
/// The sixth extra mouse button.
/// </summary>
MouseButton6 = 140,
/// <summary>
/// The seventh extra mouse button.
/// </summary>
MouseButton7 = 141,
/// <summary>
/// The eighth extra mouse button.
/// </summary>
MouseButton8 = 142,
/// <summary>
/// The ninth extra mouse button.
/// </summary>
MouseButton9 = 143,
/// <summary>
/// Indicates the last available mouse button.
/// </summary>
MouseLastButton = 144,
}
}
| |
/// <summary> </summary>
using System;
using System.Collections.Generic;
namespace org.javarosa.core.reference
{
/// <summary> <p>The reference manager is a singleton class which
/// is responsible for deriving reference URI's into
/// references at runtime.</p>
///
/// <p>Raw reference factories
/// (which are capable of actually creating fully
/// qualified reference objects) are added with the
/// addFactory() method. The most common method
/// of doing so is to implement the PrefixedRootFactory
/// as either a full class, or an anonymous inner class,
/// providing the roots available in the current environment
/// and the code for constructing a reference from them.</p>
///
/// <p>RootTranslators (which rely on other factories) are
/// used to describe that a particular reference style (generally
/// a high level reference like "jr://media/" or "jr://images/"
/// should be translated to another available reference in this
/// environment like "jr://file/". Root Translators do not
/// directly derive references, but rather translate them to what
/// the reference should look like in the current circumstances.</p>
///
/// </summary>
/// <author> ctsims
///
/// </author>
public class ReferenceManager
{
    private static ReferenceManager instance;
    // Root translators registered for the lifetime of the application.
    private List<RootTranslator> translators;
    // Raw factories capable of creating fully qualified references.
    private List<ReferenceFactory> factories;
    // Translators that only live until clearSession() is called.
    private List<RootTranslator> sessionTranslators;

    /// <returns> The available reference factories (the registered root translators).
    /// </returns>
    virtual public ReferenceFactory[] Factories
    {
        get
        {
            // Copy element-by-element. The previous implementation cast the
            // ReferenceFactory[] instance to RootTranslator[] before CopyTo, which
            // always throws InvalidCastException at runtime (an array's runtime type
            // cannot be down-cast).
            ReferenceFactory[] roots = new ReferenceFactory[translators.Count];
            for (int i = 0; i < translators.Count; i++)
            {
                roots[i] = translators[i];
            }
            return roots;
        }
    }

    private ReferenceManager()
    {
        translators = new List<RootTranslator>();
        factories = new List<ReferenceFactory>();
        sessionTranslators = new List<RootTranslator>();
    }

    /// <returns> Singleton accessor to the global
    /// ReferenceManager.
    /// </returns>
    public static ReferenceManager _()
    {
        if (instance == null)
        {
            instance = new ReferenceManager();
        }
        return instance;
    }

    /// <summary> Adds a new Translator to the current environment.</summary>
    /// <param name="translator"> the translator to register (ignored if already present).
    /// </param>
    public virtual void addRootTranslator(RootTranslator translator)
    {
        if (!translators.Contains(translator))
        {
            translators.Add(translator);
        }
    }

    /// <summary> Adds a factory for deriving reference URI's into references.</summary>
    /// <param name="factory">A raw ReferenceFactory capable of creating
    /// a reference (ignored if already present).
    /// </param>
    public virtual void addReferenceFactory(ReferenceFactory factory)
    {
        if (!factories.Contains(factory))
        {
            factories.Add(factory);
        }
    }

    /// <summary> Removes a previously registered factory.</summary>
    /// <returns> true if the factory was present and removed.</returns>
    public virtual bool removeReferenceFactory(ReferenceFactory factory)
    {
        return factories.Remove(factory);
    }

    /// <summary> Derives a global reference from a URI in the current environment.
    ///
    /// </summary>
    /// <param name="uri">The URI representing a global reference.
    /// </param>
    /// <returns> A reference which is identified by the provided URI.
    /// </returns>
    /// <throws> InvalidReferenceException If the current reference could </throws>
    /// <summary> not be derived by the current environment
    /// </summary>
    public virtual Reference DeriveReference(System.String uri)
    {
        return DeriveReference(uri, (System.String)null);
    }

    /// <summary> Derives a reference from a URI in the current environment.
    ///
    /// </summary>
    /// <param name="uri">The URI representing a reference.
    /// </param>
    /// <param name="context">A reference which provides context for any
    /// relative reference accessors.
    /// </param>
    /// <returns> A reference which is identified by the provided URI.
    /// </returns>
    /// <throws> InvalidReferenceException If the current reference could </throws>
    /// <summary> not be derived by the current environment
    /// </summary>
    public virtual Reference DeriveReference(System.String uri, Reference context)
    {
        return DeriveReference(uri, context.getURI());
    }

    /// <summary> Derives a reference from a URI in the current environment.
    ///
    /// </summary>
    /// <param name="uri">The URI representing a reference.
    /// </param>
    /// <param name="context">A reference URI which provides context for any
    /// relative reference accessors.
    /// </param>
    /// <returns> A reference which is identified by the provided URI.
    /// </returns>
    /// <throws> InvalidReferenceException If the current reference could </throws>
    /// <summary> not be derived by the current environment, or if the context URI
    /// is not valid in the current environment.
    /// </summary>
    public virtual Reference DeriveReference(System.String uri, System.String context)
    {
        if (uri == null)
        {
            throw new InvalidReferenceException("Null references aren't valid", uri);
        }
        //Relative URI's need to determine their context first.
        if (isRelative(uri))
        {
            //Clean up the relative reference to lack any leading separators.
            if (uri.StartsWith("./"))
            {
                uri = uri.Substring(2);
            }
            if (context == null)
            {
                throw new System.SystemException("Attempted to retrieve local reference with no context");
            }
            else
            {
                // Derive against the context's root, passing the context through for
                // relative resolution.
                return derivingRoot(context).derive(uri, context);
            }
        }
        else
        {
            return derivingRoot(uri).derive(uri);
        }
    }

    /// <summary> Adds a root translator that is maintained over the course of a session. It will be globally
    /// available until the session is cleared using the "clearSession" method.
    ///
    /// </summary>
    /// <param name="translator">A Root Translator that will be added to the current session
    /// </param>
    public virtual void addSessionRootTranslator(RootTranslator translator)
    {
        sessionTranslators.Add(translator);
    }

    /// <summary> Wipes out all of the translators being maintained in the current session (IE: Any translators
    /// added via "addSessionRootTranslator". Used to manage a temporary set of translations for a limited
    /// amount of time.
    /// </summary>
    public virtual void clearSession()
    {
        sessionTranslators.Clear();
    }

    /// <summary> Finds the first registered root (session translators, then global
    /// translators, then raw factories) capable of deriving the given URI.
    /// </summary>
    /// <throws> InvalidReferenceException if no registered root derives the URI. </throws>
    private ReferenceFactory derivingRoot(System.String uri)
    {
        //First, try any/all roots which are put in the temporary session stack
        foreach (RootTranslator root in sessionTranslators)
        {
            if (root.derives(uri))
            {
                return root;
            }
        }
        //Now, try any/all roots referenced at runtime.
        foreach (RootTranslator root in translators)
        {
            if (root.derives(uri))
            {
                return root;
            }
        }
        //Now try all of the raw connectors available
        foreach (ReferenceFactory root in factories)
        {
            if (root.derives(uri))
            {
                return root;
            }
        }
        throw new InvalidReferenceException(getPrettyPrintException(uri), uri);
    }

    /// <summary> Builds a human-readable message explaining why the URI could not be
    /// derived, listing the roots available in this environment.
    /// </summary>
    private System.String getPrettyPrintException(System.String uri)
    {
        // Value comparison: the previous code compared boxed string references
        // ((object)uri == (object)""), which only matched by accident of interning.
        if (string.IsNullOrEmpty(uri))
        {
            return "Attempt to derive a blank reference";
        }
        try
        {
            System.String uriRoot = uri;
            System.String jrRefMessagePortion = "reference type";
            if (uri.IndexOf("jr://") != -1)
            {
                uriRoot = uri.Substring("jr://".Length);
                jrRefMessagePortion = "javarosa jr:// reference root";
            }
            //For http:// style uri's
            int endOfRoot = uriRoot.IndexOf("://") + "://".Length;
            if (endOfRoot == "://".Length - 1)
            {
                // No "://" present (IndexOf returned -1); fall back to the first path separator.
                endOfRoot = uriRoot.IndexOf("/");
            }
            if (endOfRoot != -1)
            {
                uriRoot = uriRoot.Substring(0, endOfRoot);
            }
            System.String message = "The reference \"" + uri + "\" was invalid and couldn't be understood. The " + jrRefMessagePortion + " \"" + uriRoot + "\" is not available on this system and may have been mis-typed. Some available roots: ";
            foreach (RootTranslator root in sessionTranslators)
            {
                message += ("\n" + root.prefix);
            }
            //Now, try any/all roots referenced at runtime.
            foreach (RootTranslator root in translators)
            {
                message += ("\n" + root.prefix);
            }
            //Now try all of the raw connectors available
            foreach (ReferenceFactory root in factories)
            {
                // Best-effort: PrefixedRootFactory exposes its roots directly; otherwise
                // probe the factory with an empty derivation and ignore failures.
                try
                {
                    if (root is PrefixedRootFactory)
                    {
                        foreach (String rootName in ((PrefixedRootFactory)root).roots)
                        {
                            message += ("\n" + rootName);
                        }
                    }
                    else
                    {
                        message += ("\n" + root.derive("").getURI());
                    }
                }
                catch (System.Exception)
                {
                    // Diagnostics only; skip factories that cannot report a root.
                }
            }
            return message;
        }
        catch (System.Exception)
        {
            return "Couldn't process the reference " + uri + " . It may have been entered incorrectly. " + "Note that this doesn't mean that the file or location referenced couldn't be found, the reference itself was not understood.";
        }
    }

    /// <param name="URI"> the URI to inspect.
    /// </param>
    /// <returns> Whether the provided URI describes a relative reference
    /// (i.e. starts with "./").
    /// </returns>
    public static bool isRelative(System.String URI)
    {
        return URI.StartsWith("./");
    }
}
}
| |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="CMSSignedDataParser.cs">
// Copyright (c) 2014 Alexander Logger.
// Copyright (c) 2000 - 2013 The Legion of the Bouncy Castle Inc. (http://www.bouncycastle.org).
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
using System.Collections;
using System.IO;
using Raksha.Asn1;
using Raksha.Asn1.Cms;
using Raksha.Asn1.X509;
using Raksha.Crypto;
using Raksha.Crypto.IO;
using Raksha.Security;
using Raksha.Utilities;
using Raksha.Utilities.Collections;
using Raksha.Utilities.IO;
using Raksha.X509.Store;
namespace Raksha.Cms
{
/**
* Parsing class for an CMS Signed Data object from an input stream.
* <p>
* Note: that because we are in a streaming mode only one signer can be tried and it is important
* that the methods on the parser are called in the appropriate order.
* </p>
* <p>
* A simple example of usage for an encapsulated signature.
* </p>
* <p>
* Two notes: first, in the example below the validity of
* the certificate isn't verified, just the fact that one of the certs
* matches the given signer, and, second, because we are in a streaming
* mode the order of the operations is important.
* </p>
* <pre>
* CmsSignedDataParser sp = new CmsSignedDataParser(encapSigData);
*
* sp.GetSignedContent().Drain();
*
* IX509Store certs = sp.GetCertificates();
* SignerInformationStore signers = sp.GetSignerInfos();
*
* foreach (SignerInformation signer in signers.GetSigners())
* {
* ArrayList certList = new ArrayList(certs.GetMatches(signer.SignerID));
* X509Certificate cert = (X509Certificate) certList[0];
*
* Console.WriteLine("verify returns: " + signer.Verify(cert));
* }
* </pre>
* Note also: this class does not introduce buffering - if you are processing large files you should create
* the parser with:
* <pre>
* CmsSignedDataParser ep = new CmsSignedDataParser(new BufferedInputStream(encapSigData, bufSize));
* </pre>
* where bufSize is a suitably large buffer size.
*/
public class CmsSignedDataParser : CmsContentInfoParser
{
private static readonly CmsSignedHelper Helper = CmsSignedHelper.Instance;
// OIDs of the digest algorithms declared in the SignedData's digestAlgorithms set.
private readonly ISet _digestOids;
// Map of digest-algorithm name -> IDigest instance; the digests are fed by reading
// the stream returned from GetSignedContent().
private readonly IDictionary _digests;
private readonly CmsTypedStream _signedContent;
private readonly DerObjectIdentifier _signedContentType;
private readonly SignedDataParser _signedData;
// Lazily-populated stores; see PopulateCertCrlSets() for the order-sensitive parsing.
private IX509Store _attributeStore;
private Asn1Set _certSet;
private IX509Store _certificateStore;
private Asn1Set _crlSet;
private IX509Store _crlStore;
private bool _isCertCrlParsed;
private SignerInformationStore _signerInfoStore;
public CmsSignedDataParser(byte[] sigBlock) : this(new MemoryStream(sigBlock, false))
{
}
public CmsSignedDataParser(CmsTypedStream signedContent, byte[] sigBlock) : this(signedContent, new MemoryStream(sigBlock, false))
{
}
/**
* base constructor - with encapsulated content
*/
public CmsSignedDataParser(Stream sigData) : this(null, sigData)
{
}
/**
* base constructor
*
* @param signedContent the content that was signed.
* @param sigData the signature object.
*/
public CmsSignedDataParser(CmsTypedStream signedContent, Stream sigData) : base(sigData)
{
try
{
_signedContent = signedContent;
_signedData = SignedDataParser.GetInstance(contentInfo.GetContent(Asn1Tags.Sequence));
_digests = Platform.CreateHashtable();
_digestOids = new HashSet();
// Create one digest instance per declared algorithm; unknown algorithms are skipped.
Asn1SetParser digAlgs = _signedData.GetDigestAlgorithms();
IAsn1Convertible o;
while ((o = digAlgs.ReadObject()) != null)
{
AlgorithmIdentifier id = AlgorithmIdentifier.GetInstance(o.ToAsn1Object());
try
{
string digestOid = id.ObjectID.Id;
string digestName = Helper.GetDigestAlgName(digestOid);
if (!_digests.Contains(digestName))
{
_digests[digestName] = Helper.GetDigestInstance(digestName);
_digestOids.Add(digestOid);
}
}
catch (SecurityUtilityException)
{
// TODO Should do something other than ignore it
}
}
//
// If the message is simply a certificate chain message GetContent() may return null.
//
ContentInfoParser cont = _signedData.GetEncapContentInfo();
var octs = (Asn1OctetStringParser) cont.GetContent(Asn1Tags.OctetString);
if (octs != null)
{
var ctStr = new CmsTypedStream(cont.ContentType.Id, octs.GetOctetStream());
if (_signedContent == null)
{
_signedContent = ctStr;
}
else
{
//
// content passed in, need to read past empty encapsulated content info object if present
//
ctStr.Drain();
}
}
_signedContentType = _signedContent == null ? cont.ContentType : new DerObjectIdentifier(_signedContent.ContentType);
}
catch (IOException e)
{
throw new CmsException("io exception: " + e.Message, e);
}
if (_digests.Count < 1)
{
throw new CmsException("no digests could be created for message.");
}
}
/**
* Return the version number for the SignedData object
*
* @return the version number
*/
public int Version
{
get { return _signedData.Version.Value.IntValue; }
}
// Returns a defensive copy of the declared digest OIDs.
public ISet DigestOids
{
get { return new HashSet(_digestOids); }
}
/// <summary>
/// Return the <c>DerObjectIdentifier</c> associated with the encapsulated
/// content info structure carried in the signed data.
/// </summary>
public DerObjectIdentifier SignedContentType
{
get { return _signedContentType; }
}
/**
* return the collection of signers that are associated with the
* signatures for the message.
* @throws CmsException
*/
public SignerInformationStore GetSignerInfos()
{
if (_signerInfoStore == null)
{
PopulateCertCrlSets();
IList signerInfos = Platform.CreateArrayList();
IDictionary hashes = Platform.CreateHashtable();
// Snapshot the accumulated content digests. NOTE(review): this appears to assume
// the signed content stream has already been fully read (e.g. via
// GetSignedContent().Drain()) as shown in the class usage example — confirm.
foreach (object digestKey in _digests.Keys)
{
hashes[digestKey] = DigestUtilities.DoFinal((IDigest) _digests[digestKey]);
}
try
{
Asn1SetParser s = _signedData.GetSignerInfos();
IAsn1Convertible o;
while ((o = s.ReadObject()) != null)
{
SignerInfo info = SignerInfo.GetInstance(o.ToAsn1Object());
string digestName = Helper.GetDigestAlgName(info.DigestAlgorithm.ObjectID.Id);
var hash = (byte[]) hashes[digestName];
signerInfos.Add(new SignerInformation(info, _signedContentType, null, new BaseDigestCalculator(hash)));
}
}
catch (IOException e)
{
throw new CmsException("io exception: " + e.Message, e);
}
_signerInfoStore = new SignerInformationStore(signerInfos);
}
return _signerInfoStore;
}
/**
* return a X509Store containing the attribute certificates, if any, contained
* in this message.
*
* @param type type of store to create
* @return a store of attribute certificates
* @exception org.bouncycastle.x509.NoSuchStoreException if the store type isn't available.
* @exception CmsException if a general exception prevents creation of the X509Store
*/
public IX509Store GetAttributeCertificates(string type)
{
if (_attributeStore == null)
{
PopulateCertCrlSets();
_attributeStore = Helper.CreateAttributeStore(type, _certSet);
}
return _attributeStore;
}
/**
* return a X509Store containing the public key certificates, if any, contained
* in this message.
*
* @param type type of store to create
* @return a store of public key certificates
* @exception NoSuchStoreException if the store type isn't available.
* @exception CmsException if a general exception prevents creation of the X509Store
*/
public IX509Store GetCertificates(string type)
{
if (_certificateStore == null)
{
PopulateCertCrlSets();
_certificateStore = Helper.CreateCertificateStore(type, _certSet);
}
return _certificateStore;
}
/**
* return a X509Store containing CRLs, if any, contained
* in this message.
*
* @param type type of store to create
* @return a store of CRLs
* @exception NoSuchStoreException if the store type isn't available.
* @exception CmsException if a general exception prevents creation of the X509Store
*/
public IX509Store GetCrls(string type)
{
if (_crlStore == null)
{
PopulateCertCrlSets();
_crlStore = Helper.CreateCrlStore(type, _crlSet);
}
return _crlStore;
}
// One-shot parse of the certificate and CRL sets from the underlying stream.
private void PopulateCertCrlSets()
{
if (_isCertCrlParsed)
{
return;
}
_isCertCrlParsed = true;
try
{
// care! Streaming - Must process the GetCertificates() result before calling GetCrls()
_certSet = GetAsn1Set(_signedData.GetCertificates());
_crlSet = GetAsn1Set(_signedData.GetCrls());
}
catch (IOException e)
{
throw new CmsException("problem parsing cert/crl sets", e);
}
}
// Returns the signed content wrapped so that every configured digest observes the
// bytes as they are read; returns null for certificate-chain-only messages.
public CmsTypedStream GetSignedContent()
{
if (_signedContent == null)
{
return null;
}
Stream digStream = _signedContent.ContentStream;
foreach (IDigest digest in _digests.Values)
{
digStream = new DigestStream(digStream, digest, null);
}
return new CmsTypedStream(_signedContent.ContentType, digStream);
}
/**
* Replace the signerinformation store associated with the passed
* in message contained in the stream original with the new one passed in.
* You would probably only want to do this if you wanted to change the unsigned
* attributes associated with a signer, or perhaps delete one.
* <p>
* The output stream is returned unclosed.
* </p>
* @param original the signed data stream to be used as a base.
* @param signerInformationStore the new signer information store to use.
* @param out the stream to Write the new signed data object to.
* @return out.
*/
public static Stream ReplaceSigners(Stream original, SignerInformationStore signerInformationStore, Stream outStr)
{
// NB: SecureRandom would be ignored since using existing signatures only
var gen = new CmsSignedDataStreamGenerator();
var parser = new CmsSignedDataParser(original);
// gen.AddDigests(parser.DigestOids);
gen.AddSigners(signerInformationStore);
CmsTypedStream signedContent = parser.GetSignedContent();
bool encapsulate = (signedContent != null);
using (Stream contentOut = gen.Open(outStr, parser.SignedContentType.Id, encapsulate))
{
if (encapsulate)
{
Streams.PipeAll(signedContent.ContentStream, contentOut);
}
gen.AddAttributeCertificates(parser.GetAttributeCertificates("Collection"));
gen.AddCertificates(parser.GetCertificates("Collection"));
gen.AddCrls(parser.GetCrls("Collection"));
// gen.AddSigners(parser.GetSignerInfos());
}
return outStr;
}
/**
* Replace the certificate and CRL information associated with this
* CMSSignedData object with the new one passed in.
* <p>
* The output stream is returned unclosed.
* </p>
* @param original the signed data stream to be used as a base.
* @param certsAndCrls the new certificates and CRLs to be used.
* @param out the stream to Write the new signed data object to.
* @return out.
* @exception CmsException if there is an error processing the CertStore
*/
public static Stream ReplaceCertificatesAndCrls(Stream original, IX509Store x509Certs, IX509Store x509Crls, IX509Store x509AttrCerts, Stream outStr)
{
// NB: SecureRandom would be ignored since using existing signatures only
var gen = new CmsSignedDataStreamGenerator();
var parser = new CmsSignedDataParser(original);
gen.AddDigests(parser.DigestOids);
CmsTypedStream signedContent = parser.GetSignedContent();
bool encapsulate = (signedContent != null);
using (Stream contentOut = gen.Open(outStr, parser.SignedContentType.Id, encapsulate))
{
if (encapsulate)
{
Streams.PipeAll(signedContent.ContentStream, contentOut);
}
// gen.AddAttributeCertificates(parser.GetAttributeCertificates("Collection"));
// gen.AddCertificates(parser.GetCertificates("Collection"));
// gen.AddCrls(parser.GetCrls("Collection"));
if (x509AttrCerts != null)
{
gen.AddAttributeCertificates(x509AttrCerts);
}
if (x509Certs != null)
{
gen.AddCertificates(x509Certs);
}
if (x509Crls != null)
{
gen.AddCrls(x509Crls);
}
gen.AddSigners(parser.GetSignerInfos());
}
return outStr;
}
// Fully parses an ASN.1 set parser into an in-memory Asn1Set (null passes through).
private static Asn1Set GetAsn1Set(Asn1SetParser asn1SetParser)
{
return asn1SetParser == null ? null : Asn1Set.GetInstance(asn1SetParser.ToAsn1Object());
}
}
}
| |
// <copyright file="Callbacks.cs" company="Google Inc.">
// Copyright (C) 2014 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
#if (UNITY_ANDROID || (UNITY_IPHONE && !NO_GPGS))
namespace GooglePlayGames.Native.PInvoke
{
using System;
using System.Runtime.InteropServices;
using GooglePlayGames.OurUtils;
using GooglePlayGames.Native.Cwrapper;
static class Callbacks
{
// Default UI callback that simply logs the returned status and otherwise ignores it.
internal static readonly Action<CommonErrorStatus.UIStatus> NoopUICallback = status =>
{
Logger.d("Received UI callback: " + status);
};
// Native-compatible signature for UI-status callbacks; 'data' carries a GCHandle-backed
// pointer to the managed callback (see ToIntPtr / IntPtrToTempCallback below).
internal delegate void ShowUICallbackInternal(CommonErrorStatus.UIStatus status,IntPtr data);
/// <summary>
/// Wraps <paramref name="callback"/> so it can be handed to native code: the returned
/// IntPtr identifies a delegate that converts the raw native result via
/// <paramref name="conversionFunction"/> before invoking the callback.
/// </summary>
internal static IntPtr ToIntPtr<T>(Action<T> callback, Func<IntPtr, T> conversionFunction)
    where T : BaseReferenceHolder
{
    Action<IntPtr> nativeResultHandler = rawResult =>
    {
        // Dispose the converted wrapper regardless of whether a callback was supplied.
        using (T wrapped = conversionFunction(rawResult))
        {
            if (callback == null)
            {
                return;
            }
            callback(wrapped);
        }
    };
    return ToIntPtr(nativeResultHandler);
}
/// <summary>
/// Two-parameter variant of <c>ToIntPtr</c>: the second native argument is converted via
/// <paramref name="conversionFunction"/> before the callback is invoked; the first is
/// passed through unchanged.
/// </summary>
internal static IntPtr ToIntPtr<T, P>(Action<T, P> callback, Func<IntPtr, P> conversionFunction)
    where P : BaseReferenceHolder
{
    Action<T, IntPtr> nativeResultHandler = (firstArg, rawSecondArg) =>
    {
        // Dispose the converted wrapper regardless of whether a callback was supplied.
        using (P wrapped = conversionFunction(rawSecondArg))
        {
            if (callback == null)
            {
                return;
            }
            callback(firstArg, wrapped);
        }
    };
    return ToIntPtr(nativeResultHandler);
}
/// <summary>
/// Converts a delegate into an IntPtr that can safely round-trip through native code.
/// Returns IntPtr.Zero for a null delegate.
/// </summary>
internal static IntPtr ToIntPtr(Delegate callback)
{
    if (callback == null)
    {
        return IntPtr.Zero;
    }
    // Once the callback is passed off to native, nothing managed keeps a reference to it,
    // so it would be eligible for garbage collection before native invokes it. Allocate a
    // normal GCHandle to keep it alive; the handle is freed when the pointer is converted
    // back to a delegate via IntPtrToCallback.
    GCHandle protectingHandle = GCHandle.Alloc(callback);
    return GCHandle.ToIntPtr(protectingHandle);
}
/// <summary>
/// Recovers the delegate behind <paramref name="handle"/>, freeing the backing GCHandle
/// so the pointer is single-use. Use for callbacks that fire exactly once.
/// </summary>
internal static T IntPtrToTempCallback<T>(IntPtr handle) where T : class
{
    return IntPtrToCallback<T>(handle, true);
}
/// <summary>
/// Converts a GCHandle-backed IntPtr (produced by <c>ToIntPtr</c>) back into its delegate.
/// </summary>
/// <param name="handle">The pointer to convert; a null pointer yields null.</param>
/// <param name="unpinHandle">When true, the GCHandle is freed after the lookup,
/// making the pointer single-use.</param>
/// <returns>The delegate of type T, or null when the handle is null.</returns>
/// <exception cref="System.InvalidCastException">If the handle's target is not a T.</exception>
private static T IntPtrToCallback<T>(IntPtr handle, bool unpinHandle) where T : class
{
    if (PInvokeUtilities.IsNull(handle))
    {
        return null;
    }
    var gcHandle = GCHandle.FromIntPtr(handle);
    try
    {
        return (T)gcHandle.Target;
    }
    catch (System.InvalidCastException)
    {
        Logger.e("GC Handle pointed to unexpected type: " + gcHandle.Target.ToString() +
        ". Expected " + typeof(T));
        // Use 'throw;' rather than 'throw e;' so the original stack trace is preserved.
        throw;
    }
    finally
    {
        if (unpinHandle)
        {
            gcHandle.Free();
        }
    }
}
// TODO(hsakai): Better way of handling this.
/// <summary>
/// Decodes a GCHandle-encoded pointer into a callback WITHOUT freeing the
/// handle, so the same callback can be invoked repeatedly from native code.
/// </summary>
internal static T IntPtrToPermanentCallback<T>(IntPtr handle) where T : class =>
    IntPtrToCallback<T>(handle, false);
/// <summary>
/// Native-invokable trampoline for UI-status callbacks. Decodes the one-shot
/// managed callback from <paramref name="data"/> and invokes it, swallowing
/// (and logging) any exception so it never propagates into native code.
/// </summary>
[AOT.MonoPInvokeCallback(typeof(ShowUICallbackInternal))]
internal static void InternalShowUICallback(CommonErrorStatus.UIStatus status, IntPtr data)
{
    Logger.d("Showing UI Internal callback: " + status);
    var callback = IntPtrToTempCallback<Action<CommonErrorStatus.UIStatus>>(data);
    if (callback == null)
    {
        // No managed callback was registered. Previously this fell through and
        // the resulting NullReferenceException was smothered below; bail out
        // cleanly instead (consistent with PerformInternalCallback).
        return;
    }

    try
    {
        callback(status);
    }
    catch (Exception e)
    {
        Logger.e("Error encountered executing InternalShowAllUICallback. " +
            "Smothering to avoid passing exception into Native: " + e);
    }
}
/// <summary>
/// Lifetime of a marshalled callback handle: a Permanent handle survives
/// multiple invocations, while a Temporary handle is freed after a single
/// invocation.
/// </summary>
internal enum Type
{
    Permanent,
    Temporary
}
/// <summary>
/// Decodes the single-argument managed callback encoded in
/// <paramref name="userData"/> (permanently or temporarily, depending on
/// <paramref name="callbackType"/>) and invokes it with
/// <paramref name="response"/>. Exceptions are logged and swallowed so they
/// never propagate back into native code.
/// </summary>
internal static void PerformInternalCallback(string callbackName, Type callbackType,
    IntPtr response, IntPtr userData)
{
    Logger.d("Entering internal callback for " + callbackName);

    Action<IntPtr> callback;
    if (callbackType == Type.Permanent)
    {
        callback = IntPtrToPermanentCallback<Action<IntPtr>>(userData);
    }
    else
    {
        callback = IntPtrToTempCallback<Action<IntPtr>>(userData);
    }

    if (callback == null)
    {
        return;
    }

    try
    {
        callback(response);
    }
    catch (Exception e)
    {
        Logger.e("Error encountered executing " + callbackName + ". " +
            "Smothering to avoid passing exception into Native: " + e);
    }
}
/// <summary>
/// Two-argument variant: decodes the managed callback encoded in
/// <paramref name="userData"/> and invokes it with
/// (<paramref name="param1"/>, <paramref name="param2"/>). Both the decoding
/// step and the invocation log and swallow exceptions so nothing propagates
/// back into native code.
/// </summary>
internal static void PerformInternalCallback<T>(string callbackName, Type callbackType,
    T param1, IntPtr param2, IntPtr userData)
{
    Logger.d("Entering internal callback for " + callbackName);

    Action<T, IntPtr> callback;
    try
    {
        if (callbackType == Type.Permanent)
        {
            callback = IntPtrToPermanentCallback<Action<T, IntPtr>>(userData);
        }
        else
        {
            callback = IntPtrToTempCallback<Action<T, IntPtr>>(userData);
        }
    }
    catch (Exception e)
    {
        Logger.e("Error encountered converting " + callbackName + ". " +
            "Smothering to avoid passing exception into Native: " + e);
        return;
    }

    Logger.d("Internal Callback converted to action");

    if (callback == null)
    {
        return;
    }

    try
    {
        callback(param1, param2);
    }
    catch (Exception e)
    {
        Logger.e("Error encountered executing " + callbackName + ". " +
            "Smothering to avoid passing exception into Native: " + e);
    }
}
/// <summary>
/// Adapts a callback so that it is always dispatched on the Unity game thread
/// via PlayGamesHelperObject. A null callback yields a no-op wrapper.
/// </summary>
internal static Action<T> AsOnGameThreadCallback<T>(Action<T> toInvokeOnGameThread)
{
    return value =>
    {
        // Captured parameter never changes; invoke only when one was supplied.
        if (toInvokeOnGameThread != null)
        {
            PlayGamesHelperObject.RunOnGameThread(() => toInvokeOnGameThread(value));
        }
    };
}
/// <summary>
/// Two-argument variant: adapts a callback so that it is always dispatched on
/// the Unity game thread via PlayGamesHelperObject. A null callback yields a
/// no-op wrapper.
/// </summary>
internal static Action<T1, T2> AsOnGameThreadCallback<T1, T2>(
    Action<T1, T2> toInvokeOnGameThread)
{
    return (first, second) =>
    {
        if (toInvokeOnGameThread != null)
        {
            PlayGamesHelperObject.RunOnGameThread(() => toInvokeOnGameThread(first, second));
        }
    };
}
/// <summary>
/// Copies a native buffer into a managed byte array.
/// </summary>
/// <param name="data">Pointer to the native buffer.</param>
/// <param name="dataLength">Length of the buffer in bytes.</param>
/// <returns>The copied bytes, or null when the length is zero or the pointer
/// is null (previously a null pointer with a nonzero length crashed).</returns>
internal static byte[] IntPtrAndSizeToByteArray(IntPtr data, UIntPtr dataLength)
{
    ulong length = dataLength.ToUInt64();
    if (length == 0 || data == IntPtr.Zero)
    {
        return null;
    }

    if (length > int.MaxValue)
    {
        // Marshal.Copy takes an int count; the old ToUInt32 conversion would
        // silently truncate oversized lengths. Fail loudly instead.
        throw new ArgumentOutOfRangeException(
            "dataLength", "Native buffer is too large to copy into a byte array.");
    }

    byte[] convertedData = new byte[(int)length];
    Marshal.Copy(data, convertedData, 0, (int)length);
    return convertedData;
}
}
}
#endif
| |
//---------------------------------------------------------------------
// <copyright file="NativeMethods.cs" company="Microsoft Corporation">
// Copyright (c) 1999, Microsoft Corporation. All rights reserved.
// </copyright>
// <summary>
// Part of the Deployment Tools Foundation project.
// </summary>
//---------------------------------------------------------------------
namespace Microsoft.PackageManagement.Archivers.Internal.Compression.Cab
{
using System;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.InteropServices;
using System.Security;
#if !CORECLR
using System.Security.Permissions;
#endif
/// <summary>
/// Native DllImport methods and related structures and constants used for
/// cabinet creation and extraction via cabinet.dll.
/// </summary>
internal static class NativeMethods
{
/// <summary>
/// A direct import of constants, enums, structures, delegates, and functions from fci.h.
/// Refer to comments in fci.h for documentation.
/// </summary>
/// <summary>
/// A direct import of constants, enums, structures, delegates, and functions from fci.h.
/// Refer to comments in fci.h for documentation.
/// </summary>
internal static class FCI
{
    // Size limits from fci.h. Names/paths are fixed-size ANSI buffers (see CCAB below).
    internal const int MIN_DISK = 32768;
    internal const int MAX_DISK = Int32.MaxValue;
    internal const int MAX_FOLDER = 0x7FFF8000;
    internal const int MAX_FILENAME = 256;
    internal const int MAX_CABINET_NAME = 256;
    internal const int MAX_CAB_PATH = 256;
    internal const int MAX_DISK_NAME = 256;
    internal const int CPU_80386 = 1;

    // Memory-management and virtual file I/O callbacks passed to FCICreate.
    // All are cdecl to match the native cabinet.dll calling convention.
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate IntPtr PFNALLOC(int cb);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate void PFNFREE(IntPtr pv);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNOPEN(string path, int oflag, int pmode, out int err, IntPtr pv);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNREAD(int fileHandle, IntPtr memory, int cb, out int err, IntPtr pv);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNWRITE(int fileHandle, IntPtr memory, int cb, out int err, IntPtr pv);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNCLOSE(int fileHandle, out int err, IntPtr pv);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNSEEK(int fileHandle, int dist, int seekType, out int err, IntPtr pv);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNDELETE(string path, out int err, IntPtr pv);

    // Callbacks that drive multi-cabinet creation, progress reporting,
    // per-file metadata collection and temp-file naming.
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNGETNEXTCABINET(IntPtr pccab, uint cbPrevCab, IntPtr pv);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNFILEPLACED(IntPtr pccab, string path, long fileSize, int continuation, IntPtr pv);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNGETOPENINFO(string path, out short date, out short time, out short pattribs, out int err, IntPtr pv);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNSTATUS(STATUS typeStatus, uint cb1, uint cb2, IntPtr pv);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNGETTEMPFILE(IntPtr tempNamePtr, int tempNameSize, IntPtr pv);

    /// <summary>
    /// Error codes that can be returned by FCI.
    /// </summary>
    internal enum ERROR : int
    {
        NONE,
        OPEN_SRC,
        READ_SRC,
        ALLOC_FAIL,
        TEMP_FILE,
        BAD_COMPR_TYPE,
        CAB_FILE,
        USER_ABORT,
        MCI_FAIL,
    }

    /// <summary>
    /// FCI compression algorithm types and parameters.
    /// </summary>
    internal enum TCOMP : ushort
    {
        // Values are bit fields: low nibble selects the algorithm, the other
        // masked ranges carry per-algorithm parameters (LZX window size,
        // Quantum level/memory).
        MASK_TYPE = 0x000F,
        TYPE_NONE = 0x0000,
        TYPE_MSZIP = 0x0001,
        TYPE_QUANTUM = 0x0002,
        TYPE_LZX = 0x0003,
        BAD = 0x000F,
        MASK_LZX_WINDOW = 0x1F00,
        LZX_WINDOW_LO = 0x0F00,
        LZX_WINDOW_HI = 0x1500,
        SHIFT_LZX_WINDOW = 0x0008,
        MASK_QUANTUM_LEVEL = 0x00F0,
        QUANTUM_LEVEL_LO = 0x0010,
        QUANTUM_LEVEL_HI = 0x0070,
        SHIFT_QUANTUM_LEVEL = 0x0004,
        MASK_QUANTUM_MEM = 0x1F00,
        QUANTUM_MEM_LO = 0x0A00,
        QUANTUM_MEM_HI = 0x1500,
        SHIFT_QUANTUM_MEM = 0x0008,
        MASK_RESERVED = 0xE000,
    }

    /// <summary>
    /// Reason for FCI status callback.
    /// </summary>
    internal enum STATUS : uint
    {
        FILE = 0,
        FOLDER = 1,
        CABINET = 2,
    }

    // Native entry points. All use cdecl and ANSI strings to match cabinet.dll.
    [SuppressMessage("Microsoft.Globalization", "CA2101:SpecifyMarshalingForPInvokeStringArguments")]
    [DllImport("cabinet.dll", EntryPoint = "FCICreate", CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true, CallingConvention = CallingConvention.Cdecl)]
    internal static extern Handle Create(IntPtr perf, PFNFILEPLACED pfnfcifp, PFNALLOC pfna, PFNFREE pfnf, PFNOPEN pfnopen, PFNREAD pfnread, PFNWRITE pfnwrite, PFNCLOSE pfnclose, PFNSEEK pfnseek, PFNDELETE pfndelete, PFNGETTEMPFILE pfnfcigtf, [MarshalAs(UnmanagedType.LPStruct)] CCAB pccab, IntPtr pv);

    [DllImport("cabinet.dll", EntryPoint = "FCIAddFile", CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true, CallingConvention = CallingConvention.Cdecl)]
    internal static extern int AddFile(Handle hfci, string pszSourceFile, IntPtr pszFileName, [MarshalAs(UnmanagedType.Bool)] bool fExecute, PFNGETNEXTCABINET pfnfcignc, PFNSTATUS pfnfcis, PFNGETOPENINFO pfnfcigoi, TCOMP typeCompress);

    [DllImport("cabinet.dll", EntryPoint = "FCIFlushCabinet", CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true, CallingConvention = CallingConvention.Cdecl)]
    internal static extern int FlushCabinet(Handle hfci, [MarshalAs(UnmanagedType.Bool)] bool fGetNextCab, PFNGETNEXTCABINET pfnfcignc, PFNSTATUS pfnfcis);

    [DllImport("cabinet.dll", EntryPoint = "FCIFlushFolder", CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true, CallingConvention = CallingConvention.Cdecl)]
    internal static extern int FlushFolder(Handle hfci, PFNGETNEXTCABINET pfnfcignc, PFNSTATUS pfnfcis);

#if !CORECLR
    [SuppressUnmanagedCodeSecurity]
#endif
    [DllImport("cabinet.dll", EntryPoint = "FCIDestroy", CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true, CallingConvention = CallingConvention.Cdecl)]
    [return: MarshalAs(UnmanagedType.Bool)]
    internal static extern bool Destroy(IntPtr hfci);

    /// <summary>
    /// Cabinet information structure used for FCI initialization and GetNextCabinet callback.
    /// </summary>
    [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)]
    internal class CCAB
    {
        // Field order and fixed-size string buffers must match the native
        // CCAB layout exactly; do not reorder or resize.
        internal int cb = MAX_DISK;
        internal int cbFolderThresh = MAX_FOLDER;
        internal int cbReserveCFHeader;
        internal int cbReserveCFFolder;
        internal int cbReserveCFData;
        internal int iCab;
        internal int iDisk;
        internal int fFailOnIncompressible;
        internal short setID;
        [MarshalAs(UnmanagedType.ByValTStr, SizeConst = MAX_DISK_NAME)] internal string szDisk = String.Empty;
        [MarshalAs(UnmanagedType.ByValTStr, SizeConst = MAX_CABINET_NAME)] internal string szCab = String.Empty;
        [MarshalAs(UnmanagedType.ByValTStr, SizeConst = MAX_CAB_PATH)] internal string szCabPath = String.Empty;
    }

    /// <summary>
    /// Ensures that the FCI handle is safely released.
    /// </summary>
    internal class Handle : SafeHandle
    {
        /// <summary>
        /// Creates a new unintialized handle. The handle will be initialized
        /// when it is marshalled back from native code.
        /// </summary>
        internal Handle()
            : base(IntPtr.Zero, true)
        {
        }

        /// <summary>
        /// Checks if the handle is invalid. An FCI handle is invalid when it is zero.
        /// </summary>
        public override bool IsInvalid
        {
            get
            {
                return this.handle == IntPtr.Zero;
            }
        }

        /// <summary>
        /// Releases the handle by calling FCIDestroy().
        /// </summary>
        /// <returns>True if the release succeeded.</returns>
#if !CORECLR
        [SecurityPermission(SecurityAction.Assert, UnmanagedCode = true)]
#endif
        protected override bool ReleaseHandle()
        {
            return FCI.Destroy(this.handle);
        }
    }
}
/// <summary>
/// A direct import of constants, enums, structures, delegates, and functions from fdi.h.
/// Refer to comments in fdi.h for documentation.
/// </summary>
/// <summary>
/// A direct import of constants, enums, structures, delegates, and functions from fdi.h.
/// Refer to comments in fdi.h for documentation.
/// </summary>
internal static class FDI
{
    // Size limits from fdi.h.
    internal const int MAX_DISK = Int32.MaxValue;
    internal const int MAX_FILENAME = 256;
    internal const int MAX_CABINET_NAME = 256;
    internal const int MAX_CAB_PATH = 256;
    internal const int MAX_DISK_NAME = 256;
    internal const int CPU_80386 = 1;

    // Memory-management and virtual file I/O callbacks passed to FDICreate,
    // plus the notification callback used during extraction. All cdecl.
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate IntPtr PFNALLOC(int cb);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate void PFNFREE(IntPtr pv);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNOPEN(string path, int oflag, int pmode);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNREAD(int hf, IntPtr pv, int cb);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNWRITE(int hf, IntPtr pv, int cb);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNCLOSE(int hf);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNSEEK(int hf, int dist, int seektype);
    [UnmanagedFunctionPointer(CallingConvention.Cdecl)] internal delegate int PFNNOTIFY(NOTIFICATIONTYPE fdint, NOTIFICATION fdin);

    /// <summary>
    /// Error codes that can be returned by FDI.
    /// </summary>
    internal enum ERROR : int
    {
        NONE,
        CABINET_NOT_FOUND,
        NOT_A_CABINET,
        UNKNOWN_CABINET_VERSION,
        CORRUPT_CABINET,
        ALLOC_FAIL,
        BAD_COMPR_TYPE,
        MDI_FAIL,
        TARGET_FILE,
        RESERVE_MISMATCH,
        WRONG_CABINET,
        USER_ABORT,
    }

    /// <summary>
    /// Type of notification message for the FDI Notify callback.
    /// </summary>
    internal enum NOTIFICATIONTYPE : int
    {
        CABINET_INFO,
        PARTIAL_FILE,
        COPY_FILE,
        CLOSE_FILE_INFO,
        NEXT_CABINET,
        ENUMERATE,
    }

    // Native entry points. All use cdecl and ANSI strings to match cabinet.dll.
    [DllImport("cabinet.dll", EntryPoint = "FDICreate", CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true, CallingConvention = CallingConvention.Cdecl)]
    internal static extern Handle Create([MarshalAs(UnmanagedType.FunctionPtr)] PFNALLOC pfnalloc, [MarshalAs(UnmanagedType.FunctionPtr)] PFNFREE pfnfree, PFNOPEN pfnopen, PFNREAD pfnread, PFNWRITE pfnwrite, PFNCLOSE pfnclose, PFNSEEK pfnseek, int cpuType, IntPtr perf);

    [DllImport("cabinet.dll", EntryPoint = "FDICopy", CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true, CallingConvention = CallingConvention.Cdecl)]
    internal static extern int Copy(Handle hfdi, string pszCabinet, string pszCabPath, int flags, PFNNOTIFY pfnfdin, IntPtr pfnfdid, IntPtr pvUser);

#if !CORECLR
    [SuppressUnmanagedCodeSecurity]
#endif
    [DllImport("cabinet.dll", EntryPoint = "FDIDestroy", CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true, CallingConvention = CallingConvention.Cdecl)]
    [return: MarshalAs(UnmanagedType.Bool)]
    internal static extern bool Destroy(IntPtr hfdi);

    [DllImport("cabinet.dll", EntryPoint = "FDIIsCabinet", CharSet = CharSet.Ansi, BestFitMapping = false, ThrowOnUnmappableChar = true, CallingConvention = CallingConvention.Cdecl)]
    [SuppressMessage("Microsoft.Portability", "CA1901:PInvokeDeclarationsShouldBePortable", Justification = "FDI file handles definitely remain 4 bytes on 64bit platforms.")]
    internal static extern int IsCabinet(Handle hfdi, int hf, out CABINFO pfdici);

    /// <summary>
    /// Cabinet information structure filled in by FDI IsCabinet.
    /// </summary>
    [StructLayout(LayoutKind.Sequential)]
    internal struct CABINFO
    {
        // Field order must match the native FDICABINETINFO layout.
        internal int cbCabinet;
        internal short cFolders;
        internal short cFiles;
        internal short setID;
        internal short iCabinet;
        internal int fReserve;
        internal int hasprev;
        internal int hasnext;
    }

    /// <summary>
    /// Cabinet notification details passed to the FDI Notify callback.
    /// </summary>
    [SuppressMessage("Microsoft.Performance", "CA1812:AvoidUninstantiatedInternalClasses")]
    [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Ansi)]
    internal class NOTIFICATION
    {
        // Field order must match the native FDINOTIFICATION layout.
        internal int cb;
        internal IntPtr psz1;
        internal IntPtr psz2;
        internal IntPtr psz3;
        internal IntPtr pv;
        internal IntPtr hf_ptr;
        internal short date;
        internal short time;
        internal short attribs;
        internal short setID;
        internal short iCabinet;
        internal short iFolder;
        internal int fdie;

        // Unlike all the other file handles in FCI/FDI, this one is
        // actually pointer-sized. Use a property to pretend it isn't.
        internal int hf
        {
            get { return (int)this.hf_ptr; }
        }
    }

    /// <summary>
    /// Ensures that the FDI handle is safely released.
    /// </summary>
    internal class Handle : SafeHandle
    {
        /// <summary>
        /// Creates a new unintialized handle. The handle will be initialized
        /// when it is marshalled back from native code.
        /// </summary>
        internal Handle()
            : base(IntPtr.Zero, true)
        {
        }

        /// <summary>
        /// Checks if the handle is invalid. An FDI handle is invalid when it is zero.
        /// </summary>
        public override bool IsInvalid
        {
            get
            {
                return this.handle == IntPtr.Zero;
            }
        }

        /// <summary>
        /// Releases the handle by calling FDIDestroy().
        /// </summary>
        /// <returns>True if the release succeeded.</returns>
        protected override bool ReleaseHandle()
        {
            return FDI.Destroy(this.handle);
        }
    }
}
/// <summary>
/// Error info structure for FCI and FDI.
/// </summary>
/// <remarks>Before being passed to FCI or FDI, this structure is
/// pinned in memory via a GCHandle. The pinning is necessary
/// to be able to read the results, since the ERF structure doesn't
/// get marshalled back out after an error.</remarks>
/// <summary>
/// Error info structure shared by FCI and FDI.
/// </summary>
/// <remarks>Before being handed to FCI or FDI this object is pinned via a
/// GCHandle: the ERF is written by native code on failure and is not
/// marshalled back out, so the managed side must read the pinned memory
/// directly.</remarks>
[StructLayout(LayoutKind.Sequential)]
internal class ERF
{
    // Field order matches the native ERF layout; do not reorder.
    private int erfOper;
    private int erfType;
    private int fError;

    /// <summary>
    /// Gets or sets the cabinet error code.
    /// </summary>
    internal int Oper
    {
        get { return this.erfOper; }
        set { this.erfOper = value; }
    }

    /// <summary>
    /// Gets or sets the Win32 error code.
    /// </summary>
    internal int Type
    {
        get { return this.erfType; }
        set { this.erfType = value; }
    }

    /// <summary>
    /// Gets or sets whether an error occurred. Backed by an int field because
    /// GCHandle pinning doesn't like the bool type.
    /// </summary>
    internal bool Error
    {
        get { return this.fError != 0; }
        set { this.fError = value ? 1 : 0; }
    }

    /// <summary>
    /// Resets all error information to the no-error state.
    /// </summary>
    internal void Clear()
    {
        this.Oper = 0;
        this.Type = 0;
        this.Error = false;
    }
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace Grackle.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
    // Number of elements generated for any collection/dictionary/queryable sample.
    internal const int DefaultCollectionSize = 2;

    // Generator for primitive/simple types; stateful (monotonic index) so
    // successive samples differ.
    private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();

    /// <summary>
    /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
    /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
    /// Complex types: POCO types.
    /// Nullables: <see cref="Nullable{T}"/>.
    /// Arrays: arrays of simple types or complex types.
    /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
    /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
    /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
    /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
    /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
    /// </summary>
    /// <param name="type">The type.</param>
    /// <returns>An object of the given type.</returns>
    public object GenerateObject(Type type)
    {
        return GenerateObject(type, new Dictionary<Type, object>());
    }

    /// <summary>
    /// Core dispatch: routes the type to the appropriate specialized generator.
    /// 'createdObjectReferences' caches already-built complex objects so
    /// circular references terminate. Returns null on any failure.
    /// </summary>
    [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
    private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
    {
        try
        {
            if (SimpleTypeObjectGenerator.CanGenerateObject(type))
            {
                return SimpleObjectGenerator.GenerateObject(type);
            }

            if (type.IsArray)
            {
                return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
            }

            if (type.IsGenericType)
            {
                return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
            }

            if (type == typeof(IDictionary))
            {
                // Non-generic IDictionary: use Hashtable as the concrete sample.
                return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
            }

            if (typeof(IDictionary).IsAssignableFrom(type))
            {
                return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
            }

            if (type == typeof(IList) ||
                type == typeof(IEnumerable) ||
                type == typeof(ICollection))
            {
                // Non-generic collection interfaces: use ArrayList as the concrete sample.
                return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
            }

            if (typeof(IList).IsAssignableFrom(type))
            {
                return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
            }

            if (type == typeof(IQueryable))
            {
                return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
            }

            if (type.IsEnum)
            {
                return GenerateEnum(type);
            }

            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }
        }
        catch
        {
            // Returns null if anything fails
            return null;
        }

        return null;
    }

    /// <summary>
    /// Handles all closed generic types: Nullable, KeyValuePair, Tuple,
    /// generic collection/dictionary/queryable interfaces, and finally
    /// arbitrary public generic POCOs.
    /// </summary>
    private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
    {
        Type genericTypeDefinition = type.GetGenericTypeDefinition();
        if (genericTypeDefinition == typeof(Nullable<>))
        {
            return GenerateNullable(type, createdObjectReferences);
        }

        if (genericTypeDefinition == typeof(KeyValuePair<,>))
        {
            return GenerateKeyValuePair(type, createdObjectReferences);
        }

        if (IsTuple(genericTypeDefinition))
        {
            return GenerateTuple(type, createdObjectReferences);
        }

        Type[] genericArguments = type.GetGenericArguments();
        if (genericArguments.Length == 1)
        {
            if (genericTypeDefinition == typeof(IList<>) ||
                genericTypeDefinition == typeof(IEnumerable<>) ||
                genericTypeDefinition == typeof(ICollection<>))
            {
                // Generic collection interface: use List<T> as the concrete sample.
                Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
            }

            if (genericTypeDefinition == typeof(IQueryable<>))
            {
                return GenerateQueryable(type, collectionSize, createdObjectReferences);
            }

            // Concrete type implementing ICollection<T>: populate it directly.
            Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
            if (closedCollectionType.IsAssignableFrom(type))
            {
                return GenerateCollection(type, collectionSize, createdObjectReferences);
            }
        }

        if (genericArguments.Length == 2)
        {
            if (genericTypeDefinition == typeof(IDictionary<,>))
            {
                // Generic dictionary interface: use Dictionary<K,V> as the concrete sample.
                Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
            }

            // Concrete type implementing IDictionary<K,V>: populate it directly.
            Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
            if (closedDictionaryType.IsAssignableFrom(type))
            {
                return GenerateDictionary(type, collectionSize, createdObjectReferences);
            }
        }

        if (type.IsPublic || type.IsNestedPublic)
        {
            return GenerateComplexObject(type, createdObjectReferences);
        }

        return null;
    }

    /// <summary>
    /// Builds a Tuple of the given closed type by generating each generic
    /// argument. Returns null only when EVERY component failed to generate.
    /// </summary>
    private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
    {
        Type[] genericArgs = type.GetGenericArguments();
        object[] parameterValues = new object[genericArgs.Length];
        bool failedToCreateTuple = true;
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < genericArgs.Length; i++)
        {
            parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
            failedToCreateTuple &= parameterValues[i] == null;
        }

        if (failedToCreateTuple)
        {
            return null;
        }

        object result = Activator.CreateInstance(type, parameterValues);
        return result;
    }

    // True for the eight Tuple<> arities the BCL defines.
    private static bool IsTuple(Type genericTypeDefinition)
    {
        return genericTypeDefinition == typeof(Tuple<>) ||
            genericTypeDefinition == typeof(Tuple<,>) ||
            genericTypeDefinition == typeof(Tuple<,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
            genericTypeDefinition == typeof(Tuple<,,,,,,,>);
    }

    /// <summary>
    /// Builds a KeyValuePair of the given closed type. Returns null only when
    /// both the key and the value failed to generate.
    /// </summary>
    private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
    {
        Type[] genericArgs = keyValuePairType.GetGenericArguments();
        Type typeK = genericArgs[0];
        Type typeV = genericArgs[1];
        ObjectGenerator objectGenerator = new ObjectGenerator();
        object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
        object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
        if (keyObject == null && valueObject == null)
        {
            // Failed to create key and values
            return null;
        }

        object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
        return result;
    }

    /// <summary>
    /// Builds an array of 'size' generated elements. Returns null when no
    /// element could be generated.
    /// </summary>
    private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        Type type = arrayType.GetElementType();
        Array result = Array.CreateInstance(type, size);
        bool areAllElementsNull = true;
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < size; i++)
        {
            object element = objectGenerator.GenerateObject(type, createdObjectReferences);
            result.SetValue(element, i);
            areAllElementsNull &= element == null;
        }

        if (areAllElementsNull)
        {
            return null;
        }

        return result;
    }

    /// <summary>
    /// Builds and populates a dictionary of the given concrete type via
    /// reflection. Works for both generic dictionaries (Add/ContainsKey) and
    /// non-generic ones like Hashtable (Add/Contains).
    /// </summary>
    private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        // Non-generic dictionaries use object keys/values.
        Type typeK = typeof(object);
        Type typeV = typeof(object);
        if (dictionaryType.IsGenericType)
        {
            Type[] genericArgs = dictionaryType.GetGenericArguments();
            typeK = genericArgs[0];
            typeV = genericArgs[1];
        }

        object result = Activator.CreateInstance(dictionaryType);
        MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
        MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < size; i++)
        {
            object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            if (newKey == null)
            {
                // Cannot generate a valid key
                return null;
            }

            // Skip duplicate keys (generated keys may repeat, e.g. enums).
            bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
            if (!containsKey)
            {
                object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                addMethod.Invoke(result, new object[] { newKey, newValue });
            }
        }

        return result;
    }

    // Returns the first declared enum member, or null for an empty enum.
    private static object GenerateEnum(Type enumType)
    {
        Array possibleValues = Enum.GetValues(enumType);
        if (possibleValues.Length > 0)
        {
            return possibleValues.GetValue(0);
        }

        return null;
    }

    /// <summary>
    /// Builds an IQueryable sample: a populated List&lt;T&gt; (generic) or
    /// object[] (non-generic) wrapped via Queryable.AsQueryable.
    /// </summary>
    private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        bool isGeneric = queryableType.IsGenericType;
        object list;
        if (isGeneric)
        {
            Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
            list = GenerateCollection(listType, size, createdObjectReferences);
        }
        else
        {
            list = GenerateArray(typeof(object[]), size, createdObjectReferences);
        }

        if (list == null)
        {
            return null;
        }

        if (isGeneric)
        {
            // Invoke the generic AsQueryable<T>(IEnumerable<T>) overload via reflection.
            Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
            MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
            return asQueryableMethod.Invoke(null, new[] { list });
        }

        return Queryable.AsQueryable((IEnumerable)list);
    }

    /// <summary>
    /// Builds and populates a collection of the given concrete type via its
    /// Add method. Returns null when no element could be generated.
    /// </summary>
    private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
    {
        Type type = collectionType.IsGenericType ?
            collectionType.GetGenericArguments()[0] :
            typeof(object);
        object result = Activator.CreateInstance(collectionType);
        MethodInfo addMethod = collectionType.GetMethod("Add");
        bool areAllElementsNull = true;
        ObjectGenerator objectGenerator = new ObjectGenerator();
        for (int i = 0; i < size; i++)
        {
            object element = objectGenerator.GenerateObject(type, createdObjectReferences);
            addMethod.Invoke(result, new object[] { element });
            areAllElementsNull &= element == null;
        }

        if (areAllElementsNull)
        {
            return null;
        }

        return result;
    }

    // Generates the underlying value type; boxing makes it a valid Nullable<T>.
    private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
    {
        Type type = nullableType.GetGenericArguments()[0];
        ObjectGenerator objectGenerator = new ObjectGenerator();
        return objectGenerator.GenerateObject(type, createdObjectReferences);
    }

    /// <summary>
    /// Builds a POCO via its default constructor and fills its public settable
    /// properties and fields. The instance is cached in
    /// 'createdObjectReferences' BEFORE population so circular references
    /// resolve to the same instance instead of recursing forever.
    /// </summary>
    private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
    {
        object result = null;

        if (createdObjectReferences.TryGetValue(type, out result))
        {
            // The object has been created already, just return it. This will handle the circular reference case.
            return result;
        }

        if (type.IsValueType)
        {
            result = Activator.CreateInstance(type);
        }
        else
        {
            ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
            if (defaultCtor == null)
            {
                // Cannot instantiate the type because it doesn't have a default constructor
                return null;
            }

            result = defaultCtor.Invoke(new object[0]);
        }

        createdObjectReferences.Add(type, result);
        SetPublicProperties(type, result, createdObjectReferences);
        SetPublicFields(type, result, createdObjectReferences);
        return result;
    }

    // Populates every public writable instance property with a generated value.
    private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
    {
        PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
        ObjectGenerator objectGenerator = new ObjectGenerator();
        foreach (PropertyInfo property in properties)
        {
            if (property.CanWrite)
            {
                object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                property.SetValue(obj, propertyValue, null);
            }
        }
    }

    // Populates every public instance field with a generated value.
    private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
    {
        FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
        ObjectGenerator objectGenerator = new ObjectGenerator();
        foreach (FieldInfo field in fields)
        {
            object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
            field.SetValue(obj, fieldValue);
        }
    }

    // Table-driven generator for BCL simple types (numbers, strings, dates, ...).
    private class SimpleTypeObjectGenerator
    {
        // Monotonic counter folded into generated values so repeated samples differ.
        private long _index = 0;
        private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

        [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
        private static Dictionary<Type, Func<long, object>> InitializeGenerators()
        {
            return new Dictionary<Type, Func<long, object>>
            {
                { typeof(Boolean), index => true },
                { typeof(Byte), index => (Byte)64 },
                { typeof(Char), index => (Char)65 },
                { typeof(DateTime), index => DateTime.Now },
                { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                { typeof(DBNull), index => DBNull.Value },
                { typeof(Decimal), index => (Decimal)index },
                { typeof(Double), index => (Double)(index + 0.1) },
                { typeof(Guid), index => Guid.NewGuid() },
                { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                { typeof(Int64), index => (Int64)index },
                { typeof(Object), index => new object() },
                { typeof(SByte), index => (SByte)64 },
                { typeof(Single), index => (Single)(index + 0.1) },
                {
                    typeof(String), index =>
                    {
                        return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                    }
                },
                {
                    typeof(TimeSpan), index =>
                    {
                        return TimeSpan.FromTicks(1234567);
                    }
                },
                { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                { typeof(UInt64), index => (UInt64)index },
                {
                    typeof(Uri), index =>
                    {
                        return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                    }
                },
            };
        }

        // True when 'type' has an entry in the generator table.
        public static bool CanGenerateObject(Type type)
        {
            return DefaultGenerators.ContainsKey(type);
        }

        // Pre-condition: CanGenerateObject(type) is true.
        public object GenerateObject(Type type)
        {
            return DefaultGenerators[type](++_index);
        }
    }
}
}
| |
using System;
using System.Collections;
using System.Text;
using System.Text.RegularExpressions;
namespace umbraco.cms.businesslogic.utilities {
/// <summary>
/// This Class implements the Difference Algorithm published in
/// "An O(ND) Difference Algorithm and its Variations" by Eugene Myers
/// Algorithmica Vol. 1 No. 2, 1986, p 251.
///
/// There are many C, Java, Lisp implementations public available but they all seem to come
/// from the same source (diffutils) that is under the (unfree) GNU public License
/// and cannot be reused as a sourcecode for a commercial application.
/// There are very old C implementations that use other (worse) algorithms.
/// Microsoft also published sourcecode of a diff-tool (windiff) that uses some tree data.
/// Also, a direct transfer from a C source to C# is not easy because there is a lot of pointer
/// arithmetic in the typical C solutions and I need a managed solution.
/// These are the reasons why I implemented the original published algorithm from scratch and
/// made it available without the GNU license limitations.
/// I do not need a high performance diff tool because it is used only sometimes.
/// I will do some performance tweaking when needed.
///
/// The algorithm itself is comparing 2 arrays of numbers so when comparing 2 text documents
/// each line is converted into a (hash) number. See DiffText().
///
/// Some changes to the original algorithm:
/// The original algorithm was described using a recursive approach and comparing zero indexed arrays.
/// Extracting sub-arrays and rejoining them is very performance and memory intensive so the same
/// (readonly) data arrays are passed around together with their lower and upper bounds.
/// This circumstance makes the LCS and SMS functions more complicated.
/// I added some code to the LCS function to get a fast response on sub-arrays that are identical,
/// completely deleted or inserted.
///
/// The result from a comparisation is stored in 2 arrays that flag for modified (deleted or inserted)
/// lines in the 2 data arrays. These bits are then analysed to produce a array of Item objects.
///
/// Further possible optimizations:
/// (first rule: don't do it; second: don't do it yet)
/// The arrays DataA and DataB are passed as parameters, but are never changed after the creation
/// so they can be members of the class to avoid the paramter overhead.
/// In SMS is a lot of boundary arithmetic in the for-D and for-k loops that can be done by increment
/// and decrement of local variables.
/// The DownVector and UpVector arrays are always created and destroyed each time the SMS gets called.
/// It is possible to reuse them when transferring them to members of the class.
/// See TODO: hints.
///
/// diff.cs: A port of the algorithm to C#
/// Copyright (c) by Matthias Hertel, http://www.mathertel.de
/// This work is licensed under a BSD style license. See http://www.mathertel.de/License.aspx
///
/// Changes:
/// 2002.09.20 There was a "hang" in some situations.
/// Now I understand a little bit more of the SMS algorithm.
/// There have been overlapping boxes; that where analyzed partial differently.
/// One return-point is enough.
/// A assertion was added in CreateDiffs when in debug-mode, that counts the number of equal (no modified) lines in both arrays.
/// They must be identical.
///
/// 2003.02.07 Out of bounds error in the Up/Down vector arrays in some situations.
/// The two vectors are now accessed using different offsets that are adjusted using the start k-Line.
/// A test case is added.
///
/// 2006.03.05 Some documentation and a direct Diff entry point.
///
/// 2006.03.08 Refactored the API to static methods on the Diff class to make usage simpler.
/// 2006.03.10 using the standard Debug class for self-test now.
/// compile with: csc /target:exe /out:diffTest.exe /d:DEBUG /d:TRACE /d:SELFTEST Diff.cs
/// 2007.01.06 license agreement changed to a BSD style license.
/// 2007.06.03 added the Optimize method.
/// 2007.09.23 UpVector and DownVector optimization by Jan Stoklasa ().
/// </summary>
public class Diff {
/// <summary>Details of one difference: a deleted run in A and/or an inserted run in B.</summary>
public struct Item {
/// <summary>Start Line number in Data A (0-based).</summary>
public int StartA;
/// <summary>Start Line number in Data B (0-based).</summary>
public int StartB;
/// <summary>Number of deleted lines in Data A (may be 0 for a pure insertion).</summary>
public int deletedA;
/// <summary>Number of inserted lines in Data B (may be 0 for a pure deletion).</summary>
public int insertedB;
} // Item
/// <summary>
/// Shortest Middle Snake Return Data: the (x, y) end point of the middle snake.
/// </summary>
private struct SMSRD {
// x is the index into sequence A, y the corresponding index into sequence B
internal int x, y;
// internal int u, v; // 2002.09.20: no need for 2 points
}
#region self-Test
#if (SELFTEST)
/// <summary>
/// start a self- / box-test for some diff cases and report to the debug output.
/// Each case builds two newline-separated documents, diffs them, and compares the
/// serialized result (see TestHelper) against the expected item list.
/// </summary>
/// <param name="args">not used</param>
/// <returns>always 0</returns>
public static int Main(string[] args) {
StringBuilder ret = new StringBuilder();
string a, b;
System.Diagnostics.ConsoleTraceListener ctl = new System.Diagnostics.ConsoleTraceListener(false);
System.Diagnostics.Debug.Listeners.Add(ctl);
System.Console.WriteLine("Diff Self Test...");
// test all changes
a = "a,b,c,d,e,f,g,h,i,j,k,l".Replace(',', '\n');
b = "0,1,2,3,4,5,6,7,8,9".Replace(',', '\n');
System.Diagnostics.Debug.Assert(TestHelper(Diff.DiffText(a, b, false, false, false))
== "12.10.0.0*",
"all-changes test failed.");
System.Diagnostics.Debug.WriteLine("all-changes test passed.");
// test all same
a = "a,b,c,d,e,f,g,h,i,j,k,l".Replace(',', '\n');
b = a;
System.Diagnostics.Debug.Assert(TestHelper(Diff.DiffText(a, b, false, false, false))
== "",
"all-same test failed.");
System.Diagnostics.Debug.WriteLine("all-same test passed.");
// test snake
a = "a,b,c,d,e,f".Replace(',', '\n');
b = "b,c,d,e,f,x".Replace(',', '\n');
System.Diagnostics.Debug.Assert(TestHelper(Diff.DiffText(a, b, false, false, false))
== "1.0.0.0*0.1.6.5*",
"snake test failed.");
System.Diagnostics.Debug.WriteLine("snake test passed.");
// 2002.09.20 - repro for the "hang" fixed on that date (see class comment)
a = "c1,a,c2,b,c,d,e,g,h,i,j,c3,k,l".Replace(',', '\n');
b = "C1,a,C2,b,c,d,e,I1,e,g,h,i,j,C3,k,I2,l".Replace(',', '\n');
System.Diagnostics.Debug.Assert(TestHelper(Diff.DiffText(a, b, false, false, false))
== "1.1.0.0*1.1.2.2*0.2.7.7*1.1.11.13*0.1.13.15*",
"repro20020920 test failed.");
System.Diagnostics.Debug.WriteLine("repro20020920 test passed.");
// 2003.02.07 - repro for the out-of-bounds vector access fixed on that date
a = "F".Replace(',', '\n');
b = "0,F,1,2,3,4,5,6,7".Replace(',', '\n');
System.Diagnostics.Debug.Assert(TestHelper(Diff.DiffText(a, b, false, false, false))
== "0.1.0.0*0.7.1.2*",
"repro20030207 test failed.");
System.Diagnostics.Debug.WriteLine("repro20030207 test passed.");
// Muegel - repro
a = "HELLO\nWORLD";
b = "\n\nhello\n\n\n\nworld\n";
System.Diagnostics.Debug.Assert(TestHelper(Diff.DiffText(a, b, false, false, false))
== "2.8.0.0*",
"repro20030409 test failed.");
System.Diagnostics.Debug.WriteLine("repro20030409 test passed.");
// test some differences
a = "a,b,-,c,d,e,f,f".Replace(',', '\n');
b = "a,b,x,c,e,f".Replace(',', '\n');
System.Diagnostics.Debug.Assert(TestHelper(Diff.DiffText(a, b, false, false, false))
== "1.1.2.2*1.0.4.4*1.0.6.5*",
"some-changes test failed.");
System.Diagnostics.Debug.WriteLine("some-changes test passed.");
System.Diagnostics.Debug.WriteLine("End.");
System.Diagnostics.Debug.Flush();
return (0);
}
/// <summary>
/// Serialize a diff result as a "deletedA.insertedB.StartA.StartB*" record per item,
/// so self-test cases can compare against a compact expected string.
/// </summary>
/// <param name="f">the diff items to serialize</param>
/// <returns>the concatenated records; empty string for an empty diff</returns>
public static string TestHelper(Item []f) {
StringBuilder ret = new StringBuilder();
for (int n = 0; n < f.Length; n++) {
ret.Append(f[n].deletedA.ToString() + "." + f[n].insertedB.ToString() + "." + f[n].StartA.ToString() + "." + f[n].StartB.ToString() + "*");
}
// Debug.Write(5, "TestHelper", ret.ToString());
return (ret.ToString());
}
#endif
#endregion
/// <summary>
/// Find the difference in 2 texts, comparing character by character, and return the result as HTML.
/// Removed content is wrapped in a &lt;del&gt; element; added content is wrapped in an &lt;ins&gt; element.
/// </summary>
/// <param name="a_line">The old version of the string.</param>
/// <param name="b_line">The new version of the string.</param>
/// <returns>The new version of the string with removals/additions marked up as HTML.</returns>
public static string Diff2Html(string a_line, string b_line) {
    int[] a_codes = DiffCharCodes(a_line, false);
    int[] b_codes = DiffCharCodes(b_line, false);
    Diff.Item[] diffs = Diff.DiffInt(a_codes, b_codes);
    // Build with a StringBuilder: the original char-by-char string concatenation was O(n^2).
    StringBuilder result = new StringBuilder();
    int pos = 0; // current position in b_line
    for (int n = 0; n < diffs.Length; n++) {
        Diff.Item it = diffs[n];
        // write unchanged chars up to the start of this difference
        while ((pos < it.StartB) && (pos < b_line.Length)) {
            result.Append(b_line[pos]);
            pos++;
        } // while
        // write deleted chars (taken from the old version)
        if (it.deletedA > 0) {
            result.Append("<del>");
            for (int m = 0; m < it.deletedA; m++) {
                result.Append(a_line[it.StartA + m]);
            } // for
            result.Append("</del>");
        }
        // write inserted chars (taken from the new version)
        if (pos < it.StartB + it.insertedB) {
            result.Append("<ins>");
            while (pos < it.StartB + it.insertedB) {
                result.Append(b_line[pos]);
                pos++;
            } // while
            result.Append("</ins>");
        } // if
    } // for
    // write rest of unchanged chars
    while (pos < b_line.Length) {
        result.Append(b_line[pos]);
        pos++;
    }
    return result.ToString();
}
/// <summary>
/// Convert every character of the text to its integer code so the integer diff can run on it.
/// </summary>
/// <param name="aText">the input text</param>
/// <param name="ignoreCase">when true, characters are uppercased (invariant) before conversion</param>
/// <returns>an array with one integer code per character</returns>
private static int[] DiffCharCodes(string aText, bool ignoreCase) {
    string source = ignoreCase ? aText.ToUpperInvariant() : aText;
    int[] charCodes = new int[source.Length];
    for (int i = 0; i < source.Length; i++) {
        charCodes[i] = source[i];
    }
    return charCodes;
}
/// <summary>
/// Find the difference in 2 texts, comparing by textlines.
/// Convenience overload using the default options: no space trimming,
/// no whitespace folding, case sensitive comparison.
/// </summary>
/// <param name="TextA">A-version of the text (usualy the old one)</param>
/// <param name="TextB">B-version of the text (usualy the new one)</param>
/// <returns>Returns a array of Items that describe the differences.</returns>
public Item[] DiffText(string TextA, string TextB) {
    return DiffText(TextA, TextB, false, false, false);
} // DiffText
/// <summary>
/// Find the difference in 2 text documents, comparing by textlines.
/// The algorithm itself compares 2 arrays of numbers, so each textline is first converted
/// into a (hash) number: all lines go through a shared hashtable so duplicate lines map to
/// the same number, and each new distinct line gets a fresh number.
/// </summary>
/// <param name="TextA">A-version of the text (usualy the old one)</param>
/// <param name="TextB">B-version of the text (usualy the new one)</param>
/// <param name="trimSpace">When set to true, all leading and trailing whitespace characters are stripped out before the comparation is done.</param>
/// <param name="ignoreSpace">When set to true, all whitespace characters are converted to a single space character before the comparation is done.</param>
/// <param name="ignoreCase">When set to true, all characters are converted to their lowercase equivivalence before the comparation is done.</param>
/// <returns>Returns a array of Items that describe the differences.</returns>
public static Item[] DiffText(string TextA, string TextB, bool trimSpace, bool ignoreSpace, bool ignoreCase) {
    // prepare the input-text and convert to comparable numbers.
    Hashtable lineCodes = new Hashtable(TextA.Length + TextB.Length);
    // A-version (original data) and B-version (modified data) share the same hashtable
    // so identical lines in both texts receive identical codes.
    DiffData dataA = new DiffData(DiffCodes(TextA, lineCodes, trimSpace, ignoreSpace, ignoreCase));
    DiffData dataB = new DiffData(DiffCodes(TextB, lineCodes, trimSpace, ignoreSpace, ignoreCase));
    lineCodes = null; // free up hashtable memory (maybe)
    int max = dataA.Length + dataB.Length + 1;
    int[] downVector = new int[2 * max + 2]; // vector for the (0,0) to (x,y) search
    int[] upVector = new int[2 * max + 2];   // vector for the (u,v) to (N,M) search
    LCS(dataA, 0, dataA.Length, dataB, 0, dataB.Length, downVector, upVector);
    Optimize(dataA);
    Optimize(dataB);
    return CreateDiffs(dataA, dataB);
} // DiffText
/// <summary>
/// If a sequence of modified lines starts with a line that contains the same content
/// as the line that appends the changes, the difference sequence is modified so that the
/// appended line and not the starting line is marked as modified.
/// This leads to more readable diff sequences when comparing text files.
/// </summary>
/// <param name="Data">A Diff data buffer containing the identified changes.</param>
private static void Optimize(DiffData Data) {
int StartPos, EndPos;
StartPos = 0;
while (StartPos < Data.Length) {
// skip over unmodified lines to find the start of the next modified run
while ((StartPos < Data.Length) && (Data.modified[StartPos] == false))
StartPos++;
// find the end (exclusive) of the modified run
EndPos = StartPos;
while ((EndPos < Data.Length) && (Data.modified[EndPos] == true))
EndPos++;
if ((EndPos < Data.Length) && (Data.data[StartPos] == Data.data[EndPos])) {
// the line just after the run equals the first line of the run:
// shift the whole run down by one line and rescan from the same StartPos,
// because the run may be shiftable repeatedly.
Data.modified[StartPos] = false;
Data.modified[EndPos] = true;
} else {
StartPos = EndPos;
} // if
} // while
} // Optimize
/// <summary>
/// Find the difference in 2 arrays of integers.
/// </summary>
/// <param name="ArrayA">A-version of the numbers (usualy the old one)</param>
/// <param name="ArrayB">B-version of the numbers (usualy the new one)</param>
/// <returns>Returns a array of Items that describe the differences.</returns>
public static Item[] DiffInt(int[] ArrayA, int[] ArrayB) {
    DiffData dataA = new DiffData(ArrayA); // original data
    DiffData dataB = new DiffData(ArrayB); // modified data
    int max = dataA.Length + dataB.Length + 1;
    int[] downVector = new int[2 * max + 2]; // vector for the (0,0) to (x,y) search
    int[] upVector = new int[2 * max + 2];   // vector for the (u,v) to (N,M) search
    LCS(dataA, 0, dataA.Length, dataB, 0, dataB.Length, downVector, upVector);
    return CreateDiffs(dataA, dataB);
} // Diff
/// <summary>
/// This function converts all textlines of the text into unique numbers for every unique textline
/// so further work can work only with simple numbers.
/// </summary>
/// <param name="aText">the input text</param>
/// <param name="h">This extern initialized hashtable is used for storing all ever used textlines.</param>
/// <param name="trimSpace">ignore leading and trailing space characters</param>
/// <param name="ignoreSpace">fold runs of whitespace into a single space before hashing</param>
/// <param name="ignoreCase">lowercase each line before hashing</param>
/// <returns>a array of integers.</returns>
private static int[] DiffCodes(string aText, Hashtable h, bool trimSpace, bool ignoreSpace, bool ignoreCase) {
    // strip off all cr, only use lf as textline separator.
    string[] lines = aText.Replace("\r", "").Split('\n');
    int[] codes = new int[lines.Length];
    int lastUsedCode = h.Count; // continue numbering where a previous call left off
    for (int i = 0; i < lines.Length; ++i) {
        string line = lines[i];
        if (trimSpace)
            line = line.Trim();
        if (ignoreSpace) {
            line = Regex.Replace(line, "\\s+", " "); // TODO: optimization: faster blank removal.
        }
        if (ignoreCase)
            line = line.ToLower();
        object knownCode = h[line];
        if (knownCode == null) {
            // first occurrence of this (normalized) line: assign the next free number
            lastUsedCode++;
            h[line] = lastUsedCode;
            codes[i] = lastUsedCode;
        } else {
            codes[i] = (int)knownCode;
        } // if
    } // for
    return codes;
} // DiffCodes
/// <summary>
/// This is the algorithm to find the Shortest Middle Snake (SMS).
/// It runs the forward (0,0)->(x,y) and reverse (u,v)->(N,M) searches simultaneously
/// until the two furthest-reaching paths overlap.
/// </summary>
/// <param name="DataA">sequence A</param>
/// <param name="LowerA">lower bound of the actual range in DataA</param>
/// <param name="UpperA">upper bound of the actual range in DataA (exclusive)</param>
/// <param name="DataB">sequence B</param>
/// <param name="LowerB">lower bound of the actual range in DataB</param>
/// <param name="UpperB">upper bound of the actual range in DataB (exclusive)</param>
/// <param name="DownVector">a vector for the (0,0) to (x,y) search. Passed as a parameter for speed reasons.</param>
/// <param name="UpVector">a vector for the (u,v) to (N,M) search. Passed as a parameter for speed reasons.</param>
/// <returns>a MiddleSnakeData record containing x,y (u,v were dropped 2002.09.20 — one point is enough)</returns>
private static SMSRD SMS(DiffData DataA, int LowerA, int UpperA, DiffData DataB, int LowerB, int UpperB,
int[] DownVector, int[] UpVector) {
SMSRD ret;
int MAX = DataA.Length + DataB.Length + 1;
int DownK = LowerA - LowerB; // the k-line to start the forward search
int UpK = UpperA - UpperB; // the k-line to start the reverse search
int Delta = (UpperA - LowerA) - (UpperB - LowerB);
// with an odd delta the overlap check happens in the forward pass, otherwise in the reverse pass
bool oddDelta = (Delta & 1) != 0;
// The vectors in the publication accepts negative indexes. the vectors implemented here are 0-based
// and are access using a specific offset: UpOffset UpVector and DownOffset for DownVektor
int DownOffset = MAX - DownK;
int UpOffset = MAX - UpK;
// upper bound on the number of edit steps to search in this box
int MaxD = ((UpperA - LowerA + UpperB - LowerB) / 2) + 1;
// Debug.Write(2, "SMS", String.Format("Search the box: A[{0}-{1}] to B[{2}-{3}]", LowerA, UpperA, LowerB, UpperB));
// init vectors
DownVector[DownOffset + DownK + 1] = LowerA;
UpVector[UpOffset + UpK - 1] = UpperA;
for (int D = 0; D <= MaxD; D++) {
// Extend the forward path.
for (int k = DownK - D; k <= DownK + D; k += 2) {
// Debug.Write(0, "SMS", "extend forward path " + k.ToString());
// find the only or better starting point
int x, y;
if (k == DownK - D) {
x = DownVector[DownOffset + k + 1]; // down
} else {
x = DownVector[DownOffset + k - 1] + 1; // a step to the right
if ((k < DownK + D) && (DownVector[DownOffset + k + 1] >= x))
x = DownVector[DownOffset + k + 1]; // down
}
y = x - k;
// find the end of the furthest reaching forward D-path in diagonal k.
while ((x < UpperA) && (y < UpperB) && (DataA.data[x] == DataB.data[y])) {
x++; y++;
}
DownVector[DownOffset + k] = x;
// overlap ? (forward and reverse paths meet on this diagonal)
if (oddDelta && (UpK - D < k) && (k < UpK + D)) {
if (UpVector[UpOffset + k] <= DownVector[DownOffset + k]) {
ret.x = DownVector[DownOffset + k];
ret.y = DownVector[DownOffset + k] - k;
// ret.u = UpVector[UpOffset + k]; // 2002.09.20: no need for 2 points
// ret.v = UpVector[UpOffset + k] - k;
return (ret);
} // if
} // if
} // for k
// Extend the reverse path.
for (int k = UpK - D; k <= UpK + D; k += 2) {
// Debug.Write(0, "SMS", "extend reverse path " + k.ToString());
// find the only or better starting point
int x, y;
if (k == UpK + D) {
x = UpVector[UpOffset + k - 1]; // up
} else {
x = UpVector[UpOffset + k + 1] - 1; // left
if ((k > UpK - D) && (UpVector[UpOffset + k - 1] < x))
x = UpVector[UpOffset + k - 1]; // up
} // if
y = x - k;
// walk the snake backwards along matching elements
while ((x > LowerA) && (y > LowerB) && (DataA.data[x - 1] == DataB.data[y - 1])) {
x--; y--; // diagonal
}
UpVector[UpOffset + k] = x;
// overlap ?
if (!oddDelta && (DownK - D <= k) && (k <= DownK + D)) {
if (UpVector[UpOffset + k] <= DownVector[DownOffset + k]) {
ret.x = DownVector[DownOffset + k];
ret.y = DownVector[DownOffset + k] - k;
// ret.u = UpVector[UpOffset + k]; // 2002.09.20: no need for 2 points
// ret.v = UpVector[UpOffset + k] - k;
return (ret);
} // if
} // if
} // for k
} // for D
// unreachable for valid input: the searches must overlap within MaxD steps
throw new ApplicationException("the algorithm should never come here.");
} // SMS
/// <summary>
/// This is the divide-and-conquer implementation of the longes common-subsequence (LCS)
/// algorithm.
/// The published algorithm passes recursively parts of the A and B sequences.
/// To avoid copying these arrays the lower and upper bounds are passed while the sequences stay constant.
/// The result is written into the modified[] flags of DataA/DataB rather than returned.
/// </summary>
/// <param name="DataA">sequence A</param>
/// <param name="LowerA">lower bound of the actual range in DataA</param>
/// <param name="UpperA">upper bound of the actual range in DataA (exclusive)</param>
/// <param name="DataB">sequence B</param>
/// <param name="LowerB">lower bound of the actual range in DataB</param>
/// <param name="UpperB">upper bound of the actual range in DataB (exclusive)</param>
/// <param name="DownVector">a vector for the (0,0) to (x,y) search. Passed as a parameter for speed reasons.</param>
/// <param name="UpVector">a vector for the (u,v) to (N,M) search. Passed as a parameter for speed reasons.</param>
private static void LCS(DiffData DataA, int LowerA, int UpperA, DiffData DataB, int LowerB, int UpperB, int[] DownVector, int[] UpVector) {
// Debug.Write(2, "LCS", String.Format("Analyse the box: A[{0}-{1}] to B[{2}-{3}]", LowerA, UpperA, LowerB, UpperB));
// Fast walkthrough equal lines at the start
while (LowerA < UpperA && LowerB < UpperB && DataA.data[LowerA] == DataB.data[LowerB]) {
LowerA++; LowerB++;
}
// Fast walkthrough equal lines at the end
while (LowerA < UpperA && LowerB < UpperB && DataA.data[UpperA - 1] == DataB.data[UpperB - 1]) {
--UpperA; --UpperB;
}
if (LowerA == UpperA) {
// the A range is empty: everything left in B was inserted.
// mark as inserted lines.
while (LowerB < UpperB)
DataB.modified[LowerB++] = true;
} else if (LowerB == UpperB) {
// the B range is empty: everything left in A was deleted.
// mark as deleted lines.
while (LowerA < UpperA)
DataA.modified[LowerA++] = true;
} else {
// Find the middle snakea and length of an optimal path for A and B
SMSRD smsrd = SMS(DataA, LowerA, UpperA, DataB, LowerB, UpperB, DownVector, UpVector);
// Debug.Write(2, "MiddleSnakeData", String.Format("{0},{1}", smsrd.x, smsrd.y));
// The path is from LowerX to (x,y) and (x,y) to UpperX — recurse on both halves.
LCS(DataA, LowerA, smsrd.x, DataB, LowerB, smsrd.y, DownVector, UpVector);
LCS(DataA, smsrd.x, UpperA, DataB, smsrd.y, UpperB, DownVector, UpVector); // 2002.09.20: no need for 2 points
}
} // LCS()
/// <summary>Scan the tables of which lines are inserted and deleted,
/// producing an edit script in forward order.
/// </summary>
/// <param name="DataA">A-version data with its modified-flags filled in by LCS/Optimize</param>
/// <param name="DataB">B-version data with its modified-flags filled in by LCS/Optimize</param>
/// <returns>the differences collected into an array of Item records</returns>
private static Item[] CreateDiffs(DiffData DataA, DiffData DataB) {
    ArrayList collected = new ArrayList();
    int lineA = 0;
    int lineB = 0;
    while (lineA < DataA.Length || lineB < DataB.Length) {
        bool bothUnmodified = (lineA < DataA.Length) && (!DataA.modified[lineA])
            && (lineB < DataB.Length) && (!DataB.modified[lineB]);
        if (bothUnmodified) {
            // equal lines: advance both cursors
            lineA++;
            lineB++;
        } else {
            // maybe deleted and/or inserted lines
            int startA = lineA;
            int startB = lineB;
            while (lineA < DataA.Length && (lineB >= DataB.Length || DataA.modified[lineA]))
                lineA++;
            while (lineB < DataB.Length && (lineA >= DataA.Length || DataB.modified[lineB]))
                lineB++;
            if ((startA < lineA) || (startB < lineB)) {
                // store a new difference-item
                Item aItem = new Item();
                aItem.StartA = startA;
                aItem.StartB = startB;
                aItem.deletedA = lineA - startA;
                aItem.insertedB = lineB - startB;
                collected.Add(aItem);
            } // if
        } // if
    } // while
    Item[] result = new Item[collected.Count];
    collected.CopyTo(result);
    return result;
}
} // class Diff
/// <summary>Data on one input file being compared.
/// </summary>
internal class DiffData {
/// <summary>Number of elements (lines).</summary>
internal int Length;
/// <summary>Buffer of numbers that will be compared.</summary>
internal int[] data;
/// <summary>
/// Array of booleans that flag for modified data.
/// This is the result of the diff.
/// This means deletedA in the first Data or inserted in the second Data.
/// </summary>
internal bool[] modified;
/// <summary>
/// Initialize the Diff-Data buffer.
/// </summary>
/// <param name="initData">reference to the buffer of comparable numbers (not copied)</param>
internal DiffData(int[] initData) {
data = initData;
Length = initData.Length;
// two extra slots: the algorithms may probe one element past either end
modified = new bool[Length + 2];
} // DiffData
} // class DiffData
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Xunit;
using Microsoft.Xunit.Performance;
using System.IO;
using System.Collections.Generic;
namespace System.Diagnostics.Tests
{
public class Perf_Process : ProcessTestBase
{
/// <summary>Measures Process.Kill across many running child processes.</summary>
[Benchmark(Skip="Issue 16653")]
public void Kill()
{
const int inneriterations = 500;
foreach (var iteration in Benchmark.Iterations)
{
// Create several processes to test on
Process[] processes = new Process[inneriterations];
for (int i = 0; i < inneriterations; i++)
{
processes[i] = CreateProcessLong();
processes[i].Start();
}
// Begin Testing - Kill all of the processes
using (iteration.StartMeasurement())
for (int i = 0; i < inneriterations; i++)
processes[i].Kill();
// Cleanup the processes (already killed, so just reap and dispose)
foreach (Process proc in processes)
{
proc.WaitForExit();
proc.Dispose();
}
}
}
/// <summary>Measures Process.GetProcessesByName with a normalized number of live processes.</summary>
[Benchmark(Skip="Issue 16653")]
public void GetProcessesByName()
{
// To offset a different number of processes on a different machine, I create dummy processes
// until our baseline number is reached.
const int baseline = 300;
int numberOfProcesses = Process.GetProcesses().Length;
List<Process> processes = new List<Process>();
while (numberOfProcesses++ <= baseline)
{
Process proc = CreateProcess();
proc.Start();
processes.Add(proc);
}
// Begin testing: "1" is a name no process should have, so each call scans without matches
foreach (var iteration in Benchmark.Iterations)
using (iteration.StartMeasurement())
{
Process.GetProcessesByName("1"); Process.GetProcessesByName("1"); Process.GetProcessesByName("1");
Process.GetProcessesByName("1"); Process.GetProcessesByName("1"); Process.GetProcessesByName("1");
Process.GetProcessesByName("1"); Process.GetProcessesByName("1"); Process.GetProcessesByName("1");
}
// Cleanup
foreach (Process proc in processes)
{
if (!proc.HasExited)
proc.Kill();
proc.WaitForExit();
proc.Dispose();
}
}
/// <summary>Measures reading Process.Id across many running child processes.</summary>
[Benchmark(Skip="Issue 16653")]
public void GetId()
{
    // Named constant instead of the repeated magic number 500, matching Kill/GetStandardOutput.
    const int innerIterations = 500;
    int id;
    foreach (var iteration in Benchmark.Iterations)
    {
        // Create several processes to test on
        Process[] processes = new Process[innerIterations];
        for (int i = 0; i < innerIterations; i++)
        {
            processes[i] = CreateProcess();
            processes[i].Start();
        }
        // Begin Testing
        using (iteration.StartMeasurement())
            for (int i = 0; i < innerIterations; i++)
                id = processes[i].Id;
        // Cleanup the processes
        foreach (Process proc in processes)
        {
            if (!proc.HasExited)
                proc.Kill();
            proc.WaitForExit();
            proc.Dispose();
        }
    }
}
/// <summary>Measures Process.Start across many pre-created (not yet started) child processes.</summary>
[Benchmark(Skip="Issue 16653")]
public void Start()
{
    // Named constant instead of the repeated magic number 500, matching Kill/GetStandardOutput.
    const int innerIterations = 500;
    foreach (var iteration in Benchmark.Iterations)
    {
        // Create several processes to test on
        Process[] processes = new Process[innerIterations];
        for (int i = 0; i < innerIterations; i++)
        {
            processes[i] = CreateProcess();
        }
        // Begin Testing
        using (iteration.StartMeasurement())
            for (int i = 0; i < innerIterations; i++)
                processes[i].Start();
        // Cleanup the processes
        foreach (Process proc in processes)
        {
            if (!proc.HasExited)
                proc.Kill();
            proc.WaitForExit();
            proc.Dispose();
        }
    }
}
/// <summary>Measures reading Process.HasExited across many running child processes.</summary>
[Benchmark(Skip="Issue 16653")]
public void GetHasExited()
{
    // Named constant instead of the repeated magic number 500, matching Kill/GetStandardOutput.
    const int innerIterations = 500;
    bool result;
    foreach (var iteration in Benchmark.Iterations)
    {
        // Create several processes to test on
        Process[] processes = new Process[innerIterations];
        for (int i = 0; i < innerIterations; i++)
        {
            processes[i] = CreateProcess();
            processes[i].Start();
        }
        // Begin Testing
        using (iteration.StartMeasurement())
            for (int i = 0; i < innerIterations; i++)
                result = processes[i].HasExited;
        // Cleanup the processes
        foreach (Process proc in processes)
        {
            if (!proc.HasExited)
                proc.Kill();
            proc.WaitForExit();
            proc.Dispose();
        }
    }
}
/// <summary>Measures reading Process.ExitCode across many already-exited child processes.</summary>
[Benchmark(Skip="Issue 16653")]
public void GetExitCode()
{
    // Named constant instead of the repeated magic number 500, matching Kill/GetStandardOutput.
    const int innerIterations = 500;
    int result;
    foreach (var iteration in Benchmark.Iterations)
    {
        // Create several processes to test on; each must have exited before ExitCode is readable
        Process[] processes = new Process[innerIterations];
        for (int i = 0; i < innerIterations; i++)
        {
            processes[i] = CreateProcess();
            processes[i].Start();
            processes[i].WaitForExit();
        }
        // Begin Testing
        using (iteration.StartMeasurement())
            for (int i = 0; i < innerIterations; i++)
                result = processes[i].ExitCode;
        // Cleanup the processes
        foreach (Process proc in processes)
        {
            proc.Dispose();
        }
    }
}
/// <summary>Measures reading Process.StartInfo across many running child processes.</summary>
[Benchmark(Skip="Issue 16653")]
public void GetStartInfo()
{
    // Named constant instead of the repeated magic number 500, matching Kill/GetStandardOutput.
    const int innerIterations = 500;
    ProcessStartInfo result;
    foreach (var iteration in Benchmark.Iterations)
    {
        // Create several processes to test on
        Process[] processes = new Process[innerIterations];
        for (int i = 0; i < innerIterations; i++)
        {
            processes[i] = CreateProcess();
            processes[i].Start();
        }
        // Begin Testing
        using (iteration.StartMeasurement())
            for (int i = 0; i < innerIterations; i++)
                result = processes[i].StartInfo;
        // Cleanup the processes
        foreach (Process proc in processes)
        {
            if (!proc.HasExited)
                proc.Kill();
            proc.WaitForExit();
            proc.Dispose();
        }
    }
}
/// <summary>Measures draining redirected standard output from many child processes.</summary>
[Benchmark(Skip="Issue 16653")]
public void GetStandardOutput()
{
const int innerIterations = 200;
foreach (var iteration in Benchmark.Iterations)
{
// Create several processes to test on; each child writes a known number of lines to stdout
Process[] processes = new Process[innerIterations];
Func<int> method = () =>
{
for (int j = 0; j < innerIterations; j++)
Console.WriteLine("Redirected String");
return SuccessExitCode;
};
for (int i = 0; i < innerIterations; i++)
{
processes[i] = CreateProcess(method);
processes[i].StartInfo.RedirectStandardOutput = true;
processes[i].Start();
}
// Begin Testing
using (iteration.StartMeasurement())
for (int i = 0; i < innerIterations; i++)
processes[i].StandardOutput.ReadToEnd();
// Cleanup the processes
foreach (Process proc in processes)
{
if (!proc.HasExited)
proc.Kill();
proc.WaitForExit();
proc.Dispose();
}
}
}
}
}
| |
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Orleans.Providers.Streams.Common;
using Orleans.Runtime;
using Orleans.Streams;
using OrleansAWSUtils.Streams;
using AWSUtils.Tests.StorageTests;
using TestExtensions;
using Xunit;
using Xunit.Abstractions;
using OrleansAWSUtils.Storage;
using Orleans.Configuration;
using Orleans.Internal;
namespace AWSUtils.Tests.Streaming
{
[TestCategory("AWS"), TestCategory("SQS")]
[Collection(TestEnvironmentFixture.DefaultCollection)]
public class SQSAdapterTests : IAsyncLifetime
{
private readonly ITestOutputHelper output;
private readonly TestEnvironmentFixture fixture;
private const int NumBatches = 20;
private const int NumMessagesPerBatch = 20;
private readonly string clusterId;
public static readonly string SQS_STREAM_PROVIDER_NAME = "SQSAdapterTests";
/// <summary>
/// Sets up the fixture; skips the whole test class when no SQS connection string is configured.
/// </summary>
public SQSAdapterTests(ITestOutputHelper output, TestEnvironmentFixture fixture)
{
if (!AWSTestConstants.IsSqsAvailable)
{
throw new SkipException("Empty connection string");
}
this.output = output;
this.fixture = fixture;
// unique per test run so queues from parallel/previous runs do not collide
this.clusterId = MakeClusterId();
}
// No async setup required; queues are created lazily by the adapter under test.
public Task InitializeAsync() => Task.CompletedTask;
/// <summary>
/// Deletes every SQS queue this test run created, so queues do not leak between runs.
/// </summary>
public async Task DisposeAsync()
{
if (!string.IsNullOrWhiteSpace(AWSTestConstants.DefaultSQSConnectionString))
{
await SQSStreamProviderUtils.DeleteAllUsedQueues(
SQS_STREAM_PROVIDER_NAME,
this.clusterId,
AWSTestConstants.DefaultSQSConnectionString,
NullLoggerFactory.Instance);
}
}
/// <summary>
/// Builds a real SQS adapter factory from the configured connection string and
/// runs the full send/receive round-trip against it.
/// </summary>
[SkippableFact]
public async Task SendAndReceiveFromSQS()
{
var options = new SqsOptions
{
ConnectionString = AWSTestConstants.DefaultSQSConnectionString,
};
var adapterFactory = new SQSAdapterFactory(SQS_STREAM_PROVIDER_NAME, options, new HashRingStreamQueueMapperOptions(), new SimpleQueueCacheOptions(), Options.Create(new ClusterOptions()), null, null);
adapterFactory.Init();
await SendAndReceiveFromQueueAdapter(adapterFactory);
}
/// <summary>
/// End-to-end exercise of a queue adapter: sends NumBatches batches across two streams,
/// reads them back through per-queue receivers, then replays them from the queue cache
/// and checks ordering and counts.
/// </summary>
private async Task SendAndReceiveFromQueueAdapter(IQueueAdapterFactory adapterFactory)
{
IQueueAdapter adapter = await adapterFactory.CreateAdapter();
IQueueAdapterCache cache = adapterFactory.GetQueueAdapterCache();
// Create receiver per queue
IStreamQueueMapper mapper = adapterFactory.GetStreamQueueMapper();
Dictionary<QueueId, IQueueAdapterReceiver> receivers = mapper.GetAllQueues().ToDictionary(queueId => queueId, adapter.CreateReceiver);
Dictionary<QueueId, IQueueCache> caches = mapper.GetAllQueues().ToDictionary(queueId => queueId, cache.CreateQueueCache);
await Task.WhenAll(receivers.Values.Select(receiver => receiver.Initialize(TimeSpan.FromSeconds(5))));
// test using 2 streams
Guid streamId1 = Guid.NewGuid();
Guid streamId2 = Guid.NewGuid();
// NOTE(review): receivedBatches is incremented via Interlocked.Add but read plainly in the
// polling loop condition below — appears to rely on eventual visibility across workers; confirm.
int receivedBatches = 0;
var streamsPerQueue = new ConcurrentDictionary<QueueId, HashSet<StreamId>>();
// reader threads (at most 2 active queues because only two streams)
var work = new List<Task>();
foreach (KeyValuePair<QueueId, IQueueAdapterReceiver> receiverKvp in receivers)
{
QueueId queueId = receiverKvp.Key;
var receiver = receiverKvp.Value;
var qCache = caches[queueId];
Task task = Task.Factory.StartNew(() =>
{
while (receivedBatches < NumBatches)
{
// synchronous .Result polling is deliberate here: each worker is a dedicated task
var messages = receiver.GetQueueMessagesAsync(SQSStorage.MAX_NUMBER_OF_MESSAGE_TO_PEAK).Result.ToArray();
if (!messages.Any())
{
continue;
}
foreach (var message in messages.Cast<SQSBatchContainer>())
{
// record which streams were observed on this queue for the cache checks below
streamsPerQueue.AddOrUpdate(queueId,
id => new HashSet<StreamId> { message.StreamId },
(id, set) =>
{
set.Add(message.StreamId);
return set;
});
output.WriteLine("Queue {0} received message on stream {1}", queueId,
message.StreamId);
Assert.Equal(NumMessagesPerBatch / 2, message.GetEvents<int>().Count()); // "Half the events were ints"
Assert.Equal(NumMessagesPerBatch / 2, message.GetEvents<string>().Count()); // "Half the events were strings"
}
Interlocked.Add(ref receivedBatches, messages.Length);
qCache.AddToCache(messages);
}
});
work.Add(task);
}
// send events: batches alternate between the two streams
List<object> events = CreateEvents(NumMessagesPerBatch);
work.Add(Task.Factory.StartNew(() => Enumerable.Range(0, NumBatches)
.Select(i => i % 2 == 0 ? streamId1 : streamId2)
.ToList()
.ForEach(streamId =>
adapter.QueueMessageBatchAsync(StreamId.Create(streamId.ToString(), streamId),
events.Take(NumMessagesPerBatch).ToArray(), null, RequestContextExtensions.Export(this.fixture.DeepCopier)).Wait())));
await Task.WhenAll(work);
// Make sure we got back everything we sent
Assert.Equal(NumBatches, receivedBatches);
// check to see if all the events are in the cache and we can enumerate through them
StreamSequenceToken firstInCache = new EventSequenceTokenV2(0);
foreach (KeyValuePair<QueueId, HashSet<StreamId>> kvp in streamsPerQueue)
{
var receiver = receivers[kvp.Key];
var qCache = caches[kvp.Key];
foreach (StreamId streamGuid in kvp.Value)
{
// read all messages in cache for stream
IQueueCacheCursor cursor = qCache.GetCacheCursor(streamGuid, firstInCache);
int messageCount = 0;
StreamSequenceToken tenthInCache = null;
StreamSequenceToken lastToken = firstInCache;
while (cursor.MoveNext())
{
Exception ex;
messageCount++;
IBatchContainer batch = cursor.GetCurrent(out ex);
output.WriteLine("Token: {0}", batch.SequenceToken);
// tokens must be non-decreasing while walking the cache
Assert.True(batch.SequenceToken.CompareTo(lastToken) >= 0, $"order check for event {messageCount}");
lastToken = batch.SequenceToken;
if (messageCount == 10)
{
tenthInCache = batch.SequenceToken;
}
}
output.WriteLine("On Queue {0} we received a total of {1} message on stream {2}", kvp.Key, messageCount, streamGuid);
Assert.Equal(NumBatches / 2, messageCount);
Assert.NotNull(tenthInCache);
// read all messages from the 10th
cursor = qCache.GetCacheCursor(streamGuid, tenthInCache);
messageCount = 0;
while (cursor.MoveNext())
{
messageCount++;
}
output.WriteLine("On Queue {0} we received a total of {1} message on stream {2}", kvp.Key, messageCount, streamGuid);
const int expected = NumBatches / 2 - 10 + 1; // all except the first 10, including the 10th (10 + 1)
Assert.Equal(expected, messageCount);
}
}
}
private List<object> CreateEvents(int count)
{
    // Build an alternating sequence of payloads so the tests can verify typed
    // retrieval: even indexes carry ints, odd indexes carry their string form.
    var generated = new List<object>(count);
    for (int index = 0; index < count; index++)
    {
        int sample = ThreadSafeRandom.Next(int.MaxValue);
        generated.Add(index % 2 == 0
            ? (object)sample
            : sample.ToString(CultureInfo.InvariantCulture));
    }
    return generated;
}
internal static string MakeClusterId()
{
    // Builds a unique-per-moment cluster id from the current UTC time.
    // "HH" (24-hour clock) is used instead of "hh": with "hh" and no AM/PM
    // designator, ids generated 12 hours apart (e.g. 01:00 and 13:00) would be
    // indistinguishable. InvariantCulture pins the formatting so the id shape
    // does not vary with the current thread culture.
    const string DeploymentIdFormat = "cluster-{0}";
    string now = DateTime.UtcNow.ToString("yyyy-MM-dd-HH-mm-ss-ffff", CultureInfo.InvariantCulture);
    return String.Format(DeploymentIdFormat, now);
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 1.0.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Internal.Resources
{
using Azure;
using Management;
using Internal;
using Rest;
using Rest.Azure;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Azure.Internal.Subscriptions.Models;
/// <summary>
/// ProvidersOperations operations.
/// </summary>
internal partial class ProvidersOperations : IServiceOperations<ResourceManagementClient>, IProvidersOperations
{
/// <summary>
/// Initializes a new instance of the ProvidersOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
internal ProvidersOperations(ResourceManagementClient client)
{
    // The service client is mandatory: every operation in this class relies on
    // it for the base URI, credentials, serializer settings and HttpClient.
    if (client == null)
    {
        throw new System.ArgumentNullException("client");
    }
    Client = client;
}
/// <summary>
/// Gets a reference to the ResourceManagementClient
/// </summary>
public ResourceManagementClient Client { get; private set; }
/// <summary>
/// Unregisters a subscription from a resource provider.
/// </summary>
/// <param name='resourceProviderNamespace'>
/// The namespace of the resource provider to unregister.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<Provider>> UnregisterWithHttpMessagesAsync(string resourceProviderNamespace, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // NOTE(review): auto-generated by AutoRest — prefer regenerating over hand-editing.
    // Client-side validation: these three values are required to build the request URL.
    if (resourceProviderNamespace == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "resourceProviderNamespace");
    }
    if (Client.ApiVersion == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
    }
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceProviderNamespace", resourceProviderNamespace);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "Unregister", tracingParameters);
    }
    // Construct URL
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/unregister").ToString();
    _url = _url.Replace("{resourceProviderNamespace}", System.Uri.EscapeDataString(resourceProviderNamespace));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (Client.ApiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new System.Net.Http.HttpRequestMessage();
    System.Net.Http.HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new System.Net.Http.HttpMethod("POST");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Caller-supplied headers override any defaults set above.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null; // POST with an empty body; nothing to serialize for this operation.
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Non-200: throw CloudException enriched with the parsed service error
        // body when available; otherwise keep the generic status-code message.
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (Newtonsoft.Json.JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<Provider>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Provider>(_responseContent, Client.DeserializationSettings);
        }
        catch (Newtonsoft.Json.JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Registers a subscription with a resource provider.
/// </summary>
/// <param name='resourceProviderNamespace'>
/// The namespace of the resource provider to register.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<Provider>> RegisterWithHttpMessagesAsync(string resourceProviderNamespace, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // NOTE(review): auto-generated by AutoRest — prefer regenerating over hand-editing.
    // Client-side validation: these three values are required to build the request URL.
    if (resourceProviderNamespace == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "resourceProviderNamespace");
    }
    if (Client.ApiVersion == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
    }
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceProviderNamespace", resourceProviderNamespace);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "Register", tracingParameters);
    }
    // Construct URL
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/register").ToString();
    _url = _url.Replace("{resourceProviderNamespace}", System.Uri.EscapeDataString(resourceProviderNamespace));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (Client.ApiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new System.Net.Http.HttpRequestMessage();
    System.Net.Http.HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new System.Net.Http.HttpMethod("POST");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Caller-supplied headers override any defaults set above.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null; // POST with an empty body; nothing to serialize for this operation.
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Non-200: throw CloudException enriched with the parsed service error
        // body when available; otherwise keep the generic status-code message.
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (Newtonsoft.Json.JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<Provider>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Provider>(_responseContent, Client.DeserializationSettings);
        }
        catch (Newtonsoft.Json.JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Gets all resource providers for a subscription.
/// </summary>
/// <param name='top'>
/// The number of results to return. If null is passed returns all deployments.
/// </param>
/// <param name='expand'>
/// The properties to include in the results. For example, use
/// &$expand=metadata in the query string to retrieve resource provider
/// metadata. To include property aliases in response, use
/// $expand=resourceTypes/aliases.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<Provider>>> ListWithHttpMessagesAsync(int? top = default(int?), string expand = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // NOTE(review): auto-generated by AutoRest — prefer regenerating over hand-editing.
    // Client-side validation: both values are required to build the request URL.
    if (Client.ApiVersion == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
    }
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("top", top);
        tracingParameters.Add("expand", expand);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters);
    }
    // Construct URL
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/providers").ToString();
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (top != null)
    {
        // Trim('"') strips the quotes the JSON serializer adds around the value.
        _queryParameters.Add(string.Format("$top={0}", System.Uri.EscapeDataString(Rest.Serialization.SafeJsonConvert.SerializeObject(top, Client.SerializationSettings).Trim('"'))));
    }
    if (expand != null)
    {
        _queryParameters.Add(string.Format("$expand={0}", System.Uri.EscapeDataString(expand)));
    }
    if (Client.ApiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new System.Net.Http.HttpRequestMessage();
    System.Net.Http.HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new System.Net.Http.HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Caller-supplied headers override any defaults set above.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null; // GET request; no body to serialize.
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Non-200: throw CloudException enriched with the parsed service error
        // body when available; otherwise keep the generic status-code message.
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (Newtonsoft.Json.JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<IPage<Provider>>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<Provider>>(_responseContent, Client.DeserializationSettings);
        }
        catch (Newtonsoft.Json.JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Gets the specified resource provider.
/// </summary>
/// <param name='resourceProviderNamespace'>
/// The namespace of the resource provider.
/// </param>
/// <param name='expand'>
/// The $expand query parameter. For example, to include property aliases in
/// response, use $expand=resourceTypes/aliases.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<Provider>> GetWithHttpMessagesAsync(string resourceProviderNamespace, string expand = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // NOTE(review): auto-generated by AutoRest — prefer regenerating over hand-editing.
    // Client-side validation: these three values are required to build the request URL.
    if (resourceProviderNamespace == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "resourceProviderNamespace");
    }
    if (Client.ApiVersion == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
    }
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("expand", expand);
        tracingParameters.Add("resourceProviderNamespace", resourceProviderNamespace);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters);
    }
    // Construct URL
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}").ToString();
    _url = _url.Replace("{resourceProviderNamespace}", System.Uri.EscapeDataString(resourceProviderNamespace));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (expand != null)
    {
        _queryParameters.Add(string.Format("$expand={0}", System.Uri.EscapeDataString(expand)));
    }
    if (Client.ApiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new System.Net.Http.HttpRequestMessage();
    System.Net.Http.HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new System.Net.Http.HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Caller-supplied headers override any defaults set above.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null; // GET request; no body to serialize.
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Non-200: throw CloudException enriched with the parsed service error
        // body when available; otherwise keep the generic status-code message.
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (Newtonsoft.Json.JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<Provider>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Provider>(_responseContent, Client.DeserializationSettings);
        }
        catch (Newtonsoft.Json.JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Gets all resource providers for a subscription.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<Provider>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // NOTE(review): auto-generated by AutoRest — prefer regenerating over hand-editing.
    if (nextPageLink == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink");
    }
    // Tracing
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("nextPageLink", nextPageLink);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "ListNext", tracingParameters);
    }
    // Construct URL
    // nextPageLink is already an absolute URL returned by the service, so it is
    // used verbatim (not escaped and not combined with the client's base URI).
    string _url = "{nextLink}";
    _url = _url.Replace("{nextLink}", nextPageLink);
    List<string> _queryParameters = new List<string>();
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects
    var _httpRequest = new System.Net.Http.HttpRequestMessage();
    System.Net.Http.HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new System.Net.Http.HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    if (customHeaders != null)
    {
        // Caller-supplied headers override any defaults set above.
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request
    string _requestContent = null; // GET request; no body to serialize.
    // Set Credentials
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    if ((int)_statusCode != 200)
    {
        // Non-200: throw CloudException enriched with the parsed service error
        // body when available; otherwise keep the generic status-code message.
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (Newtonsoft.Json.JsonException)
        {
            // Ignore the exception
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<IPage<Provider>>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<Provider>>(_responseContent, Client.DeserializationSettings);
        }
        catch (Newtonsoft.Json.JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
}
}
| |
// This file is auto-generated. Do not modify it.
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Wu Yuntao
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
using System;
namespace SecurePrimitive
{
/// <summary>
/// Generated conversion operators between <see cref="SPByte"/> (a wrapper around
/// <c>byte</c> exposing <c>Value</c>) and the other SP* wrappers / built-in integral
/// types. Only the lossless byte &lt;-&gt; SPByte conversions are implicit; every other
/// conversion is explicit. Casts are unchecked, so out-of-range values truncate
/// exactly like a plain (byte) cast would.
/// </summary>
public partial struct SPByte
{
    #region SPByte <-> SPSByte
    public static explicit operator SPByte(SPSByte value)
    {
        return new SPByte((byte)value.Value);
    }
    public static explicit operator SPSByte(SPByte value)
    {
        return new SPSByte((sbyte)value.Value);
    }
    #endregion
    #region SPByte <-> sbyte
    public static explicit operator SPByte(sbyte value)
    {
        return new SPByte((byte)value);
    }
    public static explicit operator sbyte(SPByte value)
    {
        return (sbyte)value.Value;
    }
    #endregion
    #region SPByte <-> byte
    // Widening/identity conversions: the only implicit pair in this struct.
    public static implicit operator SPByte(byte value)
    {
        return new SPByte(value);
    }
    public static implicit operator byte(SPByte value)
    {
        return value.Value;
    }
    #endregion
    #region SPByte <-> SPInt16
    public static explicit operator SPByte(SPInt16 value)
    {
        return new SPByte((byte)value.Value);
    }
    public static explicit operator SPInt16(SPByte value)
    {
        return new SPInt16((short)value.Value);
    }
    #endregion
    #region SPByte <-> short
    public static explicit operator SPByte(short value)
    {
        return new SPByte((byte)value);
    }
    public static explicit operator short(SPByte value)
    {
        return (short)value.Value;
    }
    #endregion
    #region SPByte <-> SPUInt16
    public static explicit operator SPByte(SPUInt16 value)
    {
        return new SPByte((byte)value.Value);
    }
    public static explicit operator SPUInt16(SPByte value)
    {
        return new SPUInt16((ushort)value.Value);
    }
    #endregion
    #region SPByte <-> ushort
    public static explicit operator SPByte(ushort value)
    {
        return new SPByte((byte)value);
    }
    public static explicit operator ushort(SPByte value)
    {
        return (ushort)value.Value;
    }
    #endregion
    #region SPByte <-> SPInt32
    public static explicit operator SPByte(SPInt32 value)
    {
        return new SPByte((byte)value.Value);
    }
    public static explicit operator SPInt32(SPByte value)
    {
        return new SPInt32((int)value.Value);
    }
    #endregion
    #region SPByte <-> int
    public static explicit operator SPByte(int value)
    {
        return new SPByte((byte)value);
    }
    public static explicit operator int(SPByte value)
    {
        return (int)value.Value;
    }
    #endregion
    #region SPByte <-> SPUInt32
    public static explicit operator SPByte(SPUInt32 value)
    {
        return new SPByte((byte)value.Value);
    }
    public static explicit operator SPUInt32(SPByte value)
    {
        return new SPUInt32((uint)value.Value);
    }
    #endregion
    #region SPByte <-> uint
    public static explicit operator SPByte(uint value)
    {
        return new SPByte((byte)value);
    }
    public static explicit operator uint(SPByte value)
    {
        return (uint)value.Value;
    }
    #endregion
    #region SPByte <-> SPInt64
    public static explicit operator SPByte(SPInt64 value)
    {
        return new SPByte((byte)value.Value);
    }
    public static explicit operator SPInt64(SPByte value)
    {
        return new SPInt64((long)value.Value);
    }
    #endregion
    #region SPByte <-> long
    public static explicit operator SPByte(long value)
    {
        return new SPByte((byte)value);
    }
    public static explicit operator long(SPByte value)
    {
        return (long)value.Value;
    }
    #endregion
    #region SPByte <-> SPUInt64
    public static explicit operator SPByte(SPUInt64 value)
    {
        return new SPByte((byte)value.Value);
    }
    public static explicit operator SPUInt64(SPByte value)
    {
        return new SPUInt64((ulong)value.Value);
    }
    #endregion
    #region SPByte <-> ulong
    public static explicit operator SPByte(ulong value)
    {
        return new SPByte((byte)value);
    }
    public static explicit operator ulong(SPByte value)
    {
        return (ulong)value.Value;
    }
    #endregion
}
}
| |
using UnityEngine;
using System.Collections.Generic;
using UMA.CharacterSystem;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.Animations;
#endif
namespace UMA
{
public class UMAAssetIndexer : MonoBehaviour, ISerializationCallbackReceiver
{
#region constants and static strings
// Sort key currently selected in the indexer UI.
public static string SortOrder = "Name";
// Sort keys the UI can choose between.
public static string[] SortOrders = { "Name", "AssetName" };
// Maps a type's short name (e.g. "SlotDataAsset") to its System.Type; rebuilt by BuildStringTypes().
public static Dictionary<string, System.Type> TypeFromString = new Dictionary<string, System.Type>();
// Secondary index of AssetItems keyed by asset GUID (see AddAssetItem / FromGuid).
public static Dictionary<string, AssetItem> GuidTypes = new Dictionary<string, AssetItem>();
#endregion
#region Fields
// When true, OnPostprocessAllAssets keeps the index in sync with editor asset moves.
public bool AutoUpdate;
// Maps an indexed type to the "canonical" key type used for dictionary lookups;
// the animator-controller variants all share the RuntimeAnimatorController bucket.
private Dictionary<System.Type, System.Type> TypeToLookup = new Dictionary<System.Type, System.Type>()
{
    { (typeof(SlotDataAsset)),(typeof(SlotDataAsset)) },
    { (typeof(OverlayDataAsset)),(typeof(OverlayDataAsset)) },
    { (typeof(RaceData)),(typeof(RaceData)) },
    { (typeof(UMATextRecipe)),(typeof(UMATextRecipe)) },
    { (typeof(UMAWardrobeRecipe)),(typeof(UMAWardrobeRecipe)) },
    { (typeof(UMAWardrobeCollection)),(typeof(UMAWardrobeCollection)) },
    { (typeof(RuntimeAnimatorController)),(typeof(RuntimeAnimatorController)) },
    { (typeof(AnimatorOverrideController)),(typeof(RuntimeAnimatorController)) },
#if UNITY_EDITOR
    { (typeof(AnimatorController)),(typeof(RuntimeAnimatorController)) },
#endif
    { typeof(TextAsset), typeof(TextAsset) },
    { (typeof(DynamicUMADnaAsset)), (typeof(DynamicUMADnaAsset)) }
};
// The names of the fully qualified types.
public List<string> IndexedTypeNames = new List<string>();
// This list is used so Unity will serialize the data.
public List<AssetItem> SerializedItems = new List<AssetItem>();
// This is really where we keep the data.
private Dictionary<System.Type, Dictionary<string, AssetItem>> TypeLookup = new Dictionary<System.Type, Dictionary<string, AssetItem>>();
// This list tracks the types for use in iterating through the dictionaries
private System.Type[] Types =
{
    (typeof(SlotDataAsset)),
    (typeof(OverlayDataAsset)),
    (typeof(RaceData)),
    (typeof(UMATextRecipe)),
    (typeof(UMAWardrobeRecipe)),
    (typeof(UMAWardrobeCollection)),
    (typeof(RuntimeAnimatorController)),
    (typeof(AnimatorOverrideController)),
#if UNITY_EDITOR
    (typeof(AnimatorController)),
#endif
    (typeof(DynamicUMADnaAsset)),
    (typeof(TextAsset))
};
#endregion
#region Static Fields
// Backing objects for the Instance singleton property.
static GameObject theIndex = null;
static UMAAssetIndexer theIndexer = null;
#endregion
/// <summary>
/// Starts a diagnostic stopwatch when TIMEINDEXER is defined; otherwise returns null.
/// Pass the result to StopTimer(), which compiles to a no-op in the same configuration.
/// </summary>
public static System.Diagnostics.Stopwatch StartTimer()
{
#if TIMEINDEXER
    Debug.Log("Timer started at " + Time.realtimeSinceStartup + " Sec");
    System.Diagnostics.Stopwatch st = new System.Diagnostics.Stopwatch();
    st.Start();
    return st;
#else
    // Timing disabled: callers receive null and StopTimer ignores it.
    return null;
#endif
}
/// <summary>
/// Stops a stopwatch from StartTimer() and logs the elapsed time under the given
/// label. When TIMEINDEXER is not defined the body is empty and both parameters
/// are ignored.
/// </summary>
public static void StopTimer(System.Diagnostics.Stopwatch st, string Status)
{
#if TIMEINDEXER
    st.Stop();
    Debug.Log(Status + " Completed " + st.ElapsedMilliseconds + "ms");
    return;
#endif
}
/// <summary>
/// Singleton accessor. In the editor the "AssetIndexer" prefab is loaded directly
/// from Resources and may return null when the resource or its component is
/// missing. In a player build the prefab is instantiated and flagged
/// HideAndDontSave so it is hidden from the hierarchy and excluded from saving.
/// </summary>
public static UMAAssetIndexer Instance
{
    get
    {
        if (theIndex == null || theIndexer == null)
        {
#if UNITY_EDITOR
            var st = StartTimer();
            theIndex = Resources.Load("AssetIndexer") as GameObject;
            if (theIndex == null)
            {
                return null;
            }
            theIndexer = theIndex.GetComponent<UMAAssetIndexer>();
            if (theIndexer == null)
            {
                return null;
            }
            StopTimer(st,"Asset index load");
#else
            theIndex = GameObject.Instantiate(Resources.Load<GameObject>("AssetIndexer")) as GameObject;
            theIndex.hideFlags = HideFlags.HideAndDontSave;
            theIndexer = theIndex.GetComponent<UMAAssetIndexer>();
#endif
        }
        return theIndexer;
    }
}
#if UNITY_EDITOR
/// <summary>
/// Editor asset-postprocess hook: updates indexed paths when assets are moved and
/// un-indexes items moved into an asset-bundle folder. Runs only when AutoUpdate
/// is enabled. importedAssets and deletedAssets are currently unused here.
/// </summary>
public void OnPostprocessAllAssets(string[] importedAssets, string[] deletedAssets, string[] movedAssets, string[] movedFromAssetPaths)
{
    if (!AutoUpdate)
    {
        return;
    }
    bool changed = false;
    // Build a dictionary of the items by path.
    Dictionary<string, AssetItem> ItemsByPath = new Dictionary<string, AssetItem>();
    UpdateSerializedList();
    foreach (AssetItem ai in SerializedItems)
    {
        if (ItemsByPath.ContainsKey(ai._Path))
        {
            Debug.Log("Duplicate path for item: " + ai._Path);
            continue;
        }
        ItemsByPath.Add(ai._Path, ai);
    }
    // see if they moved it in the editor.
    for (int i = 0; i < movedAssets.Length; i++)
    {
        string NewPath = movedAssets[i];
        string OldPath = movedFromAssetPaths[i];
        // Check to see if this is an indexed asset.
        if (ItemsByPath.ContainsKey(OldPath))
        {
            changed = true;
            // If they moved it into an Asset Bundle folder, then we need to "unindex" it.
            if (InAssetBundleFolder(NewPath))
            {
                // Null it out, so we don't add it to the index...
                ItemsByPath[OldPath] = null;
                continue;
            }
            // Otherwise just record the new location.
            ItemsByPath[OldPath]._Path = NewPath;
        }
    }
    // Rebuild the tables
    SerializedItems.Clear();
    foreach (AssetItem ai in ItemsByPath.Values)
    {
        // We null things out when we want to delete them. This prevents it from going back into
        // the dictionary when rebuilt.
        if (ai == null)
            continue;
        SerializedItems.Add(ai);
    }
    UpdateSerializedDictionaryItems();
    if (changed)
    {
        ForceSave();
    }
}
/// <summary>
/// Marks the index GameObject dirty and flushes all pending assets to disk,
/// timing the operation when TIMEINDEXER diagnostics are enabled.
/// </summary>
public void ForceSave()
{
    var timer = StartTimer();
    EditorUtility.SetDirty(gameObject);
    AssetDatabase.SaveAssets();
    StopTimer(timer, "ForceSave");
}
#endif
#region Manage Types
/// <summary>
/// Returns the array of every type the index currently tracks.
/// Note: this is the live array, not a copy.
/// </summary>
/// <returns>The tracked type array.</returns>
public System.Type[] GetTypes()
{
    return Types;
}
/// <summary>
/// True when <paramref name="type"/> is one of the types this indexer tracks.
/// </summary>
/// <param name="type">Type to test.</param>
/// <returns>true when the type has an entry in the lookup table.</returns>
public bool IsIndexedType(System.Type type)
{
    // Dictionary key lookup replaces the original O(n) scan over Keys;
    // Type keys compare by identity either way, so the result is unchanged.
    return TypeToLookup.ContainsKey(type);
}
/// <summary>
/// True when the given assembly-qualified type name has been registered via AddType().
/// </summary>
/// <param name="QualifiedName">Assembly-qualified type name.</param>
/// <returns>true when the name is present in IndexedTypeNames.</returns>
public bool IsAdditionalIndexedType(string QualifiedName)
{
    // List<string>.Contains performs the same element-equality scan as the
    // original manual foreach.
    return IndexedTypeNames.Contains(QualifiedName);
}
/// <summary>
/// Registers an additional type with the index. No-op when the type is already
/// registered. Rebuilds the short-name lookup afterwards.
/// </summary>
/// <param name="sType">The type to begin tracking.</param>
public void AddType(System.Type sType)
{
    string qualifiedName = sType.AssemblyQualifiedName;
    if (IsAdditionalIndexedType(qualifiedName))
    {
        return;
    }
    // Grow the tracked-type array by one.
    List<System.Type> expanded = new List<System.Type>(Types);
    expanded.Add(sType);
    Types = expanded.ToArray();
    TypeToLookup.Add(sType, sType);
    IndexedTypeNames.Add(qualifiedName);
    BuildStringTypes();
}
/// <summary>
/// Unregisters a type previously added with AddType() and drops its lookup
/// bucket. No-op for built-in (non-additional) types.
/// </summary>
/// <param name="sType">The type to stop tracking.</param>
public void RemoveType(System.Type sType)
{
    string qualifiedName = sType.AssemblyQualifiedName;
    if (!IsAdditionalIndexedType(qualifiedName))
    {
        return;
    }
    TypeToLookup.Remove(sType);
    // Shrink the tracked-type array.
    List<System.Type> shrunk = new List<System.Type>(Types);
    shrunk.Remove(sType);
    Types = shrunk.ToArray();
    TypeLookup.Remove(sType);
    IndexedTypeNames.Remove(qualifiedName);
    BuildStringTypes();
}
#endregion
#region Access the index
/// <summary>
/// Return the asset specified, if it exists, or null when the name is not indexed.
/// The lookup goes through TypeToLookup so aliased types share one bucket.
/// </summary>
/// <typeparam name="T">Indexed type to search.</typeparam>
/// <param name="Name">Index key (the asset's "evil" name).</param>
/// <returns>The matching AssetItem, or null.</returns>
public AssetItem GetAssetItem<T>(string Name)
{
    System.Type theType = TypeToLookup[typeof(T)];
    Dictionary<string, AssetItem> typeDic = GetAssetDictionary(theType);
    // TryGetValue replaces the original ContainsKey + indexer double lookup.
    // (A commented-out fallback scan over EvilName was removed as dead code.)
    AssetItem found;
    if (typeDic.TryGetValue(Name, out found))
    {
        return found;
    }
    return null;
}
/// <summary>
/// Gets the index name and name hash for the given object, preferring the
/// type-specific name fields (slotName/overlayName/raceName) where they exist.
/// </summary>
/// <param name="type">Declared type of the object (unused; kept for signature stability).</param>
/// <param name="o">Object to interrogate.</param>
/// <param name="assetName">Receives the index name.</param>
/// <param name="assetHash">Receives the name hash. BUG FIX: this parameter was
/// previously passed by value, so every assignment below was lost and the caller
/// always kept its -1 sentinel - hash-based lookups could never match.</param>
private void GetEvilAssetNameAndHash(System.Type type, Object o, ref string assetName, ref int assetHash)
{
    if (o is SlotDataAsset)
    {
        SlotDataAsset sd = o as SlotDataAsset;
        assetName = sd.slotName;
        assetHash = sd.nameHash;
    }
    else if (o is OverlayDataAsset)
    {
        OverlayDataAsset od = o as OverlayDataAsset;
        assetName = od.overlayName;
        assetHash = od.nameHash;
    }
    else if (o is RaceData)
    {
        RaceData rd = o as RaceData;
        assetName = rd.raceName;
        assetHash = UMAUtils.StringToHash(assetName);
    }
    else
    {
        // Fall back to the Unity object name.
        assetName = o.name;
        assetHash = UMAUtils.StringToHash(assetName);
    }
}
/// <summary>
/// Returns every indexed asset of type T, optionally filtered to the given folders.
/// </summary>
/// <typeparam name="T">Indexed asset type.</typeparam>
/// <param name="foldersToSearch">Optional folder filter; null/empty means no filtering.</param>
public List<T> GetAllAssets<T>(string[] foldersToSearch = null) where T : UnityEngine.Object
{
    var st = StartTimer();
    var ret = new List<T>();
    System.Type theType = TypeToLookup[typeof(T)];
    Dictionary<string, AssetItem> typeDic = GetAssetDictionary(theType);
    foreach (KeyValuePair<string, AssetItem> kp in typeDic)
    {
        if (AssetFolderCheck(kp.Value, foldersToSearch))
            ret.Add((kp.Value.Item as T));
    }
    StopTimer(st, "GetAllAssets type=" + typeof(T).Name);
    return ret;
}
/// <summary>
/// Finds an asset of type T by name hash. Returns null when nothing matches, or
/// when the first hash match is rejected by the folder filter. Lookups over 2ms
/// are logged for diagnostics.
/// </summary>
/// <typeparam name="T">Indexed asset type.</typeparam>
/// <param name="nameHash">Hash produced by the same scheme as GetEvilAssetNameAndHash.</param>
/// <param name="foldersToSearch">Optional folder filter.</param>
public T GetAsset<T>(int nameHash, string[] foldersToSearch = null) where T : UnityEngine.Object
{
    System.Diagnostics.Stopwatch st = new System.Diagnostics.Stopwatch();
    st.Start();
    System.Type ot = typeof(T);
    Dictionary<string, AssetItem> TypeDic = (Dictionary<string, AssetItem>)TypeLookup[ot];
    string assetName = "";
    int assetHash = -1;
    foreach (KeyValuePair<string, AssetItem> kp in TypeDic)
    {
        assetName = "";
        assetHash = -1;
        // BUG FIX: assetHash is now passed by ref so the computed hash actually
        // makes it back here; previously it was passed by value and stayed -1.
        GetEvilAssetNameAndHash(typeof(T), kp.Value.Item, ref assetName, ref assetHash);
        if (assetHash == nameHash)
        {
            if (AssetFolderCheck(kp.Value, foldersToSearch))
            {
                st.Stop();
                if (st.ElapsedMilliseconds > 2)
                {
                    Debug.Log("GetAsset 0 for type "+typeof(T).Name+" completed in " + st.ElapsedMilliseconds + "ms");
                }
                return (kp.Value.Item as T);
            }
            else
            {
                st.Stop();
                if (st.ElapsedMilliseconds > 2)
                {
                    Debug.Log("GetAsset 1 for type " + typeof(T).Name + " completed in " + st.ElapsedMilliseconds + "ms");
                }
                return null;
            }
        }
    }
    st.Stop();
    if (st.ElapsedMilliseconds > 2)
    {
        Debug.Log("GetAsset 2 for type " + typeof(T).Name + " completed in " + st.ElapsedMilliseconds + "ms");
    }
    return null;
}
/// <summary>
/// Finds an asset of type T by its index name, honoring the optional folder
/// filter. Returns null when the name is unknown or filtered out.
/// </summary>
/// <typeparam name="T">Indexed asset type.</typeparam>
/// <param name="name">Index name of the asset.</param>
/// <param name="foldersToSearch">Optional folder filter.</param>
public T GetAsset<T>(string name, string[] foldersToSearch = null) where T : UnityEngine.Object
{
    var item = GetAssetItem<T>(name);
    if (item == null)
    {
        return null;
    }
    return AssetFolderCheck(item, foldersToSearch) ? (item.Item as T) : null;
}
/// <summary>
/// Checks whether the item's path lies inside one of the given folder paths.
/// Returns true when foldersToSearch is null or empty (no filtering requested).
/// </summary>
/// <param name="itemToCheck">Indexed item whose _Path is tested.</param>
/// <param name="foldersToSearch">Substring folder filters.</param>
private bool AssetFolderCheck(AssetItem itemToCheck, string[] foldersToSearch = null)
{
    if (foldersToSearch == null || foldersToSearch.Length == 0)
    {
        return true;
    }
    foreach (string folder in foldersToSearch)
    {
        // Substring match, exactly as the original IndexOf > -1 test.
        if (itemToCheck._Path.IndexOf(folder) > -1)
        {
            return true;
        }
    }
    return false;
}
#if UNITY_EDITOR
/// <summary>
/// Check to see if an asset path is contained in any asset bundle. Bundled
/// assets are not added to the index.
/// </summary>
/// <param name="path">Asset database path to test.</param>
/// <returns>true when some bundle contains exactly this path.</returns>
public bool InAssetBundle(string path)
{
    foreach (string bundleName in AssetDatabase.GetAllAssetBundleNames())
    {
        List<string> pathsInBundle = new List<string>(AssetDatabase.GetAssetPathsFromAssetBundle(bundleName));
        if (pathsInBundle.Contains(path))
        {
            return true;
        }
    }
    return false;
}
/// <summary>
/// True when any asset bundle contains an asset from the same directory as
/// <paramref name="path"/> - i.e. the path points into an "asset bundle folder".
/// </summary>
/// <param name="path">Asset database path whose directory is tested.</param>
public bool InAssetBundleFolder(string path)
{
    string folder = System.IO.Path.GetDirectoryName(path);
    foreach (string bundleName in AssetDatabase.GetAllAssetBundleNames())
    {
        foreach (string bundledPath in AssetDatabase.GetAssetPathsFromAssetBundle(bundleName))
        {
            if (System.IO.Path.GetDirectoryName(bundledPath) == folder)
            {
                return true;
            }
        }
    }
    return false;
}
#endif
#endregion
#region Add Remove Assets
/// <summary>
/// Adds an asset to the index. Does NOT save the asset! You must do that separately.
/// Null objects are skipped with a log message.
/// </summary>
/// <param name="type">System Type of the object to add; defaults to o.GetType() when null.</param>
/// <param name="name">Name for the object.</param>
/// <param name="path">Path to the object.</param>
/// <param name="o">The Object to add.</param>
/// <param name="skipBundleCheck">Option to skip checking Asset Bundles.</param>
public void AddAsset(System.Type type, string name, string path, Object o, bool skipBundleCheck = false)
{
    if (o == null)
    {
        Debug.Log("Skipping null item");
        return;
    }
    AssetItem item = new AssetItem(type ?? o.GetType(), name, path, o);
    AddAssetItem(item, skipBundleCheck);
}
/// <summary>
/// Adds an asset to the index. If the name already exists, it is not added. (Should we do this, or replace it?)
/// Placeholder assets are never indexed, and (in the editor) neither are assets
/// living inside asset bundles unless SkipBundleCheck is set. Failures are logged
/// as warnings rather than thrown.
/// </summary>
/// <param name="ai">Item to add.</param>
/// <param name="SkipBundleCheck">When true, the editor-only bundle-membership check is skipped.</param>
private void AddAssetItem(AssetItem ai, bool SkipBundleCheck = false)
{
    try
    {
        System.Type theType = TypeToLookup[ai._Type];
        Dictionary<string, AssetItem> TypeDic = GetAssetDictionary(theType);
        // Get out if we already have it.
        if (TypeDic.ContainsKey(ai._Name))
        {
            // Debug.Log("Duplicate asset " + ai._Name + " was ignored.");
            return;
        }
        // Skip "<TypeName>placeholder" assets - they are templates, not content.
        if (ai._Name.ToLower().Contains((ai._Type.Name + "placeholder").ToLower()))
        {
            //Debug.Log("Placeholder asset " + ai._Name + " was ignored. Placeholders are not indexed.");
            return;
        }
#if UNITY_EDITOR
        if (!SkipBundleCheck)
        {
            string Path = AssetDatabase.GetAssetPath(ai.Item.GetInstanceID());
            if (InAssetBundle(Path))
            {
                // Debug.Log("Asset " + ai._Name + "is in Asset Bundle, and was not added to the index.")
                return;
            }
        }
#endif
        TypeDic.Add(ai._Name, ai);
        // Keep the first GUID entry; don't overwrite an existing one.
        if (GuidTypes.ContainsKey(ai._Guid))
        {
            return;
        }
        GuidTypes.Add(ai._Guid, ai);
    }
    catch (System.Exception ex)
    {
        UnityEngine.Debug.LogWarning("Exception in UMAAssetIndexer.AddAssetItem: " + ex);
    }
}
#if UNITY_EDITOR
/// <summary>
/// Looks up an AssetItem by its asset-database GUID.
/// </summary>
/// <param name="GUID">Asset database GUID string.</param>
/// <returns>The indexed item, or null when the GUID is unknown.</returns>
public AssetItem FromGuid(string GUID)
{
    // TryGetValue avoids the ContainsKey + indexer double dictionary lookup.
    AssetItem item;
    if (GuidTypes.TryGetValue(GUID, out item))
    {
        return item;
    }
    return null;
}
/// <summary>
/// Adds an asset knowing only its type bucket and object reference: the name is
/// derived inside AssetItem and the path is looked up from the AssetDatabase.
/// (Historically nicknamed "evil" because RuntimeAnimatorController prevented a
/// clean naming interface.) Like AddAsset, this does NOT save - call ForceSave()
/// yourself.
/// </summary>
/// <param name="type">Type bucket to index the object under.</param>
/// <param name="o">Object to index.</param>
public void EvilAddAsset(System.Type type, Object o)
{
    AssetItem item = new AssetItem(type, o);
    item._Path = AssetDatabase.GetAssetPath(o.GetInstanceID());
    AddAssetItem(item);
}
/// <summary>
/// Removes an asset from the index - both the per-type table and the GUID table.
/// No-op when the name is not indexed for the given type.
/// </summary>
/// <param name="type">Indexed type of the asset.</param>
/// <param name="Name">Index name of the asset.</param>
public void RemoveAsset(System.Type type, string Name)
{
    System.Type theType = TypeToLookup[type];
    Dictionary<string, AssetItem> TypeDic = GetAssetDictionary(theType);
    AssetItem ai;
    if (TypeDic.TryGetValue(Name, out ai))
    {
        TypeDic.Remove(Name);
        // BUG FIX: GuidTypes is keyed by GUID, but the original called
        // GuidTypes.Remove(Name), leaving a stale GUID entry behind forever.
        GuidTypes.Remove(ai._Guid);
    }
}
#endif
#endregion
#region Maintenance
/// <summary>
/// Updates the dictionaries from this list.
/// Used when restoring items after modification, or after deserialization.
/// Resets the GUID table and every per-type bucket before repopulating.
/// </summary>
private void UpdateSerializedDictionaryItems()
{
    GuidTypes = new Dictionary<string, AssetItem>();
    foreach (System.Type type in Types)
    {
        CreateLookupDictionary(type);
    }
    foreach (AssetItem ai in SerializedItems)
    {
        // We null things out when we want to delete them. This prevents it from going back into
        // the dictionary when rebuilt.
        if (ai == null)
            continue;
        // Bundle check is skipped: these items were vetted when first indexed.
        AddAssetItem(ai, true);
    }
}
/// <summary>
/// Creates (or replaces) the empty lookup dictionary for a type. Used when
/// reloading after deserialization.
/// </summary>
/// <param name="type">Type whose bucket should be reset.</param>
private void CreateLookupDictionary(System.Type type)
{
    // The indexer assigns whether or not the key exists, covering both branches
    // of the original ContainsKey check.
    TypeLookup[type] = new Dictionary<string, AssetItem>();
}
/// <summary>
/// Updates the list so all items can be processed at once, or for
/// serialization. Only "canonical" buckets (types that map to themselves in
/// TypeToLookup) are walked, so aliased types are not emitted twice.
/// </summary>
private void UpdateSerializedList()
{
    SerializedItems.Clear();
    foreach (System.Type type in TypeToLookup.Keys)
    {
        if (type == TypeToLookup[type])
        {
            Dictionary<string, AssetItem> TypeDic = GetAssetDictionary(type);
            foreach (AssetItem ai in TypeDic.Values)
            {
                // Don't add asset bundle or resource items to index. They are loaded on demand.
                if (ai.IsAssetBundle == false && ai.IsResource == false)
                {
                    SerializedItems.Add(ai);
                }
            }
        }
    }
}
/// <summary>
/// Rebuilds the short-name -> System.Type map (TypeFromString) from the tracked
/// type array.
/// </summary>
private void BuildStringTypes()
{
    TypeFromString.Clear();
    foreach (System.Type type in Types)
    {
        TypeFromString.Add(type.Name, type);
    }
}
#if UNITY_EDITOR
/// <summary>
/// Verifies every indexed item: refreshes name/path/GUID for items whose object
/// reference is intact, tries to reload broken references, and deletes items
/// that can no longer be located. Saves the repaired index when done.
/// </summary>
public void RepairAndCleanup()
{
    // Rebuild the tables
    UpdateSerializedList();
    for(int i=0;i<SerializedItems.Count;i++)
    {
        AssetItem ai = SerializedItems[i];
        if (!ai.IsAssetBundle)
        {
            // If we already have a reference to the item, let's verify that everything is correct on it.
            Object obj = ai.Item;
            if (obj != null)
            {
                ai._Name = ai.EvilName;
                ai._Path = AssetDatabase.GetAssetPath(obj.GetInstanceID());
                ai._Guid = AssetDatabase.AssetPathToGUID(ai._Path);
            }
            else
            {
                // Clear out the item reference so we will attempt to fix it if it's broken.
                ai._SerializedItem = null;
                // This will attempt to load the item, using the path, guid or name (in that order).
                // This is in case we didn't have a reference to the item, and it was moved
                ai.CachSerializedItem();
                // If an item can't be found and we didn't have a reference to it, then we need to delete it.
                if (ai._SerializedItem == null)
                {
                    // Can't be found or loaded
                    // null it out, so it doesn't get added back.
                    SerializedItems[i] = null;
                }
            }
        }
    }
    UpdateSerializedDictionaryItems();
    ForceSave();
}
/// <summary>
/// Clears the index and re-adds every matching project asset, running one
/// AssetDatabase.FindAssets query per tracked type. Paths containing ".shader"
/// are skipped, as is the "AnimatorController" short name (presumably covered by
/// the RuntimeAnimatorController query - confirm against UMA docs). Saves when done.
/// </summary>
/// <param name="includeText">When false, TextAsset entries are not indexed.</param>
public void AddEverything(bool includeText)
{
    Clear(false);
    foreach(string s in TypeFromString.Keys)
    {
        System.Type CurrentType = TypeFromString[s];
        if (!includeText)
        {
            if (CurrentType == typeof(TextAsset))
            {
                continue;
            }
        }
        if (s != "AnimatorController")
        {
            string[] guids = AssetDatabase.FindAssets("t:" + s);
            foreach (string guid in guids)
            {
                string Path = AssetDatabase.GUIDToAssetPath(guid);
                if (Path.ToLower().Contains(".shader"))
                {
                    continue;
                }
                Object o = AssetDatabase.LoadAssetAtPath(Path, CurrentType);
                if (o != null)
                {
                    AssetItem ai = new AssetItem(CurrentType, o);
                    AddAssetItem(ai);
                }
                else
                {
                    // Couldn't load: report whichever identifier we still have.
                    if (Path == null)
                    {
                        Debug.LogWarning("Cannot instantiate item " + guid);
                    }
                    else
                    {
                        Debug.LogWarning("Cannot instantiate item " + Path);
                    }
                }
            }
        }
    }
    ForceSave();
}
/// <summary>
/// Clears the index completely: GUID table, item references, serialized list,
/// and the per-type dictionaries.
/// </summary>
/// <param name="forceSave">When true, the emptied index is saved immediately.</param>
public void Clear(bool forceSave = true)
{
    GuidTypes.Clear();
    ClearReferences();
    SerializedItems.Clear();
    UpdateSerializedDictionaryItems();
    if (forceSave)
    {
        ForceSave();
    }
}
/// <summary>
/// Adds references to all items by accessing the item property. This forces
/// Unity to load each asset and hold a reference to it - builds need those
/// references because they cannot demand-load without the AssetDatabase.
/// </summary>
public void AddReferences()
{
    // Flatten, hydrate, and rebuild the tables.
    UpdateSerializedList();
    foreach (AssetItem item in SerializedItems)
    {
        if (!item.IsAssetBundle)
        {
            item.CachSerializedItem();
        }
    }
    UpdateSerializedDictionaryItems();
    ForceSave();
}
/// <summary>
/// Releases items by dereferencing them so they can be picked up by garbage
/// collection. This also makes working with the index much faster.
/// </summary>
public void ClearReferences()
{
    // Flatten, release, and rebuild the tables.
    UpdateSerializedList();
    foreach (AssetItem item in SerializedItems)
    {
        item.ReleaseItem();
    }
    UpdateSerializedDictionaryItems();
    ForceSave();
}
#endif
/// <summary>
/// Returns the entire lookup dictionary for a specific type, creating an empty
/// one on first access. Aliased types resolve through TypeToLookup first.
/// </summary>
/// <param name="type">Type whose bucket is requested.</param>
/// <returns>The live (mutable) dictionary for that type.</returns>
public Dictionary<string, AssetItem> GetAssetDictionary(System.Type type)
{
    System.Type lookupType = TypeToLookup[type];
    // TryGetValue replaces the original ContainsKey + two indexer accesses
    // (three hash lookups collapsed into at most two).
    Dictionary<string, AssetItem> dic;
    if (!TypeLookup.TryGetValue(lookupType, out dic))
    {
        dic = new Dictionary<string, AssetItem>();
        TypeLookup[lookupType] = dic;
    }
    return dic;
}
/// <summary>
/// Rebuilds the name indexes by dumping everything back to the serialized list,
/// refreshing each item's name from the asset itself, and then rebuilding the
/// dictionaries.
/// </summary>
public void RebuildIndex()
{
    UpdateSerializedList();
    foreach (AssetItem item in SerializedItems)
    {
        item._Name = item.EvilName;
    }
    UpdateSerializedDictionaryItems();
}
#endregion
#region Serialization
/// <summary>
/// Unity serialization hook: flattens the dictionaries into SerializedItems
/// just before this component is serialized.
/// </summary>
void ISerializationCallbackReceiver.OnBeforeSerialize()
{
    UpdateSerializedList();
}
/// <summary>
/// Unity serialization hook: rebuilds the built-in type tables, re-registers any
/// additional user types from IndexedTypeNames (dropping names that no longer
/// resolve, with a warning), then restores the lookup dictionaries from
/// SerializedItems.
/// </summary>
void ISerializationCallbackReceiver.OnAfterDeserialize()
{
    var st = StartTimer();
    #region typestuff
    List<System.Type> newTypes = new List<System.Type>()
    {
        (typeof(SlotDataAsset)),
        (typeof(OverlayDataAsset)),
        (typeof(RaceData)),
        (typeof(UMATextRecipe)),
        (typeof(UMAWardrobeRecipe)),
        (typeof(UMAWardrobeCollection)),
        (typeof(RuntimeAnimatorController)),
        (typeof(AnimatorOverrideController)),
#if UNITY_EDITOR
        (typeof(AnimatorController)),
#endif
        (typeof(DynamicUMADnaAsset)),
        (typeof(TextAsset))
    };
    TypeToLookup = new Dictionary<System.Type, System.Type>()
    {
        { (typeof(SlotDataAsset)),(typeof(SlotDataAsset)) },
        { (typeof(OverlayDataAsset)),(typeof(OverlayDataAsset)) },
        { (typeof(RaceData)),(typeof(RaceData)) },
        { (typeof(UMATextRecipe)),(typeof(UMATextRecipe)) },
        { (typeof(UMAWardrobeRecipe)),(typeof(UMAWardrobeRecipe)) },
        { (typeof(UMAWardrobeCollection)),(typeof(UMAWardrobeCollection)) },
        { (typeof(RuntimeAnimatorController)),(typeof(RuntimeAnimatorController)) },
        { (typeof(AnimatorOverrideController)),(typeof(RuntimeAnimatorController)) },
#if UNITY_EDITOR
        { (typeof(AnimatorController)),(typeof(RuntimeAnimatorController)) },
#endif
        { typeof(TextAsset), typeof(TextAsset) },
        { (typeof(DynamicUMADnaAsset)), (typeof(DynamicUMADnaAsset)) }
    };
    List<string> invalidTypeNames = new List<string>();
    // Add the additional Types.
    foreach (string s in IndexedTypeNames)
    {
        if (s == "")
            continue;
        System.Type sType = System.Type.GetType(s);
        if (sType == null)
        {
            invalidTypeNames.Add(s);
            Debug.LogWarning("Could not find type for " + s);
            continue;
        }
        newTypes.Add(sType);
        if (!TypeToLookup.ContainsKey(sType))
        {
            TypeToLookup.Add(sType, sType);
        }
    }
    Types = newTypes.ToArray();
    // Forget names that no longer resolve so we don't warn on every reload.
    if (invalidTypeNames.Count > 0)
    {
        foreach (string ivs in invalidTypeNames)
        {
            IndexedTypeNames.Remove(ivs);
        }
    }
    BuildStringTypes();
    #endregion
    UpdateSerializedDictionaryItems();
    // BUG FIX: the timer label previously said "Before Serialize", mislabeling
    // this deserialize hook in the TIMEINDEXER diagnostic logs.
    StopTimer(st, "After Deserialize");
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
/// <summary>
/// Generated driver for the Avx.Min(Vector256&lt;Single&gt;, Vector256&lt;Single&gt;) test:
/// runs every scenario when AVX is supported, otherwise the unsupported-hardware
/// scenario, and throws when any scenario reported failure.
/// </summary>
public static partial class Program
{
    private static void MinSingle()
    {
        var test = new SimpleBinaryOpTest__MinSingle();
        if (test.IsSupported)
        {
            // Validates basic functionality works, using Unsafe.Read
            test.RunBasicScenario_UnsafeRead();
            // Validates basic functionality works, using Load
            test.RunBasicScenario_Load();
            // Validates basic functionality works, using LoadAligned
            test.RunBasicScenario_LoadAligned();
            // Validates calling via reflection works, using Unsafe.Read
            test.RunReflectionScenario_UnsafeRead();
            // Validates calling via reflection works, using Load
            test.RunReflectionScenario_Load();
            // Validates calling via reflection works, using LoadAligned
            test.RunReflectionScenario_LoadAligned();
            // Validates passing a static member works
            test.RunClsVarScenario();
            // Validates passing a local works, using Unsafe.Read
            test.RunLclVarScenario_UnsafeRead();
            // Validates passing a local works, using Load
            test.RunLclVarScenario_Load();
            // Validates passing a local works, using LoadAligned
            test.RunLclVarScenario_LoadAligned();
            // Validates passing the field of a local works
            test.RunLclFldScenario();
            // Validates passing an instance member works
            test.RunFldScenario();
        }
        else
        {
            // Validates we throw on unsupported hardware
            test.RunUnsupportedScenario();
        }
        if (!test.Succeeded)
        {
            throw new Exception("One or more scenarios did not complete as expected.");
        }
    }
}
public sealed unsafe class SimpleBinaryOpTest__MinSingle
{
// A Vector256 is 32 bytes wide.
private const int VectorSize = 32;
// Number of float lanes per 256-bit vector.
private const int ElementCount = VectorSize / sizeof(Single);
// Scratch source arrays, refilled with random values by both constructors.
private static Single[] _data1 = new Single[ElementCount];
private static Single[] _data2 = new Single[ElementCount];
// Static operands for the class-variable scenario.
private static Vector256<Single> _clsVar1;
private static Vector256<Single> _clsVar2;
// Instance operands for the field scenario.
private Vector256<Single> _fld1;
private Vector256<Single> _fld2;
// Buffers backing the pointer-based scenarios (see SimpleBinaryOpTest__DataTable).
private SimpleBinaryOpTest__DataTable<Single> _dataTable;
// One-time initialization of the static operands.
// NOTE(review): _clsVar1 is copied from _data2 and _clsVar2 from _data1 -
// swapped relative to the instance-field init below; confirm against the
// GenerateTests.csx template whether this is intentional.
static SimpleBinaryOpTest__MinSingle()
{
    var random = new Random();
    for (var i = 0; i < ElementCount; i++) { _data1[i] = (float)(random.NextDouble()); _data2[i] = (float)(random.NextDouble()); }
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _clsVar1), ref Unsafe.As<Single, byte>(ref _data2[0]), VectorSize);
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _clsVar2), ref Unsafe.As<Single, byte>(ref _data1[0]), VectorSize);
}
// Fills the instance-field operands and the pointer-based data table with
// fresh random floats; Succeeded starts true and scenarios clear it on failure.
public SimpleBinaryOpTest__MinSingle()
{
    Succeeded = true;
    var random = new Random();
    // First batch of random data feeds the _fld1/_fld2 vectors...
    for (var i = 0; i < ElementCount; i++) { _data1[i] = (float)(random.NextDouble()); _data2[i] = (float)(random.NextDouble()); }
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), VectorSize);
    Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), VectorSize);
    // ...second batch feeds the data table used by the pointer scenarios.
    for (var i = 0; i < ElementCount; i++) { _data1[i] = (float)(random.NextDouble()); _data2[i] = (float)(random.NextDouble()); }
    _dataTable = new SimpleBinaryOpTest__DataTable<Single>(_data1, _data2, new Single[ElementCount], VectorSize);
}
public bool IsSupported => Avx.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
var result = Avx.Min(
Unsafe.Read<Vector256<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector256<Single>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
var result = Avx.Min(
Avx.LoadVector256((Single*)(_dataTable.inArray1Ptr)),
Avx.LoadVector256((Single*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_LoadAligned()
{
var result = Avx.Min(
Avx.LoadAlignedVector256((Single*)(_dataTable.inArray1Ptr)),
Avx.LoadAlignedVector256((Single*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
var result = typeof(Avx).GetMethod(nameof(Avx.Min), new Type[] { typeof(Vector256<Single>), typeof(Vector256<Single>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector256<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector256<Single>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
var result = typeof(Avx).GetMethod(nameof(Avx.Min), new Type[] { typeof(Vector256<Single>), typeof(Vector256<Single>) })
.Invoke(null, new object[] {
Avx.LoadVector256((Single*)(_dataTable.inArray1Ptr)),
Avx.LoadVector256((Single*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
var result = typeof(Avx).GetMethod(nameof(Avx.Min), new Type[] { typeof(Vector256<Single>), typeof(Vector256<Single>) })
.Invoke(null, new object[] {
Avx.LoadAlignedVector256((Single*)(_dataTable.inArray1Ptr)),
Avx.LoadAlignedVector256((Single*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
var result = Avx.Min(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
var left = Unsafe.Read<Vector256<Single>>(_dataTable.inArray1Ptr);
var right = Unsafe.Read<Vector256<Single>>(_dataTable.inArray2Ptr);
var result = Avx.Min(left, right);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
var left = Avx.LoadVector256((Single*)(_dataTable.inArray1Ptr));
var right = Avx.LoadVector256((Single*)(_dataTable.inArray2Ptr));
var result = Avx.Min(left, right);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
var left = Avx.LoadAlignedVector256((Single*)(_dataTable.inArray1Ptr));
var right = Avx.LoadAlignedVector256((Single*)(_dataTable.inArray2Ptr));
var result = Avx.Min(left, right);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclFldScenario()
{
var test = new SimpleBinaryOpTest__MinSingle();
var result = Avx.Min(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunFldScenario()
{
var result = Avx.Min(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunUnsupportedScenario()
{
Succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
Succeeded = true;
}
}
private void ValidateResult(Vector256<Single> left, Vector256<Single> right, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[ElementCount];
Single[] inArray2 = new Single[ElementCount];
Single[] outArray = new Single[ElementCount];
Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left);
Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[ElementCount];
Single[] inArray2 = new Single[ElementCount];
Single[] outArray = new Single[ElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(Single[] left, Single[] right, Single[] result, [CallerMemberName] string method = "")
{
if (BitConverter.SingleToInt32Bits(MathF.Min(left[0], right[0])) != BitConverter.SingleToInt32Bits(result[0]))
{
Succeeded = false;
}
else
{
for (var i = 1; i < left.Length; i++)
{
if (BitConverter.SingleToInt32Bits(MathF.Min(left[i], right[i])) != BitConverter.SingleToInt32Bits(result[i]))
{
Succeeded = false;
break;
}
}
}
if (!Succeeded)
{
Console.WriteLine($"{nameof(Avx)}.{nameof(Avx.Min)}<Single>: {method} failed:");
Console.WriteLine($" left: ({string.Join(", ", left)})");
Console.WriteLine($" right: ({string.Join(", ", right)})");
Console.WriteLine($" result: ({string.Join(", ", result)})");
Console.WriteLine();
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace Reader.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
/// <summary>
/// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
/// </summary>
public HelpPageSampleGenerator()
{
ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
ActionSamples = new Dictionary<HelpPageSampleKey, object>();
SampleObjects = new Dictionary<Type, object>();
// Seed with the default factory (ObjectGenerator-based); callers may insert
// overrides before it or append fallbacks after it.
SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
{
DefaultSampleObjectFactory,
};
}
/// <summary>
/// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
public IDictionary<Type, object> SampleObjects { get; internal set; }
/// <summary>
/// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
/// stopping when the factory successfully returns a non-<see langref="null"/> object.
/// </summary>
/// <remarks>
/// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
/// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
/// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
Justification = "This is an appropriate nesting of generic types")]
public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
{
return GetSample(api, SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
{
return GetSample(api, SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
if (api == null)
{
throw new ArgumentNullException("api");
}
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
var samples = new Dictionary<MediaTypeHeaderValue, object>();
// Use the samples provided directly for actions
// NOTE(review): Add throws if two action samples share the same media type for
// this action/direction; registrations are assumed distinct — verify when registering.
var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
foreach (var actionSample in actionSamples)
{
samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
}
// Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
// Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
{
object sampleObject = GetSampleObject(type);
foreach (var formatter in formatters)
{
foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
{
// Directly provided action samples (added above) take precedence.
if (!samples.ContainsKey(mediaType))
{
object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
// If no sample found, try generate sample using formatter and sample object
if (sample == null && sampleObject != null)
{
sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
}
samples.Add(mediaType, WrapSampleIfString(sample));
}
}
}
}
return samples;
}
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The sample that matches the parameters, or <c>null</c> when no registration matches.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
{
object sample;
// First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
// If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
// If still not found, try to get the sample provided for the specified mediaType and type.
// Finally, try to get the sample provided for the specified mediaType.
// ("*" is the wildcard parameter-name registration.)
if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
{
return sample;
}
return null;
}
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
/// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
/// factories in <see cref="SampleObjectFactories"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object, or <c>null</c> when no registration exists and every factory fails.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
public virtual object GetSampleObject(Type type)
{
object sampleObject;
if (!SampleObjects.TryGetValue(type, out sampleObject))
{
// No specific object available, try our factories.
foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
{
if (factory == null)
{
continue;
}
try
{
sampleObject = factory(this, type);
if (sampleObject != null)
{
break;
}
}
catch
{
// Ignore any problems encountered in the factory; go on to the next one (if any).
}
}
}
return sampleObject;
}
/// <summary>
/// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The type.</returns>
public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
{
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters.</param>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
{
throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
}
if (api == null)
{
throw new ArgumentNullException("api");
}
Type type;
// An explicit registration (exact parameter names, or "*" wildcard) overrides
// the type inferred from the API description.
if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
{
// Re-compute the supported formatters based on type
Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
{
if (IsFormatSupported(sampleDirection, formatter, type))
{
newFormatters.Add(formatter);
}
}
formatters = newFormatters;
}
else
{
switch (sampleDirection)
{
case SampleDirection.Request:
ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
formatters = api.SupportedRequestBodyFormatters;
break;
case SampleDirection.Response:
default:
type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
formatters = api.SupportedResponseFormatters;
break;
}
}
return type;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns>A <see cref="TextSample"/> with the serialized (and pretty-printed, for XML/JSON) body,
/// or an <see cref="InvalidSample"/> describing why serialization was not possible.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
if (formatter == null)
{
throw new ArgumentNullException("formatter");
}
if (mediaType == null)
{
throw new ArgumentNullException("mediaType");
}
object sample = String.Empty;
MemoryStream ms = null;
HttpContent content = null;
try
{
if (formatter.CanWriteType(type))
{
ms = new MemoryStream();
content = new ObjectContent(type, value, formatter, mediaType);
// NOTE(review): synchronous Wait() on the async write — acceptable for
// help-page generation, but confirm it is never called on a context that deadlocks.
formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
ms.Position = 0;
StreamReader reader = new StreamReader(ms);
string serializedSampleString = reader.ReadToEnd();
if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
{
serializedSampleString = TryFormatXml(serializedSampleString);
}
else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
{
serializedSampleString = TryFormatJson(serializedSampleString);
}
sample = new TextSample(serializedSampleString);
}
else
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
mediaType,
formatter.GetType().Name,
type.Name));
}
}
catch (Exception e)
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
formatter.GetType().Name,
mediaType.MediaType,
UnwrapException(e).Message));
}
finally
{
if (ms != null)
{
ms.Dispose();
}
if (content != null)
{
content.Dispose();
}
}
return sample;
}
// Unwraps an AggregateException (e.g. from Task.Wait) to its first inner exception
// so InvalidSample messages show the real cause.
internal static Exception UnwrapException(Exception exception)
{
AggregateException aggregateException = exception as AggregateException;
if (aggregateException != null)
{
return aggregateException.Flatten().InnerException;
}
return exception;
}
// Default factory for sample objects
private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
{
// Try to create a default sample object
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type);
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
try
{
object parsedJson = JsonConvert.DeserializeObject(str);
return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
}
catch
{
// can't parse JSON, return the original string
return str;
}
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
try
{
XDocument xml = XDocument.Parse(str);
return xml.ToString();
}
catch
{
// can't parse XML, return the original string
return str;
}
}
// A formatter supports a request sample if it can read the type, and a response
// sample if it can write the type.
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
switch (sampleDirection)
{
case SampleDirection.Request:
return formatter.CanReadType(type);
case SampleDirection.Response:
return formatter.CanWriteType(type);
}
return false;
}
// Lazily yields the ActionSamples entries registered for this controller/action/direction.
// A registration with parameter names { "*" } matches any parameter set.
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
foreach (var sample in ActionSamples)
{
HelpPageSampleKey sampleKey = sample.Key;
if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
(sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
sampleDirection == sampleKey.SampleDirection)
{
yield return sample;
}
}
}
// Raw string samples are wrapped in TextSample so the help page renders them as text.
private static object WrapSampleIfString(object sample)
{
string stringSample = sample as string;
if (stringSample != null)
{
return new TextSample(stringSample);
}
return sample;
}
}
}
| |
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace MulticastUDP.packets {
/// <summary>
/// Base class for fixed-layout UDP packets. Properties are serialized in
/// name-sorted order (reflection's GetProperties() does not guarantee order),
/// with a 4-byte PACKETTYPE header at offset 0 and strings padded/truncated to
/// MAX_STRING_LENGTH bytes on the wire.
/// </summary>
public abstract class Packet {
    // Properties in stable, name-sorted order; serialization relies on this.
    private SortedDictionary<string, PropertyInfo> m_OrderedProperties = new SortedDictionary<string, PropertyInfo>();
    // Byte offset of each property inside the serialized packet body.
    private ConcurrentDictionary<string, int> m_PropertyIndexes = new ConcurrentDictionary<string, int>();

    // Fixed on-wire size of every serialized string (space padded / truncated).
    public const int MAX_STRING_LENGTH = 128;

    public Guid PacketId { get; set; }
    public string MachineName { get; set; }
    public string UserName { get; set; }
    public DateTime TimeStamp { get; set; }
    public bool IsHeartBeat { get; set; }

    // Concrete packet classes identify themselves via this discriminator.
    public abstract PACKETTYPE PacketType { get; }
    public abstract void setDefaultValues();

    public enum PACKETTYPE {
        AIRCRAFTPACKET,
        UNKNOWN
    }

    public Packet() {
        setBaseDefaultValues();
        populateOrderedProperties();
    }

    public Packet(Packet thePacket) {
        PacketId = thePacket.PacketId;
        MachineName = thePacket.MachineName;
        UserName = thePacket.UserName;
        TimeStamp = thePacket.TimeStamp;
        IsHeartBeat = thePacket.IsHeartBeat;
        // Bug fix: the copy constructor previously skipped this, leaving the
        // property tables empty so toByteArray()/parseBytes() silently did nothing.
        populateOrderedProperties();
    }

    public Packet(bool isHeartBeat) {
        setBaseDefaultValues();
        IsHeartBeat = isHeartBeat;
        populateOrderedProperties();
    }

    public Packet(byte[] theBytes) {
        setBaseDefaultValues();
        populateOrderedProperties();
        parseBytes(theBytes);
    }

    /// <summary>Identifier for the feed this packet belongs to: user@machine.TYPE.</summary>
    public string getFeedId() {
        return UserName + "@" + MachineName + "." + PacketType.ToString();
    }

    // Seeds the common header fields, then lets the subclass set its own defaults.
    private void setBaseDefaultValues() {
        PacketId = Guid.NewGuid();
        MachineName = Environment.MachineName;
        UserName = Environment.UserName;
        TimeStamp = DateTime.Now;
        IsHeartBeat = false;
        setDefaultValues();
    }

    public override string ToString() {
        return "[" + TimeStamp + "]" + MachineName + ":" + UserName;
    }

    /// <summary>Copies each property into the matching column of a grid row, if present.</summary>
    public void populateRow(ref DataGridViewRow theRow) {
        foreach (PropertyInfo pi in GetType().GetProperties()) {
            if (theRow.DataGridView.Columns.Contains(pi.Name)) {
                theRow.Cells[pi.Name].Value = pi.GetValue(this, null);
            }
        }
    }

    /// <summary>Rebuilds the grid's columns, one per public property.</summary>
    public void createColumns(ref DataGridView theDGV) {
        theDGV.Columns.Clear();
        foreach (PropertyInfo pi in GetType().GetProperties()) {
            DataGridViewColumn col = theDGV.Columns[theDGV.Columns.Add(pi.Name, pi.Name)];
            col.ValueType = pi.PropertyType;
        }
    }

    // Organizes all properties in name-sorted order and precomputes each one's
    // byte offset. Required because GetType().GetProperties() does not guarantee
    // order and we rely on the order being identical when converting to and from
    // byte arrays.
    private void populateOrderedProperties() {
        m_OrderedProperties = new SortedDictionary<string, PropertyInfo>();
        foreach (PropertyInfo pi in GetType().GetProperties()) {
            m_OrderedProperties.Add(pi.Name, pi);
        }
        // Offsets start at 4: bytes 0..3 of every packet hold the PACKETTYPE header.
        int index = 4;
        foreach (KeyValuePair<string, PropertyInfo> kvp in m_OrderedProperties) {
            PropertyInfo pi = kvp.Value;
            m_PropertyIndexes.TryAdd(pi.Name, index);
            // Note: typeof(int) == typeof(Int32) and typeof(long) == typeof(Int64),
            // so one branch per width suffices.
            if (pi.PropertyType == typeof(string)) { index += MAX_STRING_LENGTH; }
            else if (pi.PropertyType == typeof(double)) { index += 8; }
            else if (pi.PropertyType == typeof(bool)) { index += 1; }
            else if (pi.PropertyType == typeof(Int16)) { index += 2; }
            else if (pi.PropertyType == typeof(Int32)) { index += 4; }
            else if (pi.PropertyType == typeof(Int64)) { index += 8; }
            else if (pi.PropertyType == typeof(Guid)) { index += 16; }
            else if (pi.PropertyType == typeof(PACKETTYPE)) { /* serialized in the 4-byte header, no body bytes */ }
            else if (pi.PropertyType == typeof(DateTime)) { index += 8; }
        }
    }

    /// <summary>Decodes one property's value from its precomputed offset in the buffer.</summary>
    public void parseBytesForProperty(KeyValuePair<string, PropertyInfo> kvp, byte[] theBytes) {
        try {
            PropertyInfo pi = kvp.Value;
            int index = m_PropertyIndexes[pi.Name];
            if (index >= theBytes.Length) { return; }
            if (pi.PropertyType == typeof(string)) {
                byte[] stringBytes = new byte[MAX_STRING_LENGTH];
                Array.Copy(theBytes, index, stringBytes, 0, MAX_STRING_LENGTH);
                // Bug fix: use ASCII to match appendString(), and trim the
                // on-wire space padding so strings round-trip unchanged.
                string value = System.Text.Encoding.ASCII.GetString(stringBytes).TrimEnd();
                pi.SetValue(this, value, null);
            }
            else if (pi.PropertyType == typeof(double)) {
                pi.SetValue(this, BitConverter.ToDouble(theBytes, index), null);
            }
            else if (pi.PropertyType == typeof(bool)) {
                pi.SetValue(this, BitConverter.ToBoolean(theBytes, index), null);
            }
            // Bug fix: BitConverter.Int16/Int32/Int64/int do not exist; the
            // conversion methods are ToInt16/ToInt32/ToInt64.
            else if (pi.PropertyType == typeof(Int16)) {
                pi.SetValue(this, BitConverter.ToInt16(theBytes, index), null);
            }
            else if (pi.PropertyType == typeof(Int32)) {
                pi.SetValue(this, BitConverter.ToInt32(theBytes, index), null);
            }
            else if (pi.PropertyType == typeof(Int64)) {
                pi.SetValue(this, BitConverter.ToInt64(theBytes, index), null);
            }
            else if (pi.PropertyType == typeof(Guid)) {
                byte[] guidBytes = new byte[16];
                Array.Copy(theBytes, index, guidBytes, 0, 16);
                pi.SetValue(this, new Guid(guidBytes), null);
            }
            else if (pi.PropertyType == typeof(DateTime)) {
                // Bug fix: previously assigned an undefined 'value' instead of
                // the DateTime reconstructed from the tick count.
                Int64 ticks = BitConverter.ToInt64(theBytes, index);
                pi.SetValue(this, new DateTime(ticks), null);
            }
        }
        catch (Exception) {
            // Best-effort: a malformed field leaves the property at its default value.
        }
    }

    /// <summary>Decodes every ordered property from the buffer (properties are independent, so in parallel).</summary>
    public void parseBytes(byte[] theBytes) {
        // Bug fix: the callee was misspelled 'pareseBytesForProperty'.
        Parallel.ForEach(m_OrderedProperties, item => parseBytesForProperty(item, theBytes));
    }

    // Appends a string as exactly MAX_STRING_LENGTH ASCII bytes (space padded / truncated).
    private void appendString(string theValue, ref List<Byte> theByteList) {
        try {
            if (theValue == null) {
                theValue = "".PadRight(MAX_STRING_LENGTH);
            }
            else {
                if (theValue.Length > MAX_STRING_LENGTH) theValue = theValue.Substring(0, MAX_STRING_LENGTH);
                if (theValue.Length < MAX_STRING_LENGTH) theValue = theValue.PadRight(MAX_STRING_LENGTH);
            }
            byte[] stringBytes = System.Text.Encoding.ASCII.GetBytes(theValue);
            theByteList.AddRange(stringBytes);
        }
        catch (Exception) {
            // Best-effort: skip a string that cannot be encoded.
        }
    }

    // Fixed-width append helpers (bug fix: parameter type was the nonexistent 'List<Bytes>').
    private void appendInt64(Int64 theValue, ref List<Byte> theByteList) {
        theByteList.AddRange(BitConverter.GetBytes(theValue));
    }

    private void appendInt32(Int32 theValue, ref List<Byte> theByteList) {
        theByteList.AddRange(BitConverter.GetBytes(theValue));
    }

    private void appendInt16(Int16 theValue, ref List<Byte> theByteList) {
        theByteList.AddRange(BitConverter.GetBytes(theValue));
    }

    private void appendDouble(Double theValue, ref List<Byte> theByteList) {
        theByteList.AddRange(BitConverter.GetBytes(theValue));
    }

    // Bug fix: this overload was mis-named appendDouble, while toByteArray called appendBool.
    private void appendBool(bool theValue, ref List<Byte> theByteList) {
        theByteList.AddRange(BitConverter.GetBytes(theValue));
    }

    // DateTime travels as its 8-byte tick count.
    private void appendDateTime(DateTime theValue, ref List<Byte> theByteList) {
        appendInt64(theValue.Ticks, ref theByteList);
    }

    private void appendGuid(Guid theValue, ref List<Byte> theByteList) {
        theByteList.AddRange(theValue.ToByteArray());
    }

    /// <summary>Comma-separated dump of the ordered property values (trailing comma preserved).</summary>
    public string toCSVString() {
        StringBuilder sBuilder = new StringBuilder();
        foreach (KeyValuePair<string, PropertyInfo> kvp in m_OrderedProperties) {
            object value = kvp.Value.GetValue(this, null);
            // Bug fix: guard against null property values (was an unconditional ToString()),
            // and repair the malformed Append(",") statement.
            sBuilder.Append(value == null ? "" : value.ToString());
            sBuilder.Append(",");
        }
        return sBuilder.ToString();
    }

    /// <summary>Serializes the packet: 4-byte PACKETTYPE header followed by the ordered property bytes.</summary>
    public byte[] toByteArray() {
        List<Byte> byteList = new List<Byte>();
        byteList.AddRange(BitConverter.GetBytes((int)PacketType));
        foreach (KeyValuePair<string, PropertyInfo> kvp in m_OrderedProperties) {
            try {
                PropertyInfo pi = kvp.Value;
                if (pi.PropertyType == typeof(string)) { appendString((string)pi.GetValue(this, null), ref byteList); }
                else if (pi.PropertyType == typeof(DateTime)) { appendDateTime((DateTime)pi.GetValue(this, null), ref byteList); }
                else if (pi.PropertyType == typeof(double)) { appendDouble((double)pi.GetValue(this, null), ref byteList); }
                else if (pi.PropertyType == typeof(bool)) { appendBool((bool)pi.GetValue(this, null), ref byteList); }
                // Bug fix: int/long/Int16/Guid previously routed through appendDateTime,
                // which could not compile; use the width-matched helpers so offsets
                // stay in sync with populateOrderedProperties().
                else if (pi.PropertyType == typeof(int)) { appendInt32((int)pi.GetValue(this, null), ref byteList); }
                else if (pi.PropertyType == typeof(long)) { appendInt64((long)pi.GetValue(this, null), ref byteList); }
                else if (pi.PropertyType == typeof(Int16)) { appendInt16((Int16)pi.GetValue(this, null), ref byteList); }
                else if (pi.PropertyType == typeof(Guid)) { appendGuid((Guid)pi.GetValue(this, null), ref byteList); }
                else if (pi.PropertyType == typeof(PACKETTYPE)) { /* already written as the 4-byte header */ }
                else {
                    // WARN: unknown property type is skipped; keep in sync with
                    // the offset table in populateOrderedProperties().
                }
            }
            catch (Exception) {
                // Best-effort: skip a property that fails to serialize.
            }
        }
        return byteList.ToArray();
    }

    /// <summary>Factory: inspects the 4-byte header and builds the matching concrete packet (or null).</summary>
    public static Packet fromBytes(byte[] theBytes) {
        Packet newPacket = null;
        Packet.PACKETTYPE pType = Packet.decodePacketType(theBytes);
        switch (pType) {
            case Packet.PACKETTYPE.AIRCRAFTPACKET:
                // NOTE(review): type name spelling kept from the original source;
                // confirm the concrete class really is named 'AircraftPAcket'.
                newPacket = new AircraftPAcket(theBytes);
                break;
        }
        return newPacket;
    }

    /// <summary>Reads the PACKETTYPE discriminator from bytes 0..3; UNKNOWN for short/null buffers.</summary>
    public static PACKETTYPE decodePacketType(byte[] theBytes) {
        if (theBytes == null || theBytes.Length < 4) { return PACKETTYPE.UNKNOWN; }
        return (PACKETTYPE)BitConverter.ToInt32(theBytes, 0);
    }
}
}
| |
using System.Diagnostics;
using System.IO;
using System.Xml;
using System.Xml.Serialization;
namespace nanoFramework.Tools.VisualStudio.Debugger
{
public class Pdbx
{
// Maps a metadata token between the desktop CLR numbering and the nanoCLR
// numbering. Token values are serialized to XML as "0x"-prefixed hex strings.
public class TokenMap
{
// Parses a (possibly "0x"/"0X"-prefixed, possibly padded) hex string into a uint.
uint StringToUInt32(string s)
{
// Bug fix: the original called s.Trim() and discarded the result
// (strings are immutable), so padded input failed to parse.
s = s.Trim();
if (s.StartsWith("0x", StringComparison.OrdinalIgnoreCase))
s = s.Remove(0, 2);
return uint.Parse(s, System.Globalization.NumberStyles.HexNumber);
}
// Formats a uint as an upper-case, "0x"-prefixed hex string.
string UInt32ToString(uint u)
{
return "0x" + u.ToString("X");
}
[XmlIgnore]public uint CLR;        // token value in the desktop CLR
[XmlIgnore]public uint nanoCLR;    // corresponding token value in the nanoCLR
// String-typed bridge properties used by the XML (de)serializer.
[XmlElement("CLR")]
public string CLR_String
{
get {return UInt32ToString(CLR);}
set {CLR = StringToUInt32(value);}
}
[XmlElement("nanoCLR")]
public string nanoCLR_String
{
get {return UInt32ToString(nanoCLR);}
set {nanoCLR = StringToUInt32(value);}
}
}
        // A type/member metadata token (CLR <-> nanoCLR pair).
        public class Token : TokenMap
        {
        }
        // An IL offset mapping (CLR <-> nanoCLR pair).
        public class IL : TokenMap
        {
        }
        // Base for members of a class: carries the member's token plus a
        // back-pointer to the owning Class (wired up in PdbxFile.Initialize,
        // not part of the serialized XML).
        public class ClassMember
        {
            public Token Token;
            [XmlIgnore]public Class Class;
        }
        public class Method : ClassMember
        {
            public bool HasByteCode = true;
            // IL offset map; strictly increasing on both CLR and nanoCLR
            // offsets (asserted in PdbxFile.Initialize in DEBUG builds).
            public IL[] ILMap;
            private bool m_fIsJMC;
            // "Just My Code" flag; the setter is silently ignored for methods
            // without bytecode (see CanSetJMC).
            [XmlIgnore]
            public bool IsJMC
            {
                get { return m_fIsJMC; }
                set { if (CanSetJMC) m_fIsJMC = value; }
            }
            public bool CanSetJMC
            {
                get { return this.HasByteCode; }
            }
        }
        public class Field : ClassMember
        {
        }
        // A class in the assembly; Assembly back-pointer is wired up in
        // PdbxFile.Initialize.
        public class Class
        {
            public Token Token;
            public Field[] Fields;
            public Method[] Methods;
            [XmlIgnore]public Assembly Assembly;
        }
        public class Assembly /*Module*/
        {
            public struct VersionStruct
            {
                public ushort Major;
                public ushort Minor;
                public ushort Build;
                public ushort Revision;
            }
            public string FileName;
            public VersionStruct Version;
            public Token Token;
            public Class[] Classes;
            // Runtime-only association to the debugger-side assembly object.
            [XmlIgnore]public CorDebugAssembly CorDebugAssembly;
        }
public class PdbxFile
{
public class Resolver
{
private string[] _assemblyPaths;
private string[] _assemblyDirectories;
public string[] AssemblyPaths
{
get { return _assemblyPaths; }
set { _assemblyPaths = value; }
}
public string[] AssemblyDirectories
{
get {return _assemblyDirectories;}
set {_assemblyDirectories = value;}
}
public PdbxFile Resolve(string name, Tools.Debugger.WireProtocol.Commands.DebuggingResolveAssembly.Version version, bool fIsTargetBigEndian)
{
PdbxFile file = PdbxFile.Open(name, version, _assemblyPaths, _assemblyDirectories, fIsTargetBigEndian);
return file;
}
}
public Assembly Assembly;
[XmlIgnore]public string PdbxPath;
            // Probes for "<path>.pdbx"; returns the deserialized and
            // initialized file when it exists and its major/minor version
            // matches, otherwise null.
            private static PdbxFile TryPdbxFile(string path, Tools.Debugger.WireProtocol.Commands.DebuggingResolveAssembly.Version version)
            {
                try
                {
                    path += ".pdbx";
                    if (File.Exists(path))
                    {
                        // Presumably a pre-generated serializer (avoids runtime
                        // XmlSerializer codegen) — see Serialization namespace.
                        XmlSerializer xmls = new Serialization.PdbxFile.PdbxFileSerializer();
                        PdbxFile file = (PdbxFile)Utility.XmlDeserialize(path, xmls);
                        //Check version
                        Assembly.VersionStruct version2 = file.Assembly.Version;
                        if (version2.Major == version.MajorVersion && version2.Minor == version.MinorVersion)
                        {
                            file.Initialize(path);
                            return file;
                        }
                    }
                }
                catch
                {
                    // Deliberate best-effort: a malformed or unreadable .pdbx
                    // is treated as "not found" so callers can try other paths.
                }
                return null;
            }
private static PdbxFile OpenHelper(string name, Tools.Debugger.WireProtocol.Commands.DebuggingResolveAssembly.Version version, string[] assemblyDirectories, string directorySuffix)
{
PdbxFile file = null;
for (int iDirectory = 0; iDirectory < assemblyDirectories.Length; iDirectory++)
{
string directory = assemblyDirectories[iDirectory];
if(!string.IsNullOrEmpty(directorySuffix))
{
directory = Path.Combine(directory, directorySuffix);
}
string pathNoExt = Path.Combine(directory, name);
if ((file = TryPdbxFile(pathNoExt, version)) != null)
break;
}
return file;
}
private static PdbxFile Open(string name, Tools.Debugger.WireProtocol.Commands.DebuggingResolveAssembly.Version version, string[] assemblyPaths, string[] assemblyDirectories, bool fIsTargetBigEndian)
{
PdbxFile file = null;
if (assemblyPaths != null)
{
for(int iPath = 0; iPath < assemblyPaths.Length; iPath++)
{
string path = assemblyPaths[iPath];
string pathNoExt = Path.ChangeExtension(path, null);
if (0 == string.Compare(name, Path.GetFileName(pathNoExt), true))
{
if ((file = TryPdbxFile(pathNoExt, version)) != null)
break;
}
}
}
if (file == null && assemblyDirectories != null)
{
file = OpenHelper(name, version, assemblyDirectories, null);
if (file == null)
{
if (fIsTargetBigEndian)
{
file = OpenHelper(name, version, assemblyDirectories, @"..\pe\be");
if (file == null)
{
file = OpenHelper(name, version, assemblyDirectories, @"be");
}
}
else
{
file = OpenHelper(name, version, assemblyDirectories, @"..\pe\le");
if (file == null)
{
file = OpenHelper(name, version, assemblyDirectories, @"le");
}
}
}
}
//Try other paths here...
return file;
}
            // Records where this .pdbx was loaded from and wires up the parent
            // back-pointers (Class.Assembly, Method.Class, Field.Class) that
            // are not part of the serialized XML.
            private void Initialize(string path)
            {
                this.PdbxPath = path;
                for(int iClass = 0; iClass < this.Assembly.Classes.Length; iClass++)
                {
                    Class c = this.Assembly.Classes[iClass];
                    c.Assembly = this.Assembly;
                    for(int iMethod = 0; iMethod < c.Methods.Length; iMethod++)
                    {
                        Method m = c.Methods[iMethod];
                        m.Class = c;
#if DEBUG
                        // Sanity check: IL maps are expected to be strictly
                        // increasing on both the CLR and nanoCLR sides.
                        for (int iIL = 0; iIL < m.ILMap.Length - 1; iIL++)
                        {
                            Debug.Assert(m.ILMap[iIL].CLR < m.ILMap[iIL + 1].CLR);
                            Debug.Assert(m.ILMap[iIL].nanoCLR < m.ILMap[iIL + 1].nanoCLR);
                        }
#endif
                    }
                    foreach (Field f in c.Fields)
                    {
                        f.Class = c;
                    }
                }
            }
/// Format of the Pdbx file
///
///<Pdbx>
/// <dat>
/// <filename>NAME</filename>
/// </dat>
/// <assemblies>
/// <assembly>
/// <name>NAME</name>
/// <version>
/// <Major>1</Major>
/// <Minor>2</Minor>
/// <Build>3</Build>
/// <Revision>4</Revision>
/// </version>
/// <token>
/// <CLR>TOKEN</CLR>
/// <nanoCLR>TOKEN</nanoCLR>
/// </token>
/// <classes>
/// <class>
/// <name>NAME</name>
/// <fields>
/// <field>
/// <token></token>
/// </field>
/// </fields>
/// <methods>
/// <method>
/// <token></token>
/// <ILMap>
/// <IL>
/// <CLR>IL</CLR>
/// <nanoCLR>IL</nanoCLR>
/// </IL>
/// </ILMap>
/// </method>
/// </methods>
/// </class>
/// </classes>
/// </assembly>
/// </assemblies>
///</Pdbx>
///
///
}
}
}
| |
using System;
using System.Collections.Generic;
using Contracts;
using DarkMultiPlayerCommon;
namespace DarkMultiPlayer
{
public class ScenarioWorker
{
        // True while syncing; Update() unhooks game events when false.
        public bool workerEnabled = false;
        // Modules that already produced a save error, so each is warned once.
        private List<string> warnedModules = new List<string>();
        // Last known SHA256 per scenario module name; used to skip sending or
        // reloading unchanged data.
        private Dictionary<string, string> checkData = new Dictionary<string, string>();
        // Scenario data received from the server, drained by Update() /
        // LoadScenarioDataIntoGame() under the queue lock.
        private Queue<ScenarioEntry> scenarioQueue = new Queue<ScenarioEntry>();
        // Set after a failed load so possibly-bad local state is never uploaded.
        private bool blockScenarioDataSends = false;
        private float lastScenarioSendTime = 0f;
        // Periodic catch-all upload interval (seconds); game events trigger
        // earlier sends.
        private const float SEND_SCENARIO_DATA_INTERVAL = 30f;
        //ScenarioType list to check.
        private Dictionary<string, Type> allScenarioTypesInAssemblies;
        //System.Reflection hackiness for loading kerbals into the crew roster:
        // NOTE(review): this delegate appears unused in this file — confirm
        // before removing.
        private delegate bool AddCrewMemberToRosterDelegate(ProtoCrewMember pcm);
        // Game hooks
        private bool registered;
        //Services
        private DMPGame dmpGame;
        private VesselWorker vesselWorker;
        private ConfigNodeSerializer configNodeSerializer;
        private NetworkWorker networkWorker;
        /// <summary>
        /// Methods to call before DMP loads a network scenario module. Returning true will count the message as handled and will prevent DMP from loading it.
        /// </summary>
        private Dictionary<string, Func<ConfigNode, bool>> beforeCallback = new Dictionary<string, Func<ConfigNode, bool>>();
        /// <summary>
        /// Methods to call after DMP loads a network scenario module
        /// </summary>
        private Dictionary<string, Action<ConfigNode>> afterCallback = new Dictionary<string, Action<ConfigNode>>();
        private NamedAction updateAction;
        // Wires this worker into the game's update loop; Stop() removes it.
        public ScenarioWorker(DMPGame dmpGame, VesselWorker vesselWorker, ConfigNodeSerializer configNodeSerializer, NetworkWorker networkWorker)
        {
            this.dmpGame = dmpGame;
            this.vesselWorker = vesselWorker;
            this.configNodeSerializer = configNodeSerializer;
            this.networkWorker = networkWorker;
            updateAction = new NamedAction(Update);
            dmpGame.updateEvent.Add(updateAction);
        }
        // Registers a hook invoked before DMP loads the named scenario module;
        // a hook returning true suppresses DMP's own load of that module.
        public void RegisterBeforeCallback(string moduleName, Func<ConfigNode, bool> callback)
        {
            beforeCallback[moduleName] = callback;
        }
        // Registers a hook invoked after DMP loads the named scenario module.
        public void RegisterAfterCallback(string moduleName, Action<ConfigNode> callback)
        {
            afterCallback[moduleName] = callback;
        }
        // Subscribes to game events that indicate scenario state changed so
        // updated modules are sent promptly rather than waiting for the timer.
        private void RegisterGameHooks()
        {
            registered = true;
            GameEvents.Contract.onAccepted.Add(OnContractAccepted);
            GameEvents.OnFundsChanged.Add(OnFundsChanged);
            GameEvents.OnTechnologyResearched.Add(OnTechnologyResearched);
            // "Recieved" spelling matches the game API's own event name.
            GameEvents.OnScienceRecieved.Add(OnScienceRecieved);
            GameEvents.OnScienceChanged.Add(OnScienceChanged);
            GameEvents.OnReputationChanged.Add(OnReputationChanged);
            RegisterAfterCallback("Funding", FundingCallback);
            RegisterAfterCallback("ResearchAndDevelopment", ResearchAndDevelopmentCallback);
            RegisterAfterCallback("Reputation", ReputationCallback);
        }
        // Mirror of RegisterGameHooks. NOTE(review): the after-callbacks
        // registered there are never removed — confirm that is intentional.
        private void UnregisterGameHooks()
        {
            registered = false;
            GameEvents.Contract.onAccepted.Remove(OnContractAccepted);
            GameEvents.OnFundsChanged.Remove(OnFundsChanged);
            GameEvents.OnTechnologyResearched.Remove(OnTechnologyResearched);
            GameEvents.OnScienceRecieved.Remove(OnScienceRecieved);
            GameEvents.OnScienceChanged.Remove(OnScienceChanged);
            GameEvents.OnReputationChanged.Remove(OnReputationChanged);
        }
        //Events so we can quickly send our changed modules
        // Each handler triggers a normal-priority sync; modules whose hash is
        // unchanged are filtered out inside SendScenarioModules.
        private void OnReputationChanged(float data0, TransactionReasons data1)
        {
            SendScenarioModules(false);
        }
        private void OnScienceChanged(float data0, TransactionReasons data1)
        {
            SendScenarioModules(false);
        }
        private void OnScienceRecieved(float data0, ScienceSubject data1, ProtoVessel data2, bool data3)
        {
            SendScenarioModules(false);
        }
        private void OnTechnologyResearched(GameEvents.HostTargetAction<RDTech, RDTech.OperationResult> data)
        {
            SendScenarioModules(false);
        }
        private void OnFundsChanged(double newValue, TransactionReasons reason)
        {
            SendScenarioModules(false);
        }
        //Callbacks for UI
        // After a module is loaded from the network, re-fire the matching game
        // event with the now-current value (per the section note, so the UI
        // picks up the change).
        private void FundingCallback(ConfigNode configNode)
        {
            GameEvents.OnFundsChanged.Fire(Funding.Instance.Funds, TransactionReasons.None);
        }
        private void ResearchAndDevelopmentCallback(ConfigNode configNode)
        {
            GameEvents.OnScienceChanged.Fire(ResearchAndDevelopment.Instance.Science, TransactionReasons.None);
        }
        private void ReputationCallback(ConfigNode configNode)
        {
            GameEvents.OnReputationChanged.Fire(Reputation.CurrentRep, TransactionReasons.None);
        }
        //Kerbal fixups
        // When a contract that references kerbals is accepted, make sure those
        // kerbals exist locally and are synced to the server, so other players
        // see consistent rosters.
        private void OnContractAccepted(Contract contract)
        {
            DarkLog.Debug("Contract accepted, state: " + contract.ContractState);
            ConfigNode contractNode = new ConfigNode();
            contract.Save(contractNode);
            if (contractNode.GetValue("type") == "RecoverAsset")
            {
                // NOTE(review): GetValue returns null for missing keys, so
                // .Trim() / uint.Parse would throw here if "kerbalName" or
                // "partID" is absent — confirm RecoverAsset always saves both.
                string kerbalName = contractNode.GetValue("kerbalName").Trim();
                uint partID = uint.Parse(contractNode.GetValue("partID"));
                if (!string.IsNullOrEmpty(kerbalName))
                {
                    ProtoCrewMember rescueKerbal = null;
                    if (!HighLogic.CurrentGame.CrewRoster.Exists(kerbalName))
                    {
                        DarkLog.Debug("Generating missing kerbal " + kerbalName + " for rescue contract");
                        int kerbalGender = int.Parse(contractNode.GetValue("gender"));
                        rescueKerbal = HighLogic.CurrentGame.CrewRoster.GetNewKerbal(ProtoCrewMember.KerbalType.Unowned);
                        rescueKerbal.ChangeName(kerbalName);
                        rescueKerbal.gender = (ProtoCrewMember.Gender)kerbalGender;
                        rescueKerbal.rosterStatus = ProtoCrewMember.RosterStatus.Assigned;
                    }
                    else
                    {
                        rescueKerbal = HighLogic.CurrentGame.CrewRoster[kerbalName];
                        DarkLog.Debug("Kerbal " + kerbalName + " already exists, skipping respawn");
                    }
                    if (rescueKerbal != null) vesselWorker.SendKerbalIfDifferent(rescueKerbal);
                }
                if (partID != 0)
                {
                    // Sync the vessel holding the contract part, if it exists.
                    Vessel contractVessel = FinePrint.Utilities.VesselUtilities.FindVesselWithPartIDs(new List<uint> { partID });
                    if (contractVessel != null) vesselWorker.SendVesselUpdateIfNeeded(contractVessel, true);
                }
            }
            else if (contractNode.GetValue("type") == "TourismContract")
            {
                // "tourists" is a '|'-separated name list saved by the contract.
                string tourists = contractNode.GetValue("tourists");
                if (tourists != null)
                {
                    string[] touristsNames = tourists.Split(new char[] { '|' });
                    foreach (string touristName in touristsNames)
                    {
                        ProtoCrewMember pcm = null;
                        if (!HighLogic.CurrentGame.CrewRoster.Exists(touristName))
                        {
                            DarkLog.Debug("Spawning missing tourist " + touristName + " for tourism contract");
                            pcm = HighLogic.CurrentGame.CrewRoster.GetNewKerbal(ProtoCrewMember.KerbalType.Tourist);
                            pcm.rosterStatus = ProtoCrewMember.RosterStatus.Available;
                            pcm.ChangeName(touristName);
                        }
                        else
                        {
                            DarkLog.Debug("Skipped respawn of existing tourist " + touristName);
                            pcm = HighLogic.CurrentGame.CrewRoster[touristName];
                        }
                        if (pcm != null) vesselWorker.SendKerbalIfDifferent(pcm);
                    }
                }
            }
        }
        // Invoked via dmpGame.updateEvent (registered in the constructor).
        // Lazily (un)registers game hooks to track workerEnabled, runs the
        // periodic catch-all upload, and drains scenario data received from
        // the server.
        private void Update()
        {
            if (workerEnabled)
            {
                if (!registered) RegisterGameHooks();
                if (!blockScenarioDataSends)
                {
                    // Timer-based send; event hooks cover most changes sooner.
                    if ((Client.realtimeSinceStartup - lastScenarioSendTime) > SEND_SCENARIO_DATA_INTERVAL)
                    {
                        SendScenarioModules(false);
                    }
                    lock (scenarioQueue)
                    {
                        while (scenarioQueue.Count > 0)
                        {
                            ScenarioEntry se = scenarioQueue.Dequeue();
                            LoadScenarioData(se);
                        }
                    }
                }
            }
            else
            {
                if (registered) UnregisterGameHooks();
            }
        }
private void LoadScenarioTypes()
{
allScenarioTypesInAssemblies = new Dictionary<string, Type>();
foreach (AssemblyLoader.LoadedAssembly something in AssemblyLoader.loadedAssemblies)
{
foreach (Type scenarioType in something.assembly.GetTypes())
{
if (scenarioType.IsSubclassOf(typeof(ScenarioModule)))
{
if (!allScenarioTypesInAssemblies.ContainsKey(scenarioType.Name))
{
allScenarioTypesInAssemblies.Add(scenarioType.Name, scenarioType);
}
}
}
}
}
private bool IsScenarioModuleAllowed(string scenarioName)
{
if (scenarioName == null)
{
return false;
}
//Blacklist asteroid module from every game mode
if (scenarioName == "DiscoverableObjects")
{
//We hijack this and enable / disable it if we need to.
return false;
}
if (allScenarioTypesInAssemblies == null)
{
//Load type dictionary on first use
LoadScenarioTypes();
}
if (!allScenarioTypesInAssemblies.ContainsKey(scenarioName))
{
//Module missing
return false;
}
Type scenarioType = allScenarioTypesInAssemblies[scenarioName];
KSPScenario[] scenarioAttributes = (KSPScenario[])scenarioType.GetCustomAttributes(typeof(KSPScenario), true);
if (scenarioAttributes.Length > 0)
{
KSPScenario attribute = scenarioAttributes[0];
bool protoAllowed = false;
if (HighLogic.CurrentGame.Mode == Game.Modes.CAREER)
{
protoAllowed = protoAllowed || attribute.HasCreateOption(ScenarioCreationOptions.AddToExistingCareerGames);
protoAllowed = protoAllowed || attribute.HasCreateOption(ScenarioCreationOptions.AddToNewCareerGames);
}
if (HighLogic.CurrentGame.Mode == Game.Modes.SCIENCE_SANDBOX)
{
protoAllowed = protoAllowed || attribute.HasCreateOption(ScenarioCreationOptions.AddToExistingScienceSandboxGames);
protoAllowed = protoAllowed || attribute.HasCreateOption(ScenarioCreationOptions.AddToNewScienceSandboxGames);
}
if (HighLogic.CurrentGame.Mode == Game.Modes.SANDBOX)
{
protoAllowed = protoAllowed || attribute.HasCreateOption(ScenarioCreationOptions.AddToExistingSandboxGames);
protoAllowed = protoAllowed || attribute.HasCreateOption(ScenarioCreationOptions.AddToNewSandboxGames);
}
return protoAllowed;
}
//Scenario is not marked with KSPScenario - let's load it anyway.
return true;
}
        // Serializes every allowed loaded scenario module and sends those
        // whose SHA256 changed since the last send/load. Also resets the
        // periodic-send timer.
        // Note the ByteArray ownership: skipped buffers are released here,
        // while sent buffers are handed to networkWorker.
        public void SendScenarioModules(bool highPriority)
        {
            lastScenarioSendTime = Client.realtimeSinceStartup;
            List<string> scenarioName = new List<string>();
            List<ByteArray> scenarioData = new List<ByteArray>();
            foreach (ScenarioModule sm in ScenarioRunner.GetLoadedModules())
            {
                string scenarioType = sm.GetType().Name;
                if (!IsScenarioModuleAllowed(scenarioType))
                {
                    continue;
                }
                try
                {
                    ConfigNode scenarioNode = new ConfigNode();
                    sm.Save(scenarioNode);
                    ByteArray scenarioBytes = configNodeSerializer.Serialize(scenarioNode);
                    string scenarioHash = Common.CalculateSHA256Hash(scenarioBytes);
                    if (scenarioBytes.Length == 0)
                    {
                        DarkLog.Debug("Error writing scenario data for " + scenarioType);
                        ByteRecycler.ReleaseObject(scenarioBytes);
                        continue;
                    }
                    if (checkData.ContainsKey(scenarioType) ? (checkData[scenarioType] == scenarioHash) : false)
                    {
                        //Data is the same since last time - Skip it.
                        ByteRecycler.ReleaseObject(scenarioBytes);
                        continue;
                    }
                    else
                    {
                        checkData[scenarioType] = scenarioHash;
                    }
                    scenarioName.Add(scenarioType);
                    scenarioData.Add(scenarioBytes);
                }
                catch (Exception e)
                {
                    // A module that fails to save is skipped and warned about
                    // once; DeployedScience failures are known/noisy, so no
                    // on-screen message for those.
                    string fullName = sm.GetType().FullName;
                    if (!warnedModules.Contains(fullName))
                    {
                        DarkLog.Debug("Unable to save module data from " + fullName + ", skipping upload of this module. Exception: " + e);
                        warnedModules.Add(fullName);
                        if (!fullName.Contains("Expansions.Serenity.DeployedScience"))
                        {
                            ScreenMessages.PostScreenMessage("DMP was unable to save " + fullName + ", this module data will be lost.", 30f, ScreenMessageStyle.UPPER_CENTER);
                        }
                    }
                }
            }
            if (scenarioName.Count > 0)
            {
                if (highPriority)
                {
                    networkWorker.SendScenarioModuleDataHighPriority(scenarioName.ToArray(), scenarioData.ToArray());
                }
                else
                {
                    networkWorker.SendScenarioModuleData(scenarioName.ToArray(), scenarioData.ToArray());
                }
            }
        }
public void LoadScenarioDataIntoGame()
{
lock (scenarioQueue)
{
while (scenarioQueue.Count > 0)
{
ScenarioEntry scenarioEntry = scenarioQueue.Dequeue();
if (scenarioEntry.scenarioName == "ProgressTracking")
{
CreateMissingKerbalsInProgressTrackingSoTheGameDoesntBugOut(scenarioEntry.scenarioNode);
}
CheckForBlankSceneSoTheGameDoesntBugOut(scenarioEntry);
ProtoScenarioModule psm = new ProtoScenarioModule(scenarioEntry.scenarioNode);
if (psm != null)
{
if (IsScenarioModuleAllowed(psm.moduleName))
{
DarkLog.Debug("Loading " + psm.moduleName + " scenario data");
HighLogic.CurrentGame.scenarios.Add(psm);
ByteArray scenarioHashBytes = configNodeSerializer.Serialize(scenarioEntry.scenarioNode);
checkData[scenarioEntry.scenarioName] = Common.CalculateSHA256Hash(scenarioHashBytes);
ByteRecycler.ReleaseObject(scenarioHashBytes);
}
else
{
DarkLog.Debug("Skipping " + psm.moduleName + " scenario data in " + dmpGame.gameMode + " mode");
}
}
}
}
}
private ConfigNode CreateProcessedPartNode(string part, uint id, params ProtoCrewMember[] crew)
{
ConfigNode configNode = ProtoVessel.CreatePartNode(part, id, crew);
if (part != "kerbalEVA")
{
ConfigNode[] nodes = configNode.GetNodes("RESOURCE");
for (int i = 0; i < nodes.Length; i++)
{
ConfigNode configNode2 = nodes[i];
if (configNode2.HasValue("amount"))
{
configNode2.SetValue("amount", 0.ToString(System.Globalization.CultureInfo.InvariantCulture), false);
}
}
}
configNode.SetValue("flag", "Squad/Flags/default", true);
return configNode;
}
        //Defends against bug #172
        // Recursively walks the ProgressTracking node tree; any kerbal named
        // in a "crew" node's comma-space-delimited "crews" value that is
        // missing from the roster is generated locally and synced.
        private void CreateMissingKerbalsInProgressTrackingSoTheGameDoesntBugOut(ConfigNode progressTrackingNode)
        {
            foreach (ConfigNode possibleNode in progressTrackingNode.nodes)
            {
                //Recursion (noun): See Recursion.
                CreateMissingKerbalsInProgressTrackingSoTheGameDoesntBugOut(possibleNode);
            }
            //The kerbals are kept in a ConfigNode named 'crew', with 'crews' as a comma space delimited array of names.
            if (progressTrackingNode.name == "crew")
            {
                string kerbalNames = progressTrackingNode.GetValue("crews");
                if (!String.IsNullOrEmpty(kerbalNames))
                {
                    string[] kerbalNamesSplit = kerbalNames.Split(new string[] { ", " }, StringSplitOptions.RemoveEmptyEntries);
                    foreach (string kerbalName in kerbalNamesSplit)
                    {
                        if (!HighLogic.CurrentGame.CrewRoster.Exists(kerbalName))
                        {
                            DarkLog.Debug("Generating missing kerbal from ProgressTracking: " + kerbalName);
                            ProtoCrewMember pcm = CrewGenerator.RandomCrewMemberPrototype(ProtoCrewMember.KerbalType.Crew);
                            pcm.ChangeName(kerbalName);
                            HighLogic.CurrentGame.CrewRoster.AddCrewMember(pcm);
                            //Also send it off to the server
                            vesselWorker.SendKerbalIfDifferent(pcm);
                        }
                    }
                }
            }
        }
        //If the scene field is blank, KSP will throw an error while starting the game, meaning players will be unable to join the server.
        private void CheckForBlankSceneSoTheGameDoesntBugOut(ScenarioEntry scenarioEntry)
        {
            // NOTE(review): GetValue returns null when "scene" is absent,
            // which this == string.Empty comparison does not catch — confirm
            // whether a missing value needs the same repair.
            if (scenarioEntry.scenarioNode.GetValue("scene") == string.Empty)
            {
                string nodeName = scenarioEntry.scenarioName;
                ScreenMessages.PostScreenMessage(nodeName + " is badly behaved!");
                DarkLog.Debug(nodeName + " is badly behaved!");
                // Repair with a non-empty scene list so game start succeeds.
                scenarioEntry.scenarioNode.SetValue("scene", "7, 8, 5, 6, 9");
            }
        }
public void UpgradeTheAstronautComplexSoTheGameDoesntBugOut()
{
ProtoScenarioModule sm = HighLogic.CurrentGame.scenarios.Find(psm => psm.moduleName == "ScenarioUpgradeableFacilities");
if (sm != null)
{
if (ScenarioUpgradeableFacilities.protoUpgradeables.ContainsKey("SpaceCenter/AstronautComplex"))
{
foreach (Upgradeables.UpgradeableFacility uf in ScenarioUpgradeableFacilities.protoUpgradeables["SpaceCenter/AstronautComplex"].facilityRefs)
{
DarkLog.Debug("Setting astronaut complex to max level");
uf.SetLevel(uf.MaxLevel);
}
}
}
}
public void LoadMissingScenarioDataIntoGame()
{
List<KSPScenarioType> validScenarios = KSPScenarioType.GetAllScenarioTypesInAssemblies();
foreach (KSPScenarioType validScenario in validScenarios)
{
if (HighLogic.CurrentGame.scenarios.Exists(psm => psm.moduleName == validScenario.ModuleType.Name))
{
continue;
}
bool loadModule = false;
if (HighLogic.CurrentGame.Mode == Game.Modes.CAREER)
{
loadModule = validScenario.ScenarioAttributes.HasCreateOption(ScenarioCreationOptions.AddToNewCareerGames);
}
if (HighLogic.CurrentGame.Mode == Game.Modes.SCIENCE_SANDBOX)
{
loadModule = validScenario.ScenarioAttributes.HasCreateOption(ScenarioCreationOptions.AddToNewScienceSandboxGames);
}
if (HighLogic.CurrentGame.Mode == Game.Modes.SANDBOX)
{
loadModule = validScenario.ScenarioAttributes.HasCreateOption(ScenarioCreationOptions.AddToNewSandboxGames);
}
if (loadModule)
{
DarkLog.Debug("Creating new scenario module " + validScenario.ModuleType.Name);
HighLogic.CurrentGame.AddProtoScenarioModule(validScenario.ModuleType, validScenario.ScenarioAttributes.TargetScenes);
}
}
}
        // Applies one scenario entry received from the server to the running
        // game. Skips disallowed or unchanged modules; honors the registered
        // before/after callbacks; on any load failure sets
        // blockScenarioDataSends so bad state is never uploaded.
        public void LoadScenarioData(ScenarioEntry entry)
        {
            if (!IsScenarioModuleAllowed(entry.scenarioName))
            {
                DarkLog.Debug("Skipped '" + entry.scenarioName + "' scenario data in " + dmpGame.gameMode + " mode");
                return;
            }
            //Load data from DMP
            if (entry.scenarioNode == null)
            {
                DarkLog.Debug(entry.scenarioName + " scenario data failed to create a ConfigNode!");
                ScreenMessages.PostScreenMessage("Scenario " + entry.scenarioName + " failed to load, blocking scenario uploads.", 10f, ScreenMessageStyle.UPPER_CENTER);
                blockScenarioDataSends = true;
                return;
            }
            //Load data into game
            if (DidScenarioChange(entry))
            {
                bool loaded = false;
                // Record the incoming hash so this data is not echoed back to
                // the server on the next send.
                ByteArray scenarioBytes = configNodeSerializer.Serialize(entry.scenarioNode);
                checkData[entry.scenarioName] = Common.CalculateSHA256Hash(scenarioBytes);
                ByteRecycler.ReleaseObject(scenarioBytes);
                foreach (ProtoScenarioModule psm in HighLogic.CurrentGame.scenarios)
                {
                    if (psm.moduleName == entry.scenarioName)
                    {
                        DarkLog.Debug("Loading existing " + entry.scenarioName + " scenario module");
                        try
                        {
                            if (psm.moduleRef == null)
                            {
                                DarkLog.Debug("Fixing null scenario module!");
                                psm.moduleRef = new ScenarioModule();
                            }
                            // A before-callback returning true claims the data
                            // and suppresses DMP's own load.
                            bool skipLoad = false;
                            if (beforeCallback.ContainsKey(psm.moduleName))
                            {
                                skipLoad = beforeCallback[psm.moduleName](entry.scenarioNode);
                            }
                            if (!skipLoad)
                            {
                                psm.moduleRef.Load(entry.scenarioNode);
                            }
                            if (afterCallback.ContainsKey(psm.moduleName))
                            {
                                afterCallback[psm.moduleName](entry.scenarioNode);
                            }
                        }
                        catch (Exception e)
                        {
                            DarkLog.Debug("Error loading " + entry.scenarioName + " scenario module, Exception: " + e);
                            blockScenarioDataSends = true;
                        }
                        loaded = true;
                    }
                }
                if (!loaded)
                {
                    DarkLog.Debug("Loading new " + entry.scenarioName + " scenario module");
                    LoadNewScenarioData(entry.scenarioNode);
                }
            }
        }
        // Adds a scenario module not present in the current game and loads it
        // immediately through ScenarioRunner.
        public void LoadNewScenarioData(ConfigNode newScenarioData)
        {
            ProtoScenarioModule newModule = new ProtoScenarioModule(newScenarioData);
            try
            {
                HighLogic.CurrentGame.scenarios.Add(newModule);
                newModule.Load(ScenarioRunner.Instance);
            }
            catch
            {
                // Same policy as LoadScenarioData: block further sends after a
                // failed load so bad local state is never uploaded.
                DarkLog.Debug("Error loading scenario data!");
                blockScenarioDataSends = true;
            }
        }
public void QueueScenarioData(string scenarioName, ConfigNode scenarioData)
{
lock (scenarioQueue)
{
ScenarioEntry entry = new ScenarioEntry();
entry.scenarioName = scenarioName;
entry.scenarioNode = scenarioData;
scenarioQueue.Enqueue(entry);
}
}
private bool DidScenarioChange(ScenarioEntry scenarioEntry)
{
string previousScenarioHash = null;
ByteArray scenarioBytes = configNodeSerializer.Serialize(scenarioEntry.scenarioNode);
string currentScenarioHash = Common.CalculateSHA256Hash(scenarioBytes);
ByteRecycler.ReleaseObject(scenarioBytes);
if (checkData.TryGetValue(scenarioEntry.scenarioName, out previousScenarioHash))
{
return previousScenarioHash != currentScenarioHash;
}
return true;
}
        // Disables the worker and detaches it from the game's update loop.
        public void Stop()
        {
            workerEnabled = false;
            dmpGame.updateEvent.Remove(updateAction);
        }
}
    // A queued scenario module payload received from the server: the module
    // name plus its deserialized ConfigNode.
    public class ScenarioEntry
    {
        public string scenarioName;
        public ConfigNode scenarioNode;
    }
}
| |
#define SQLITE_ASCII
#define SQLITE_DISABLE_LFS
#define SQLITE_ENABLE_OVERSIZE_CELL_CHECK
#define SQLITE_MUTEX_OMIT
#define SQLITE_OMIT_AUTHORIZATION
#define SQLITE_OMIT_DEPRECATED
#define SQLITE_OMIT_GET_TABLE
#define SQLITE_OMIT_INCRBLOB
#define SQLITE_OMIT_LOOKASIDE
#define SQLITE_OMIT_SHARED_CACHE
#define SQLITE_OMIT_UTF16
#define SQLITE_OMIT_WAL
#define SQLITE_OS_WIN
#define SQLITE_SYSTEM_MALLOC
#define VDBE_PROFILE_OFF
#define WINDOWS_MOBILE
#define NDEBUG
#define _MSC_VER
#define YYFALLBACK
using System;
using System.Diagnostics;
using System.Text;
using i64 = System.Int64;
using u8 = System.Byte;
using u32 = System.UInt32;
using Pgno = System.UInt32;
namespace Community.CsharpSqlite
{
using sqlite3_int64 = System.Int64;
public partial class Sqlite3
{
/*
** 2008 December 3
**
** The author disclaims copyright to this source code. In place of
** a legal notice, here is a blessing:
**
** May you do good and not evil.
** May you find forgiveness for yourself and forgive others.
** May you share freely, never taking more than you give.
**
*************************************************************************
**
** This module implements an object we call a "RowSet".
**
** The RowSet object is a collection of rowids. Rowids
** are inserted into the RowSet in an arbitrary order. Inserts
** can be intermixed with tests to see if a given rowid has been
** previously inserted into the RowSet.
**
** After all inserts are finished, it is possible to extract the
** elements of the RowSet in sorted order. Once this extraction
** process has started, no new elements may be inserted.
**
** Hence, the primitive operations for a RowSet are:
**
** CREATE
** INSERT
** TEST
** SMALLEST
** DESTROY
**
** The CREATE and DESTROY primitives are the constructor and destructor,
** obviously. The INSERT primitive adds a new element to the RowSet.
** TEST checks to see if an element is already in the RowSet. SMALLEST
** extracts the least value from the RowSet.
**
** The INSERT primitive might allocate additional memory. Memory is
** allocated in chunks so most INSERTs do no allocation. There is an
** upper bound on the size of allocated memory. No memory is freed
** until DESTROY.
**
** The TEST primitive includes a "batch" number. The TEST primitive
** will only see elements that were inserted before the last change
** in the batch number. In other words, if an INSERT occurs between
** two TESTs where the TESTs have the same batch number, then the
** value added by the INSERT will not be visible to the second TEST.
** The initial batch number is zero, so if the very first TEST contains
** a non-zero batch number, it will see all prior INSERTs.
**
** No INSERTs may occur after a SMALLEST. An assertion will fail if
** that is attempted.
**
** The cost of an INSERT is roughly constant. (Sometime new memory
** has to be allocated on an INSERT.) The cost of a TEST with a new
** batch number is O(NlogN) where N is the number of elements in the RowSet.
** The cost of a TEST using the same batch number is O(logN). The cost
** of the first SMALLEST is O(NlogN). Second and subsequent SMALLEST
** primitives are constant time. The cost of DESTROY is O(N).
**
** There is an added cost of O(N) when switching between TEST and
** SMALLEST primitives.
*************************************************************************
** Included in SQLite3 port to C#-SQLite; 2008 Noah B Hart
** C#-SQLite is an independent reimplementation of the SQLite software library
**
** SQLITE_SOURCE_ID: 2010-08-23 18:52:01 42537b60566f288167f1b5864a5435986838e3a3
**
*************************************************************************
*/
//#include "sqliteInt.h"
/*
** Target size for allocation chunks.
*/
//#define ROWSET_ALLOCATION_SIZE 1024
const int ROWSET_ALLOCATION_SIZE = 1024;
/*
** The number of rowset entries per allocation chunk.
*/
//#define ROWSET_ENTRY_PER_CHUNK \
// ((ROWSET_ALLOCATION_SIZE-8)/sizeof(struct RowSetEntry))
const int ROWSET_ENTRY_PER_CHUNK = 63;
/*
** Each entry in a RowSet is an instance of the following object.
*/
    // NOTE: pRight doubles as the "next" link while entries are kept on the
    // sorted pEntry list; pLeft is only used in tree form.
    public class RowSetEntry
    {
      public i64 v; /* ROWID value for this entry */
      public RowSetEntry pRight; /* Right subtree (larger entries) or list */
      public RowSetEntry pLeft; /* Left subtree (smaller entries) */
    };
    /*
    ** Index entries are allocated in large chunks (instances of the
    ** following structure) to reduce memory allocation overhead. The
    ** chunks are kept on a linked list so that they can be deallocated
    ** when the RowSet is destroyed.
    */
    public class RowSetChunk
    {
      public RowSetChunk pNextChunk; /* Next chunk on list of them all */
      public RowSetEntry[] aEntry = new RowSetEntry[ROWSET_ENTRY_PER_CHUNK]; /* Allocated entries */
    };
/*
** A RowSet is an instance of the following structure.
**
** A typedef of this structure is found in sqliteInt.h.
*/
    public class RowSet
    {
      public RowSetChunk pChunk; /* List of all chunk allocations */
      public sqlite3 db; /* The database connection */
      public RowSetEntry pEntry; /* List of entries using pRight */
      public RowSetEntry pLast; /* Last entry on the pEntry list */
      public RowSetEntry[] pFresh; /* Source of new entry objects */
      public RowSetEntry pTree; /* Binary tree of entries */
      public int nFresh; /* Number of objects on pFresh */
      public bool isSorted; /* True if pEntry is sorted */
      public u8 iBatch; /* Current insert batch */
      // Managed-port constructor: pre-allocates an N-slot entry pool instead
      // of carving entries out of bulk memory as the C original does.
      public RowSet( sqlite3 db, int N )
      {
        this.pChunk = null;
        this.db = db;
        this.pEntry = null;
        this.pLast = null;
        this.pFresh = new RowSetEntry[N];
        this.pTree = null;
        this.nFresh = N;
        this.isSorted = true;
        this.iBatch = 0;
      }
    };
/*
** Turn bulk memory into a RowSet object. N bytes of memory
** are available at pSpace. The db pointer is used as a memory context
** for any subsequent allocations that need to occur.
** Return a pointer to the new RowSet object.
**
** It must be the case that N is sufficient to make a Rowset. If not
** an assertion fault occurs.
**
** If N is larger than the minimum, use the surplus as an initial
** allocation of entries available to be filled.
*/
    // Managed port: constructs a new empty RowSet with room for N entries.
    // pSpace is unused here — the C original carved the object and its entry
    // pool out of that bulk memory (see the retained C code below).
    static RowSet sqlite3RowSetInit( sqlite3 db, object pSpace, u32 N )
    {
      RowSet p = new RowSet( db, (int)N );
      //Debug.Assert(N >= ROUND8(sizeof(*p)) );
      //  p = pSpace;
      //  p.pChunk = 0;
      //  p.db = db;
      //  p.pEntry = 0;
      //  p.pLast = 0;
      //  p.pTree = 0;
      //  p.pFresh =(struct RowSetEntry*)(ROUND8(sizeof(*p)) + (char*)p);
      //  p.nFresh = (u16)((N - ROUND8(sizeof(*p)))/sizeof(struct RowSetEntry));
      //  p.isSorted = 1;
      //  p.iBatch = 0;
      return p;
    }
/*
** Deallocate all chunks from a RowSet. This frees all memory that
** the RowSet has allocated over its lifetime. This routine is
** the destructor for the RowSet.
*/
static void sqlite3RowSetClear( RowSet p )
{
RowSetChunk pChunk, pNextChunk;
for ( pChunk = p.pChunk; pChunk != null; pChunk = pNextChunk )
{
pNextChunk = pChunk.pNextChunk;
sqlite3DbFree( p.db, ref pChunk );
}
p.pChunk = null;
p.nFresh = 0;
p.pEntry = null;
p.pLast = null;
p.pTree = null;
p.isSorted = true;
}
/*
** Insert a new value into a RowSet.
**
** The mallocFailed flag of the database connection is set if a
** memory allocation fails.
*/
    // Appends rowid to p's entry list, allocating a fresh chunk of entry
    // slots when the pool is exhausted, and clears isSorted when the append
    // breaks ascending order.
    static void sqlite3RowSetInsert( RowSet p, i64 rowid )
    {
      RowSetEntry pEntry; /* The new entry */
      RowSetEntry pLast; /* The last prior entry */
      Debug.Assert( p != null );
      if ( p.nFresh == 0 )
      {
        RowSetChunk pNew;
        pNew = new RowSetChunk();//sqlite3DbMallocRaw(p.db, sizeof(*pNew));
        if ( pNew == null )
        {
          return;
        }
        pNew.pNextChunk = p.pChunk;
        p.pChunk = pNew;
        p.pFresh = pNew.aEntry;
        p.nFresh = ROWSET_ENTRY_PER_CHUNK;
      }
      // Managed port of the C pointer arithmetic: the next free slot is
      // indexed from the end of pFresh by the remaining count nFresh.
      p.pFresh[p.pFresh.Length - p.nFresh] = new RowSetEntry();
      pEntry = p.pFresh[p.pFresh.Length - p.nFresh];
      p.nFresh--;
      pEntry.v = rowid;
      pEntry.pRight = null;
      pLast = p.pLast;
      if ( pLast != null )
      {
        if ( p.isSorted && rowid <= pLast.v )
        {
          p.isSorted = false;
        }
        pLast.pRight = pEntry;
      }
      else
      {
        Debug.Assert( p.pEntry == null );/* Fires if INSERT after SMALLEST */
        p.pEntry = pEntry;
      }
      p.pLast = pEntry;
    }
/*
** Merge two lists of RowSetEntry objects. Remove duplicates.
**
** The input lists are connected via pRight pointers and are
** assumed to each already be in sorted order.
*/
static RowSetEntry rowSetMerge(
RowSetEntry pA, /* First sorted list to be merged */
RowSetEntry pB /* Second sorted list to be merged */
)
{
RowSetEntry head = new RowSetEntry();
RowSetEntry pTail;
pTail = head;
while ( pA != null && pB != null )
{
Debug.Assert( pA.pRight == null || pA.v <= pA.pRight.v );
Debug.Assert( pB.pRight == null || pB.v <= pB.pRight.v );
if ( pA.v < pB.v )
{
pTail.pRight = pA;
pA = pA.pRight;
pTail = pTail.pRight;
}
else if ( pB.v < pA.v )
{
pTail.pRight = pB;
pB = pB.pRight;
pTail = pTail.pRight;
}
else
{
pA = pA.pRight;
}
}
if ( pA != null )
{
Debug.Assert( pA.pRight == null || pA.v <= pA.pRight.v );
pTail.pRight = pA;
}
else
{
Debug.Assert( pB == null || pB.pRight == null || pB.v <= pB.pRight.v );
pTail.pRight = pB;
}
return head.pRight;
}
/*
** Sort all elements on the pEntry list of the RowSet into ascending order.
*/
static void rowSetSort( RowSet p )
{
u32 i;
RowSetEntry pEntry;
// aBucket[i] holds a sorted sub-list of length 2^i; the loop below acts
// as a binary-counter merge sort (same technique as the original C code).
RowSetEntry[] aBucket = new RowSetEntry[40];
Debug.Assert( p.isSorted == false );
//memset(aBucket, 0, sizeof(aBucket));
// Detach one entry at a time and "carry" it up through the buckets,
// merging with each occupied slot along the way.
while ( p.pEntry != null )
{
pEntry = p.pEntry;
p.pEntry = pEntry.pRight;
pEntry.pRight = null;
for ( i = 0; aBucket[i] != null; i++ )
{
pEntry = rowSetMerge( aBucket[i], pEntry );
aBucket[i] = null;
}
aBucket[i] = pEntry;
}
// Fold all remaining bucket lists into a single sorted list.
pEntry = null;
for ( i = 0; i < aBucket.Length; i++ )//sizeof(aBucket)/sizeof(aBucket[0])
{
pEntry = rowSetMerge( pEntry, aBucket[i] );
}
// pLast is meaningless once sorted; clear it and mark the set sorted.
p.pEntry = pEntry;
p.pLast = null;
p.isSorted = true;
}
/*
** The input, pIn, is a binary tree (or subtree) of RowSetEntry objects.
** Convert this tree into a linked list connected by the pRight pointers
** and return pointers to the first and last elements of the new list.
*/
static void rowSetTreeToList(
RowSetEntry pIn, /* Root of the input tree */
ref RowSetEntry ppFirst, /* Write head of the output list here */
ref RowSetEntry ppLast /* Write tail of the output list here */
)
{
Debug.Assert( pIn != null );
if ( pIn.pLeft != null )
{
// 'p' only satisfies C#'s definite-assignment rule; the recursive call
// overwrites it with the tail of the flattened left subtree, which is
// then linked to this node via pRight.
RowSetEntry p = new RowSetEntry();
rowSetTreeToList( pIn.pLeft, ref ppFirst, ref p );
p.pRight = pIn;
}
else
{
// No left subtree: this node is the first element of the list.
ppFirst = pIn;
}
if ( pIn.pRight != null )
{
// Passing 'ref pIn.pRight' makes pIn.pRight become the head of the
// flattened right subtree, linking this node into the list.
rowSetTreeToList( pIn.pRight, ref pIn.pRight, ref ppLast );
}
else
{
ppLast = pIn;
}
Debug.Assert( ( ppLast ).pRight == null );
}
/*
** Convert a sorted list of elements (connected by pRight) into a binary
** tree with depth of iDepth. A depth of 1 means the tree contains a single
** node taken from the head of *ppList. A depth of 2 means a tree with
** three nodes. And so forth.
**
** Use as many entries from the input list as required and update the
** *ppList to point to the unused elements of the list. If the input
** list contains too few elements, then construct an incomplete tree
** and leave *ppList set to NULL.
**
** Return a pointer to the root of the constructed binary tree.
*/
static RowSetEntry rowSetNDeepTree(
ref RowSetEntry ppList,
int iDepth
)
{
RowSetEntry p; /* Root of the new tree */
RowSetEntry pLeft; /* Left subtree */
if ( ppList == null )
{
return null;
}
if ( iDepth == 1 )
{
// Depth-1 tree: a single node taken from the head of the list.
p = ppList;
ppList = p.pRight;
p.pLeft = p.pRight = null;
return p;
}
// Build the left subtree first; the next list element becomes the root.
pLeft = rowSetNDeepTree( ref ppList, iDepth - 1 );
p = ppList;
if ( p == null )
{
// List exhausted: the (possibly incomplete) left subtree is the result.
return pLeft;
}
p.pLeft = pLeft;
ppList = p.pRight;
p.pRight = rowSetNDeepTree( ref ppList, iDepth - 1 );
return p;
}
/*
** Convert a sorted list of elements into a binary tree. Make the tree
** as deep as it needs to be in order to contain the entire list.
*/
static RowSetEntry rowSetListToTree( RowSetEntry pList )
{
int iDepth; /* Depth of the tree so far */
RowSetEntry p; /* Current tree root */
RowSetEntry pLeft; /* Left subtree */
Debug.Assert( pList != null );
// Seed with a depth-1 tree holding the first element.
p = pList;
pList = p.pRight;
p.pLeft = p.pRight = null;
// Each pass makes the current tree the left child of the next list
// element and builds a matching right subtree of the same depth,
// roughly doubling the tree's capacity per iteration.
for ( iDepth = 1; pList != null; iDepth++ )
{
pLeft = p;
p = pList;
pList = p.pRight;
p.pLeft = pLeft;
p.pRight = rowSetNDeepTree( ref pList, iDepth );
}
return p;
}
/*
** Convert the list in p.pEntry into a sorted list if it is not
** sorted already. If there is a binary tree on p.pTree, then
** convert it into a list too and merge it into the p.pEntry list.
*/
static void rowSetToList( RowSet p )
{
  // Ensure the linked list is in ascending rowid order.
  if ( !p.isSorted )
  {
    rowSetSort( p );
  }

  // If a lookup tree exists, flatten it and fold it into the sorted list.
  if ( p.pTree != null )
  {
    RowSetEntry head = new RowSetEntry();
    RowSetEntry tail = new RowSetEntry();
    rowSetTreeToList( p.pTree, ref head, ref tail );
    p.pTree = null;
    p.pEntry = rowSetMerge( p.pEntry, head );
  }
}
/*
** Extract the smallest element from the RowSet.
** Write the element into *pRowid. Return 1 on success. Return
** 0 if the RowSet is already empty.
**
** After this routine has been called, the sqlite3RowSetInsert()
** routine may not be called again.
*/
static int sqlite3RowSetNext( RowSet p, ref i64 pRowid )
{
  // Normalize to a sorted list before extracting the minimum.
  rowSetToList( p );

  if ( p.pEntry == null )
  {
    return 0;
  }

  pRowid = p.pEntry.v;
  p.pEntry = p.pEntry.pRight;
  if ( p.pEntry == null )
  {
    // List exhausted: release all chunk memory now.
    sqlite3RowSetClear( p );
  }
  return 1;
}
/*
** Check to see if element iRowid was inserted into the rowset as
** part of any insert batch prior to iBatch. Return 1 or 0.
*/
static int sqlite3RowSetTest( RowSet pRowSet, u8 iBatch, sqlite3_int64 iRowid )
{
  // On the first test of a new batch, fold any pending inserts into the
  // search tree and remember the batch number.
  if ( iBatch != pRowSet.iBatch )
  {
    if ( pRowSet.pEntry != null )
    {
      rowSetToList( pRowSet );
      pRowSet.pTree = rowSetListToTree( pRowSet.pEntry );
      pRowSet.pEntry = null;
      pRowSet.pLast = null;
    }
    pRowSet.iBatch = iBatch;
  }

  // Standard binary-tree search for iRowid.
  RowSetEntry p = pRowSet.pTree;
  while ( p != null )
  {
    if ( iRowid > p.v )
    {
      p = p.pRight;
    }
    else if ( iRowid < p.v )
    {
      p = p.pLeft;
    }
    else
    {
      return 1;
    }
  }
  return 0;
}
}
}
| |
//
// TreeStoreBackend.cs
//
// Author:
// Lluis Sanchez <lluis@xamarin.com>
//
// Copyright (c) 2011 Xamarin Inc
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using Xwt.Backends;
using Xwt.Drawing;
using Gtk;
#if XWT_GTK3
using TreeModel = Gtk.ITreeModel;
#endif
namespace Xwt.GtkBackend
{
// Concrete TreePosition for the GTK backend: wraps a Gtk.TreeIter plus a
// small cache of the most recently resolved child, tagged with the store
// version so stale cache data can be detected and discarded.
class IterPos: TreePosition
{
public IterPos ()
{
}
public IterPos (int treeVersion, Gtk.TreeIter iter)
{
this.Iter = iter;
this.Version = treeVersion;
}
public Gtk.TreeIter Iter;
// Cached result of the last child lookup (meaningful only while
// LastChildIndex != -1 and Version matches the store's version).
public Gtk.TreeIter LastChildIter;
public int LastChildIndex = -1;
// Cached child count; -1 means "not yet computed".
public int ChildrenCount = -1;
// Store version at the time the cache fields were filled.
public int Version;
}
// ITreeStoreBackend implementation backed by a Gtk.TreeStore. A version
// counter is bumped on every structural mutation (clear/insert/remove) so
// that cached IterPos data filled before the change is never trusted.
public class TreeStoreBackend: TableStoreBackend, ITreeStoreBackend
{
int version;
public Gtk.TreeStore Tree {
get { return (Gtk.TreeStore) Store; }
}
public override TreeModel InitializeModel (Type[] columnTypes)
{
return new Gtk.TreeStore (columnTypes);
}
public event EventHandler<TreeNodeEventArgs> NodeInserted;
public event EventHandler<TreeNodeChildEventArgs> NodeDeleted;
public event EventHandler<TreeNodeEventArgs> NodeChanged;
public event EventHandler<TreeNodeOrderEventArgs> NodesReordered;
// Casts pos to IterPos and drops its cached child data if the store
// has been modified since the cache was filled.
IterPos GetIterPos (TreePosition pos)
{
IterPos tpos = (IterPos) pos;
if (tpos != null && tpos.Version != version) {
tpos.LastChildIndex = -1;
tpos.ChildrenCount = -1;
}
return tpos;
}
public void Clear ()
{
version++;
Tree.Clear ();
}
// Returns the index-th child of pos (or a root node when pos is null),
// using the IterPos cache to avoid re-walking siblings when possible.
public TreePosition GetChild (TreePosition pos, int index)
{
IterPos tpos = GetIterPos (pos);
// Exact cache hit: reuse the stored iterator.
if (tpos != null && tpos.LastChildIndex == index)
return new IterPos (version, tpos.LastChildIter);
if (index == 0) {
if (tpos != null) {
Gtk.TreeIter it;
if (Tree.IterChildren (out it, tpos.Iter)) {
tpos.LastChildIter = it;
tpos.LastChildIndex = 0;
return new IterPos (version, it);
}
} else {
Gtk.TreeIter it;
if (Tree.GetIterFirst (out it))
return new IterPos (version, it);
}
return null;
}
if (tpos == null) {
// Root-level lookup; there is no IterPos to cache into.
Gtk.TreeIter it;
if (Tree.IterNthChild (out it, index))
return new IterPos (version, it);
else
return null;
}
if (tpos.LastChildIndex == -1 || tpos.LastChildIndex > index) {
// No usable cache: ask GTK for the nth child directly.
Gtk.TreeIter it;
if (Tree.IterNthChild (out it, tpos.Iter, index)) {
tpos.LastChildIter = it;
tpos.LastChildIndex = index;
return new IterPos (version, it);
} else
return null;
}
// tpos.LastChildIndex < index
// Walk forward from the cached sibling instead of starting over.
Gtk.TreeIter iter = tpos.LastChildIter;
for (int n = tpos.LastChildIndex; n < index; n++) {
if (!Tree.IterNext (ref iter))
return null;
}
tpos.LastChildIter = iter;
tpos.LastChildIndex = index;
return new IterPos (version, iter);
}
public int GetChildrenCount (TreePosition pos)
{
if (pos == null)
return Tree.IterNChildren ();
IterPos tpos = GetIterPos (pos);
if (tpos.ChildrenCount != -1)
return tpos.ChildrenCount;
// Compute and cache; GetIterPos invalidates on version mismatch.
return tpos.ChildrenCount = Tree.IterNChildren (tpos.Iter);
}
public void SetValue (TreePosition pos, int column, object value)
{
IterPos tpos = GetIterPos (pos);
SetValue (tpos.Iter, column, value);
if (NodeChanged != null)
NodeChanged (this, new TreeNodeEventArgs (pos));
}
public object GetValue (TreePosition pos, int column)
{
IterPos tpos = GetIterPos (pos);
return GetValue (tpos.Iter, column);
}
public TreePosition InsertBefore (TreePosition pos)
{
version++;
IterPos tpos = GetIterPos (pos);
var p = Tree.InsertNodeBefore (tpos.Iter);
var node = new IterPos (version, p);
if (NodeInserted != null)
NodeInserted (this, new TreeNodeEventArgs (node));
return node;
}
public TreePosition InsertAfter (TreePosition pos)
{
version++;
IterPos tpos = GetIterPos (pos);
var p = Tree.InsertNodeAfter (tpos.Iter);
var node = new IterPos (version, p);
if (NodeInserted != null)
NodeInserted (this, new TreeNodeEventArgs (node));
return node;
}
// Appends a child under pos, or a new root node when pos is null.
public TreePosition AddChild (TreePosition pos)
{
version++;
IterPos tpos = GetIterPos (pos);
Gtk.TreeIter it;
if (pos == null)
it = Tree.AppendNode ();
else
it = Tree.AppendNode (tpos.Iter);
var node = new IterPos (version, it);
if (NodeInserted != null)
NodeInserted (this, new TreeNodeEventArgs (node));
return node;
}
public void Remove (TreePosition pos)
{
version++;
IterPos tpos = GetIterPos (pos);
Gtk.TreeIter it = tpos.Iter;
// Capture the parent and child index before the node disappears,
// so the NodeDeleted event can describe what was removed.
var delPath = Tree.GetPath (it);
var eventArgs = new TreeNodeChildEventArgs (GetParent (tpos), delPath.Indices[delPath.Indices.Length - 1]);
Tree.Remove (ref it);
if (NodeDeleted != null)
NodeDeleted (this, eventArgs);
}
public TreePosition GetNext (TreePosition pos)
{
IterPos tpos = GetIterPos (pos);
Gtk.TreeIter it = tpos.Iter;
if (!Tree.IterNext (ref it))
return null;
return new IterPos (version, it);
}
// Computes the previous sibling by decrementing the last tree-path index
// and resolving that path back to an iterator.
public TreePosition GetPrevious (TreePosition pos)
{
IterPos tpos = GetIterPos (pos);
Gtk.TreePath path = Tree.GetPath (tpos.Iter);
int [] indices = path.Indices;
if (indices.Length < 1 || indices [indices.Length - 1] == 0)
return null;
indices [indices.Length - 1] --;
Gtk.TreePath previousPath = new Gtk.TreePath (indices);
Gtk.TreeIter previous;
if (!Tree.GetIter (out previous, previousPath))
return null;
return new IterPos (version, previous);
}
public TreePosition GetParent (TreePosition pos)
{
IterPos tpos = GetIterPos (pos);
Gtk.TreeIter it;
if (!Tree.IterParent (out it, tpos.Iter))
return null;
return new IterPos (version, it);
}
// Events on this backend are always raised; nothing to enable/disable.
public void EnableEvent (object eventId)
{
}
public void DisableEvent (object eventId)
{
}
}
| |
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
// **NOTE** This file was generated by a tool and any changes will be overwritten.
namespace Microsoft.Graph
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Http;
using System.Threading;
using System.Linq.Expressions;
/// <summary>
/// The type WorkbookTableColumnRequest.
/// </summary>
public partial class WorkbookTableColumnRequest : BaseRequest, IWorkbookTableColumnRequest
{
/// <summary>
/// Constructs a new WorkbookTableColumnRequest.
/// </summary>
/// <param name="requestUrl">The URL for the built request.</param>
/// <param name="client">The <see cref="IBaseClient"/> for handling requests.</param>
/// <param name="options">Query and header option name value pairs for the request.</param>
public WorkbookTableColumnRequest(
string requestUrl,
IBaseClient client,
IEnumerable<Option> options)
: base(requestUrl, client, options)
{
}
/// <summary>
/// Creates the specified WorkbookTableColumn using POST.
/// </summary>
/// <param name="workbookTableColumnToCreate">The WorkbookTableColumn to create.</param>
/// <returns>The created WorkbookTableColumn.</returns>
public System.Threading.Tasks.Task<WorkbookTableColumn> CreateAsync(WorkbookTableColumn workbookTableColumnToCreate)
{
// Convenience overload: delegates with a non-cancellable token.
return this.CreateAsync(workbookTableColumnToCreate, CancellationToken.None);
}
/// <summary>
/// Creates the specified WorkbookTableColumn using POST.
/// </summary>
/// <param name="workbookTableColumnToCreate">The WorkbookTableColumn to create.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The created WorkbookTableColumn.</returns>
public async System.Threading.Tasks.Task<WorkbookTableColumn> CreateAsync(WorkbookTableColumn workbookTableColumnToCreate, CancellationToken cancellationToken)
{
this.ContentType = "application/json";
this.Method = "POST";
var newEntity = await this.SendAsync<WorkbookTableColumn>(workbookTableColumnToCreate, cancellationToken).ConfigureAwait(false);
// Wire up paging support on any collection properties of the result.
this.InitializeCollectionProperties(newEntity);
return newEntity;
}
/// <summary>
/// Deletes the specified WorkbookTableColumn.
/// </summary>
/// <returns>The task to await.</returns>
public System.Threading.Tasks.Task DeleteAsync()
{
return this.DeleteAsync(CancellationToken.None);
}
/// <summary>
/// Deletes the specified WorkbookTableColumn.
/// </summary>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The task to await.</returns>
public async System.Threading.Tasks.Task DeleteAsync(CancellationToken cancellationToken)
{
this.Method = "DELETE";
await this.SendAsync<WorkbookTableColumn>(null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets the specified WorkbookTableColumn.
/// </summary>
/// <returns>The WorkbookTableColumn.</returns>
public System.Threading.Tasks.Task<WorkbookTableColumn> GetAsync()
{
return this.GetAsync(CancellationToken.None);
}
/// <summary>
/// Gets the specified WorkbookTableColumn.
/// </summary>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The WorkbookTableColumn.</returns>
public async System.Threading.Tasks.Task<WorkbookTableColumn> GetAsync(CancellationToken cancellationToken)
{
this.Method = "GET";
var retrievedEntity = await this.SendAsync<WorkbookTableColumn>(null, cancellationToken).ConfigureAwait(false);
this.InitializeCollectionProperties(retrievedEntity);
return retrievedEntity;
}
/// <summary>
/// Updates the specified WorkbookTableColumn using PATCH.
/// </summary>
/// <param name="workbookTableColumnToUpdate">The WorkbookTableColumn to update.</param>
/// <returns>The updated WorkbookTableColumn.</returns>
public System.Threading.Tasks.Task<WorkbookTableColumn> UpdateAsync(WorkbookTableColumn workbookTableColumnToUpdate)
{
return this.UpdateAsync(workbookTableColumnToUpdate, CancellationToken.None);
}
/// <summary>
/// Updates the specified WorkbookTableColumn using PATCH.
/// </summary>
/// <param name="workbookTableColumnToUpdate">The WorkbookTableColumn to update.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The updated WorkbookTableColumn.</returns>
public async System.Threading.Tasks.Task<WorkbookTableColumn> UpdateAsync(WorkbookTableColumn workbookTableColumnToUpdate, CancellationToken cancellationToken)
{
this.ContentType = "application/json";
this.Method = "PATCH";
var updatedEntity = await this.SendAsync<WorkbookTableColumn>(workbookTableColumnToUpdate, cancellationToken).ConfigureAwait(false);
this.InitializeCollectionProperties(updatedEntity);
return updatedEntity;
}
/// <summary>
/// Adds the specified expand value to the request.
/// </summary>
/// <param name="value">The expand value.</param>
/// <returns>The request object to send.</returns>
public IWorkbookTableColumnRequest Expand(string value)
{
this.QueryOptions.Add(new QueryOption("$expand", value));
return this;
}
/// <summary>
/// Adds the specified expand value to the request.
/// </summary>
/// <param name="expandExpression">The expression from which to calculate the expand value.</param>
/// <returns>The request object to send.</returns>
public IWorkbookTableColumnRequest Expand(Expression<Func<WorkbookTableColumn, object>> expandExpression)
{
if (expandExpression == null)
{
throw new ArgumentNullException(nameof(expandExpression));
}
// Translate the member-access expression into an OData $expand string.
string error;
string value = ExpressionExtractHelper.ExtractMembers(expandExpression, out error);
if (value == null)
{
throw new ArgumentException(error, nameof(expandExpression));
}
else
{
this.QueryOptions.Add(new QueryOption("$expand", value));
}
return this;
}
/// <summary>
/// Adds the specified select value to the request.
/// </summary>
/// <param name="value">The select value.</param>
/// <returns>The request object to send.</returns>
public IWorkbookTableColumnRequest Select(string value)
{
this.QueryOptions.Add(new QueryOption("$select", value));
return this;
}
/// <summary>
/// Adds the specified select value to the request.
/// </summary>
/// <param name="selectExpression">The expression from which to calculate the select value.</param>
/// <returns>The request object to send.</returns>
public IWorkbookTableColumnRequest Select(Expression<Func<WorkbookTableColumn, object>> selectExpression)
{
if (selectExpression == null)
{
throw new ArgumentNullException(nameof(selectExpression));
}
// Translate the member-access expression into an OData $select string.
string error;
string value = ExpressionExtractHelper.ExtractMembers(selectExpression, out error);
if (value == null)
{
throw new ArgumentException(error, nameof(selectExpression));
}
else
{
this.QueryOptions.Add(new QueryOption("$select", value));
}
return this;
}
/// <summary>
/// Initializes any collection properties after deserialization, like next requests for paging.
/// </summary>
/// <param name="workbookTableColumnToInitialize">The <see cref="WorkbookTableColumn"/> with the collection properties to initialize.</param>
private void InitializeCollectionProperties(WorkbookTableColumn workbookTableColumnToInitialize)
{
// Intentionally empty: WorkbookTableColumn declares no pageable
// collection properties, but the generator always emits this hook.
}
}
| |
using Lucene.Net.Codecs;
using Lucene.Net.Util;
using Lucene.Net.Util.Packed;
using System.Collections.Generic;
using System.Diagnostics;
namespace Lucene.Net.Index
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// Buffers up pending byte[] per doc, deref and sorting via
/// int ord, then flushes when segment flushes.
/// </summary>
internal class SortedDocValuesWriter : DocValuesWriter
{
// Deduplicating store of the byte[] values; assigns each unique value a term id.
internal readonly BytesRefHash Hash;
// Per-document term id (or EMPTY_ORD for docs with no value), in doc order.
private AppendingDeltaPackedLongBuffer Pending;
// Shared IndexWriter memory counter this writer reports into.
private readonly Counter IwBytesUsed;
private long BytesUsed; // this currently only tracks differences in 'pending'
private readonly FieldInfo FieldInfo;
// Sentinel ord for documents that have no value for this field.
private const int EMPTY_ORD = -1;
public SortedDocValuesWriter(FieldInfo fieldInfo, Counter iwBytesUsed)
{
this.FieldInfo = fieldInfo;
this.IwBytesUsed = iwBytesUsed;
Hash = new BytesRefHash(new ByteBlockPool(new ByteBlockPool.DirectTrackingAllocator(iwBytesUsed)), BytesRefHash.DEFAULT_CAPACITY, new BytesRefHash.DirectBytesStartArray(BytesRefHash.DEFAULT_CAPACITY, iwBytesUsed));
Pending = new AppendingDeltaPackedLongBuffer(PackedInts.COMPACT);
BytesUsed = Pending.RamBytesUsed();
iwBytesUsed.AddAndGet(BytesUsed);
}
// Records 'value' for document docID. Rejects duplicate values for the
// same doc, null values, and values too large for the byte block pool.
public virtual void AddValue(int docID, BytesRef value)
{
if (docID < Pending.Size())
{
throw new System.ArgumentException("DocValuesField \"" + FieldInfo.Name + "\" appears more than once in this document (only one value is allowed per field)");
}
if (value == null)
{
throw new System.ArgumentException("field \"" + FieldInfo.Name + "\": null value not allowed");
}
if (value.Length > (ByteBlockPool.BYTE_BLOCK_SIZE - 2))
{
throw new System.ArgumentException("DocValuesField \"" + FieldInfo.Name + "\" is too large, must be <= " + (ByteBlockPool.BYTE_BLOCK_SIZE - 2));
}
// Fill in any holes:
while (Pending.Size() < docID)
{
Pending.Add(EMPTY_ORD);
}
AddOneValue(value);
}
// Pads trailing documents (those with no value) with EMPTY_ORD so that
// Pending holds exactly maxDoc entries at flush time.
internal override void Finish(int maxDoc)
{
while (Pending.Size() < maxDoc)
{
Pending.Add(EMPTY_ORD);
}
UpdateBytesUsed();
}
private void AddOneValue(BytesRef value)
{
// Hash.Add returns a negative value when the term already exists.
int termID = Hash.Add(value);
if (termID < 0)
{
termID = -termID - 1;
}
else
{
// reserve additional space for each unique value:
// 1. when indexing, when hash is 50% full, rehash() suddenly needs 2*size ints.
// TODO: can this same OOM happen in THPF?
// 2. when flushing, we need 1 int per value (slot in the ordMap).
IwBytesUsed.AddAndGet(2 * RamUsageEstimator.NUM_BYTES_INT);
}
Pending.Add(termID);
UpdateBytesUsed();
}
// Reports the delta in Pending's RAM usage to the shared counter.
private void UpdateBytesUsed()
{
long newBytesUsed = Pending.RamBytesUsed();
IwBytesUsed.AddAndGet(newBytesUsed - BytesUsed);
BytesUsed = newBytesUsed;
}
internal override void Flush(SegmentWriteState state, DocValuesConsumer dvConsumer)
{
int maxDoc = state.SegmentInfo.DocCount;
Debug.Assert(Pending.Size() == maxDoc);
int valueCount = Hash.Size();
// sortedValues[sortedOrd] = termID; ordMap inverts it: ordMap[termID] = sortedOrd.
int[] sortedValues = Hash.Sort(BytesRef.UTF8SortedAsUnicodeComparer);
int[] ordMap = new int[valueCount];
for (int ord = 0; ord < valueCount; ord++)
{
ordMap[sortedValues[ord]] = ord;
}
dvConsumer.AddSortedField(FieldInfo, GetBytesRefEnumberable(valueCount, sortedValues),
// doc -> ord
GetOrdsEnumberable(maxDoc, ordMap));
}
internal override void Abort()
{
}
// Yields the unique values in sorted order (ord -> value).
private IEnumerable<BytesRef> GetBytesRefEnumberable(int valueCount, int[] sortedValues)
{
for (int i = 0; i < valueCount; ++i)
{
var scratch = new BytesRef();
yield return Hash.Get(sortedValues[i], scratch);
}
}
// Yields each document's sorted ord; EMPTY_ORD (-1) passes through unmapped.
private IEnumerable<long?> GetOrdsEnumberable(int maxDoc, int[] ordMap)
{
AppendingDeltaPackedLongBuffer.Iterator iter = Pending.GetIterator();
for (int i = 0; i < maxDoc; ++i)
{
int ord = (int)iter.Next();
yield return ord == -1 ? ord : ordMap[ord];
}
}
/*
private class IterableAnonymousInnerClassHelper : IEnumerable<BytesRef>
{
private readonly SortedDocValuesWriter OuterInstance;
private int ValueCount;
private int[] SortedValues;
public IterableAnonymousInnerClassHelper(SortedDocValuesWriter outerInstance, int valueCount, int[] sortedValues)
{
this.OuterInstance = outerInstance;
this.ValueCount = valueCount;
this.SortedValues = sortedValues;
}
// ord -> value
public virtual IEnumerator<BytesRef> GetEnumerator()
{
return new ValuesIterator(OuterInstance, SortedValues, ValueCount);
}
}
private class IterableAnonymousInnerClassHelper2 : IEnumerable<Number>
{
private readonly SortedDocValuesWriter OuterInstance;
private int MaxDoc;
private int[] OrdMap;
public IterableAnonymousInnerClassHelper2(SortedDocValuesWriter outerInstance, int maxDoc, int[] ordMap)
{
this.OuterInstance = outerInstance;
this.MaxDoc = maxDoc;
this.OrdMap = ordMap;
}
public virtual IEnumerator<Number> GetEnumerator()
{
return new OrdsIterator(OuterInstance, OrdMap, MaxDoc);
}
}
public override void Abort()
{
}
// iterates over the unique values we have in ram
private class ValuesIterator : IEnumerator<BytesRef>
{
private readonly SortedDocValuesWriter OuterInstance;
internal readonly int[] SortedValues;
internal readonly BytesRef Scratch = new BytesRef();
internal readonly int ValueCount;
internal int OrdUpto;
internal ValuesIterator(SortedDocValuesWriter outerInstance, int[] sortedValues, int valueCount)
{
this.OuterInstance = outerInstance;
this.SortedValues = sortedValues;
this.ValueCount = valueCount;
}
public override bool HasNext()
{
return OrdUpto < ValueCount;
}
public override BytesRef Next()
{
if (!HasNext())
{
throw new Exception();
}
OuterInstance.Hash.Get(SortedValues[OrdUpto], Scratch);
OrdUpto++;
return Scratch;
}
public override void Remove()
{
throw new System.NotSupportedException();
}
}
// iterates over the ords for each doc we have in ram
private class OrdsIterator : IEnumerator<Number>
{
internal bool InstanceFieldsInitialized = false;
internal virtual void InitializeInstanceFields()
{
Iter = OuterInstance.Pending.Iterator();
}
private readonly SortedDocValuesWriter OuterInstance;
internal AppendingDeltaPackedLongBuffer.Iterator Iter;
internal readonly int[] OrdMap;
internal readonly int MaxDoc;
internal int DocUpto;
internal OrdsIterator(SortedDocValuesWriter outerInstance, int[] ordMap, int maxDoc)
{
this.OuterInstance = outerInstance;
if (!InstanceFieldsInitialized)
{
InitializeInstanceFields();
InstanceFieldsInitialized = true;
}
this.OrdMap = ordMap;
this.MaxDoc = maxDoc;
Debug.Assert(outerInstance.Pending.Size() == maxDoc);
}
public override bool HasNext()
{
return DocUpto < MaxDoc;
}
public override Number Next()
{
if (!HasNext())
{
throw new Exception();
}
int ord = (int) Iter.next();
DocUpto++;
return ord == -1 ? ord : OrdMap[ord];
}
public override void Remove()
{
throw new System.NotSupportedException();
}
}*/
}
| |
/*
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the MIT License. See License.txt in the project root for license information.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using System.Web.Routing;
using Adxstudio.Xrm.AspNet.Cms;
using Adxstudio.Xrm.Blogs;
using Adxstudio.Xrm.Cms;
using Adxstudio.Xrm.Resources;
using Microsoft.Xrm.Client;
using Microsoft.Xrm.Portal.Configuration;
using Microsoft.Xrm.Portal.Web;
using Microsoft.Xrm.Portal.Web.Providers;
using Microsoft.Xrm.Sdk;
using Microsoft.Xrm.Sdk.Client;
namespace Adxstudio.Xrm.Web.Providers
{
public class AdxEntityUrlProvider : EntityUrlProvider
{
// Creates the provider. websiteProvider resolves the owning website for an
// entity; portalName is an optional portal configuration name kept for
// later use (null selects the default portal configuration).
public AdxEntityUrlProvider(IEntityWebsiteProvider websiteProvider, string portalName = null) : base(websiteProvider)
{
PortalName = portalName;
}
// Portal configuration name supplied at construction; may be null.
protected string PortalName { get; private set; }
/// <summary>
/// Resolves the application path (site-relative URL) for <paramref name="entity"/>.
/// Portal-specific entity types are handled explicitly; everything else falls
/// through to the base entity URL provider. Returns null when no URL can be
/// computed for the entity.
/// </summary>
/// <param name="context">Service context used to load related entities; must not be null.</param>
/// <param name="entity">Entity whose URL is requested; null yields null.</param>
/// <returns>The resolved <see cref="ApplicationPath"/>, or null.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="context"/> is null.</exception>
public override ApplicationPath GetApplicationPath(OrganizationServiceContext context, Entity entity)
{
	if (context == null)
	{
		throw new ArgumentNullException("context");
	}

	if (entity == null)
	{
		return null;
	}

	// Forum posts are addressed as an anchor on their parent thread's page.
	if (entity.LogicalName == "adx_communityforumpost")
	{
		var thread = entity.GetRelatedEntity(context, "adx_communityforumthread_communityforumpost".ToRelationship());

		if (thread != null)
		{
			return GetForumPostApplicationPath(context, entity, thread);
		}
	}

	// Blog comments are addressed as an anchor on their parent post's page.
	if (entity.LogicalName == "adx_blogpostcomment")
	{
		var post = entity.GetRelatedEntity(context, "adx_blogpost_blogpostcomment".ToRelationship());

		if (post != null)
		{
			return GetBlogPostCommentApplicationPath(context, entity, post);
		}
	}

	// Entity types with dedicated resolution helpers.
	if (entity.LogicalName == "adx_shortcut")
	{
		return GetShortcutApplicationPath(context, entity);
	}

	if (entity.LogicalName == "adx_ideaforum")
	{
		return GetIdeaForumApplicationPath(context, entity);
	}

	if (entity.LogicalName == "adx_idea")
	{
		return GetIdeaApplicationPath(context, entity);
	}

	if (entity.LogicalName == "adx_issue")
	{
		return GetIssueApplicationPath(context, entity);
	}

	if (entity.LogicalName == "incident")
	{
		return GetIncidentApplicationPath(context, entity);
	}

	if (entity.LogicalName == "kbarticle")
	{
		return GetKbArticleApplicationPath(context, entity);
	}

	if (entity.LogicalName == "knowledgearticle")
	{
		return GetKnowledgeArticleApplicationPath(context, entity);
	}

	if (entity.LogicalName == "category")
	{
		return GetCategoryApplicationPath(context, entity);
	}

	// We want new behaviour for adx_webpages -- paths for this entity will now have a trailing slash ('/').
	if (entity.LogicalName == "adx_webpage")
	{
		var path = base.GetApplicationPath(context, entity);

		// FIX: the base provider can return null (it does so in several of its
		// own branches); dereferencing path.ExternalUrl without this guard
		// throws a NullReferenceException for pages with no resolvable URL.
		if (path == null)
		{
			return null;
		}

		// If the path is an external URL (it shouldn't be, but just in case), return the original path untouched.
		if (path.ExternalUrl != null)
		{
			return path;
		}

		// If the path does not already have a trailing slash (it shouldn't), append one.
		return path.AppRelativePath.EndsWith("/")
			? path
			: ApplicationPath.FromAppRelativePath("{0}/".FormatWith(path.AppRelativePath));
	}

	// Support adx_webfiles with a parent adx_blogpost, instead of adx_webpage.
	if (entity.LogicalName == "adx_webfile" && entity.GetAttributeValue<EntityReference>("adx_blogpostid") != null)
	{
		var post = entity.GetRelatedEntity(context, "adx_blogpost_webfile".ToRelationship());

		if (post != null)
		{
			var postPath = GetApplicationPath(context, post);
			var filePartialUrl = entity.GetAttributeValue<string>("adx_partialurl");

			if (postPath != null && filePartialUrl != null)
			{
				return ApplicationPath.FromAppRelativePath("{0}/{1}".FormatWith(postPath.AppRelativePath.TrimEnd('/'), filePartialUrl));
			}
		}
	}

	// Table of partial-URL-addressed entities. Tuple items: candidate partial
	// URL attribute names, parent relationship, parent entity logical name,
	// optional site marker, and whether the path gets a trailing slash.
	var lookup = new Dictionary<string, Tuple<string[], Relationship, string, string, bool>>
	{
		{
			"adx_communityforumthread",
			new Tuple<string[], Relationship, string, string, bool>(
				new[] { "adx_communityforumthreadid" },
				"adx_communityforum_communityforumthread".ToRelationship(),
				"adx_communityforum",
				null,
				false)
		},
		{
			"adx_communityforum",
			new Tuple<string[], Relationship, string, string, bool>(
				new[] { "adx_partialurl" },
				"adx_webpage_communityforum".ToRelationship(),
				"adx_webpage",
				"Forums",
				false)
		},
		{
			"adx_event",
			new Tuple<string[], Relationship, string, string, bool>(
				new[] { "adx_partialurl" },
				"adx_webpage_event".ToRelationship(),
				"adx_webpage",
				"Events",
				false)
		},
		{
			"adx_survey",
			new Tuple<string[], Relationship, string, string, bool>(
				new[] { "adx_partialurl" },
				"adx_webpage_survey".ToRelationship(),
				"adx_webpage",
				"Surveys",
				false)
		},
		{
			"adx_blog",
			new Tuple<string[], Relationship, string, string, bool>(
				new[] { "adx_partialurl" },
				"adx_webpage_blog".ToRelationship(),
				"adx_webpage",
				null,
				true)
		},
		{
			"adx_blogpost",
			new Tuple<string[], Relationship, string, string, bool>(
				new[] { "adx_partialurl", "adx_blogpostid" },
				"adx_blog_blogpost".ToRelationship(),
				"adx_blog",
				null,
				true)
		},
	};

	Tuple<string[], Relationship, string, string, bool> urlData;

	if (lookup.TryGetValue(entity.LogicalName, out urlData))
	{
		// Use the first candidate attribute that holds a non-empty value.
		var partialUrlLogicalName = urlData.Item1.FirstOrDefault(logicalName =>
		{
			var partialUrlValue = entity.GetAttributeValue(logicalName);

			return partialUrlValue != null && !string.IsNullOrWhiteSpace(partialUrlValue.ToString());
		});

		if (partialUrlLogicalName == null)
		{
			return null;
		}

		var relationship = urlData.Item2;
		var siteMarker = urlData.Item4;
		var addTrailingSlash = urlData.Item5;

		// Recursively resolve the parent's path and append this entity's partial URL.
		var websiteRelativeUrl = GetApplicationPath(context, entity, partialUrlLogicalName, relationship, GetApplicationPath, siteMarker);

		if (websiteRelativeUrl != null)
		{
			if (addTrailingSlash && websiteRelativeUrl.PartialPath != null && !websiteRelativeUrl.PartialPath.EndsWith("/"))
			{
				websiteRelativeUrl = ApplicationPath.FromPartialPath("{0}/".FormatWith(websiteRelativeUrl.PartialPath));
			}

			// Anchor the website-relative path to the owning website's root.
			var website = WebsiteProvider.GetWebsite(context, entity);

			var path = WebsitePathUtility.ToAbsolute(website, websiteRelativeUrl.PartialPath);

			return ApplicationPath.FromPartialPath(path);
		}
	}

	return base.GetApplicationPath(context, entity);
}
private ApplicationPath GetBlogPostCommentApplicationPath(OrganizationServiceContext context, Entity comment, Entity post)
{
	// A comment's URL is its parent post's URL plus an anchor fragment.
	var postPath = GetApplicationPath(context, post);

	var canAppendAnchor = postPath != null
		&& postPath.AppRelativePath != null
		&& !postPath.AppRelativePath.Contains("#");

	if (!canAppendAnchor)
	{
		// No usable post path (or it already carries a fragment): return it as-is.
		return postPath;
	}

	return ApplicationPath.FromAppRelativePath("{0}#{1}".FormatWith(postPath.AppRelativePath, BlogPostComment.GetAnchorName(comment.Id)));
}
private ApplicationPath GetForumPostApplicationPath(OrganizationServiceContext context, Entity post, Entity thread)
{
	// A forum post's URL is its parent thread's URL plus an anchor fragment.
	var threadPath = GetApplicationPath(context, thread);

	var canAppendAnchor = threadPath != null
		&& threadPath.AppRelativePath != null
		&& !threadPath.AppRelativePath.Contains("#");

	if (!canAppendAnchor)
	{
		// No usable thread path (or it already carries a fragment): return it as-is.
		return threadPath;
	}

	return ApplicationPath.FromAppRelativePath("{0}#{1}".FormatWith(threadPath.AppRelativePath, post.Id));
}
/// <summary>
/// Resolves the path a shortcut points at. Shortcut target relationships are probed
/// in priority order (web page, web file, event, community forum); the first related
/// entity found is delegated to the general path resolution. When no related entity
/// exists, falls back to the shortcut's external URL attribute, if set.
/// </summary>
private ApplicationPath GetShortcutApplicationPath(OrganizationServiceContext context, Entity shortcut)
{
    shortcut.AssertEntityName("adx_shortcut");
    // Priority order matters: a web page target wins over a web file, and so on.
    var targetRelationships = new[]
    {
        "adx_webpage_shortcut",
        "adx_webfile_shortcut",
        "adx_event_shortcut",
        "adx_communityforum_shortcut"
    };
    foreach (var schemaName in targetRelationships)
    {
        var target = shortcut.GetRelatedEntity(context, schemaName.ToRelationship());
        if (target != null)
        {
            return GetApplicationPath(context, target);
        }
    }
    var externalUrl = shortcut.GetAttributeValue<string>("adx_externalurl");
    return string.IsNullOrEmpty(externalUrl) ? null : ApplicationPath.FromExternalUrl(externalUrl);
}
/// <summary>
/// Builds the URL of an idea forum via the "Ideas" MVC area route, using the forum's
/// partial URL. Returns null when there is no HTTP context, no partial URL, or no
/// matching route.
/// </summary>
private ApplicationPath GetIdeaForumApplicationPath(OrganizationServiceContext context, Entity ideaForum)
{
    ideaForum.AssertEntityName("adx_ideaforum");
    var httpContext = HttpContext.Current;
    if (httpContext == null)
    {
        return null;
    }
    var partialUrl = ideaForum.GetAttributeValue<string>("adx_partialurl");
    if (string.IsNullOrEmpty(partialUrl))
    {
        return null;
    }
    // Reuse the already null-checked httpContext instead of re-reading HttpContext.Current.
    var httpContextWrapper = new HttpContextWrapper(httpContext);
    var routeData = RouteTable.Routes.GetRouteData(httpContextWrapper);
    if (routeData == null)
    {
        return null;
    }
    var urlHelper = new UrlHelper(new RequestContext(httpContextWrapper, routeData));
    // If multi-language is enabled, return URL in the appropriate multi-language URL format.
    var contextLanguageInfo = httpContext.GetContextLanguageInfo();
    string url;
    if (contextLanguageInfo.IsCrmMultiLanguageEnabled && ContextLanguageInfo.DisplayLanguageCodeInUrl)
    {
        var actionUrl = urlHelper.Action("Ideas", "Ideas", new { ideaForumPartialUrl = partialUrl, area = "Ideas" });
        // Guard against a null route result (e.g. deactivated root page) before building
        // the absolute URI, mirroring GetKnowledgeArticleApplicationPath.
        if (actionUrl == null)
        {
            return null;
        }
        url = string.Format("{0}{1}", httpContext.Request.Url.GetLeftPart(UriPartial.Authority), actionUrl);
        url = contextLanguageInfo.FormatUrlWithLanguage(false, contextLanguageInfo.ContextLanguage.Code, new Uri(url));
    }
    else
    {
        url = urlHelper.Action("Ideas", "Ideas", new { ideaForumPartialUrl = partialUrl, area = "Ideas" });
    }
    return url == null ? null : ApplicationPath.FromAbsolutePath(url);
}
/// <summary>
/// Builds the URL of a single idea via the "Ideas" MVC area route. Requires both the
/// idea's partial URL and its parent idea forum's partial URL; returns null when
/// either is missing or the route cannot be resolved.
/// </summary>
private ApplicationPath GetIdeaApplicationPath(OrganizationServiceContext context, Entity idea)
{
    idea.AssertEntityName("adx_idea");
    var httpContext = HttpContext.Current;
    if (httpContext == null)
    {
        return null;
    }
    var partialUrl = idea.GetAttributeValue<string>("adx_partialurl");
    if (string.IsNullOrEmpty(partialUrl))
    {
        return null;
    }
    var ideaForumEntityReference = idea.GetAttributeValue<EntityReference>("adx_ideaforumid");
    if (ideaForumEntityReference == null)
    {
        return null;
    }
    var forum = context.CreateQuery("adx_ideaforum").FirstOrDefault(e => e.GetAttributeValue<EntityReference>("adx_ideaforumid").Id == ideaForumEntityReference.Id);
    if (forum == null)
    {
        return null;
    }
    var forumPartialUrl = forum.GetAttributeValue<string>("adx_partialurl");
    if (string.IsNullOrEmpty(forumPartialUrl))
    {
        return null;
    }
    // Reuse the already null-checked httpContext instead of re-reading HttpContext.Current.
    var httpContextWrapper = new HttpContextWrapper(httpContext);
    var routeData = RouteTable.Routes.GetRouteData(httpContextWrapper);
    if (routeData == null)
    {
        return null;
    }
    var urlHelper = new UrlHelper(new RequestContext(httpContextWrapper, routeData));
    // If multi-language is enabled, return URL in the appropriate multi-language URL format.
    var contextLanguageInfo = httpContext.GetContextLanguageInfo();
    string url;
    if (contextLanguageInfo.IsCrmMultiLanguageEnabled && ContextLanguageInfo.DisplayLanguageCodeInUrl)
    {
        var actionUrl = urlHelper.Action("Ideas", "Ideas", new { ideaForumPartialUrl = forumPartialUrl, ideaPartialUrl = partialUrl, area = "Ideas" });
        // Guard against a null route result (e.g. deactivated root page) before building
        // the absolute URI, mirroring GetKnowledgeArticleApplicationPath.
        if (actionUrl == null)
        {
            return null;
        }
        url = string.Format("{0}{1}", httpContext.Request.Url.GetLeftPart(UriPartial.Authority), actionUrl);
        url = contextLanguageInfo.FormatUrlWithLanguage(false, contextLanguageInfo.ContextLanguage.Code, new Uri(url));
    }
    else
    {
        url = urlHelper.Action("Ideas", "Ideas", new { ideaForumPartialUrl = forumPartialUrl, ideaPartialUrl = partialUrl, area = "Ideas" });
    }
    return url == null ? null : ApplicationPath.FromAbsolutePath(url);
}
/// <summary>
/// Builds the URL of an issue via the "Issues" MVC area route. Requires both the
/// issue's partial URL and its parent issue forum's partial URL; returns null when
/// either is missing or the route cannot be resolved.
/// </summary>
private ApplicationPath GetIssueApplicationPath(OrganizationServiceContext context, Entity issue)
{
    issue.AssertEntityName("adx_issue");
    var httpContext = HttpContext.Current;
    if (httpContext == null)
    {
        return null;
    }
    var partialUrl = issue.GetAttributeValue<string>("adx_partialurl");
    if (string.IsNullOrEmpty(partialUrl))
    {
        return null;
    }
    var forum = issue.GetRelatedEntity(context, new Relationship("adx_issueforum_issue"));
    if (forum == null)
    {
        return null;
    }
    var forumPartialUrl = forum.GetAttributeValue<string>("adx_partialurl");
    if (string.IsNullOrEmpty(forumPartialUrl))
    {
        return null;
    }
    // Reuse the already null-checked httpContext instead of re-reading HttpContext.Current.
    var httpContextWrapper = new HttpContextWrapper(httpContext);
    var routeData = RouteTable.Routes.GetRouteData(httpContextWrapper);
    if (routeData == null)
    {
        return null;
    }
    var urlHelper = new UrlHelper(new RequestContext(httpContextWrapper, routeData));
    var url = urlHelper.Action("Issues", "Issues", new { issueForumPartialUrl = forumPartialUrl, issuePartialUrl = partialUrl, area = "Issues" });
    return url == null ? null : ApplicationPath.FromAbsolutePath(url);
}
/// <summary>
/// Resolves the URL of an incident (support case): the current website's "Case"
/// site-marker page with the incident id appended as the "caseid" query parameter.
/// </summary>
private ApplicationPath GetIncidentApplicationPath(OrganizationServiceContext context, Entity incident)
{
    incident.AssertEntityName("incident");
    // Locate the website for the current portal context.
    var portalContext = PortalCrmConfigurationManager.CreatePortalContext(PortalName);
    if (portalContext == null || portalContext.Website == null)
    {
        return null;
    }
    var website = context.CreateQuery("adx_website")
        .FirstOrDefault(w => w.GetAttributeValue<Guid>("adx_websiteid") == portalContext.Website.Id);
    if (website == null)
    {
        return null;
    }
    // The incident page is identified by the "Case" site marker.
    var casePage = context.GetPageBySiteMarkerName(website, "Case");
    if (casePage == null)
    {
        return null;
    }
    var casePagePath = GetApplicationPath(context, casePage);
    if (casePagePath == null)
    {
        return null;
    }
    var caseUrl = new UrlBuilder(casePagePath.AbsolutePath);
    caseUrl.QueryString.Set("caseid", incident.Id.ToString());
    return ApplicationPath.FromAbsolutePath(caseUrl.PathWithQueryString);
}
/// <summary>
/// Builds the URL of a legacy KB article via the "KnowledgeBase" MVC area route,
/// addressed by the article's "number" attribute. Returns null when the number,
/// HTTP context, or route cannot be resolved.
/// </summary>
private ApplicationPath GetKbArticleApplicationPath(OrganizationServiceContext context, Entity kbarticle)
{
    kbarticle.AssertEntityName("kbarticle");
    var number = kbarticle.GetAttributeValue<string>("number");
    if (string.IsNullOrEmpty(number))
    {
        return null;
    }
    var httpContext = HttpContext.Current;
    if (httpContext == null)
    {
        return null;
    }
    // Reuse the already null-checked httpContext instead of re-reading HttpContext.Current.
    var httpContextWrapper = new HttpContextWrapper(httpContext);
    var routeData = RouteTable.Routes.GetRouteData(httpContextWrapper);
    if (routeData == null)
    {
        return null;
    }
    var urlHelper = new UrlHelper(new RequestContext(httpContextWrapper, routeData));
    var url = urlHelper.Action("Index", "Article", new { number = number, area = "KnowledgeBase" });
    return url == null ? null : ApplicationPath.FromAbsolutePath(url);
}
/// <summary>
/// Builds the URL of a knowledge article via the "KnowledgeManagement" MVC area route,
/// addressed by the article's public number and, when resolvable, its language code.
/// </summary>
private ApplicationPath GetKnowledgeArticleApplicationPath(OrganizationServiceContext context, Entity article)
{
    article.AssertEntityName("knowledgearticle");
    var number = article.GetAttributeValue<string>("articlepublicnumber");
    if (string.IsNullOrEmpty(number))
    {
        return null;
    }
    var httpContext = HttpContext.Current;
    if (httpContext == null)
    {
        return null;
    }
    // Reuse the already null-checked httpContext instead of re-reading HttpContext.Current.
    var httpContextWrapper = new HttpContextWrapper(httpContext);
    var routeData = RouteTable.Routes.GetRouteData(httpContextWrapper);
    if (routeData == null)
    {
        return null;
    }
    var urlHelper = new UrlHelper(new RequestContext(httpContextWrapper, routeData));
    // Prefer the aliased "language_locale.code" value joined onto the article query;
    // otherwise fall back to looking the locale row up by the languagelocaleid reference.
    var languageLocaleCode = article.Contains("language_locale.code") ? article.GetAttributeValue<AliasedValue>("language_locale.code").Value as string : null;
    if (string.IsNullOrWhiteSpace(languageLocaleCode))
    {
        var localeid = article.GetAttributeValue<EntityReference>("languagelocaleid");
        if (localeid != null)
        {
            var locale = context.CreateQuery("languagelocale").FirstOrDefault(lang => lang.GetAttributeValue<Guid>("languagelocaleid") == localeid.Id);
            if (locale != null)
            {
                languageLocaleCode = locale.GetAttributeValue<string>("code");
            }
        }
    }
    // If multi-language is enabled, return URL in the appropriate multi-language URL format.
    var contextLanguageInfo = httpContext.GetContextLanguageInfo();
    string url;
    if (contextLanguageInfo.IsCrmMultiLanguageEnabled && ContextLanguageInfo.DisplayLanguageCodeInUrl)
    {
        var actionUrl = urlHelper.Action("Article", "Article", new { number = number, area = "KnowledgeManagement" });
        // if actionUrl is null, ex: deactivated root page.
        if (actionUrl == null)
        {
            return null;
        }
        url = string.Format("{0}{1}", httpContext.Request.Url.GetLeftPart(UriPartial.Authority), actionUrl);
        url = contextLanguageInfo.FormatUrlWithLanguage(false, languageLocaleCode, new Uri(url));
    }
    else
    {
        url = urlHelper.Action("Article", "Article", new { number = number, lang = languageLocaleCode, area = "KnowledgeManagement" });
    }
    return url == null ? null : ApplicationPath.FromAbsolutePath(url);
}
/// <summary>
/// Builds the URL of a knowledge category via the "Category" MVC area route,
/// addressed by the category's "categorynumber" attribute. Returns null when the
/// number, HTTP context, or route cannot be resolved.
/// </summary>
private ApplicationPath GetCategoryApplicationPath(OrganizationServiceContext context, Entity article)
{
    article.AssertEntityName("category");
    var number = article.GetAttributeValue<string>("categorynumber");
    if (string.IsNullOrEmpty(number))
    {
        return null;
    }
    var httpContext = HttpContext.Current;
    if (httpContext == null)
    {
        return null;
    }
    // Reuse the already null-checked httpContext instead of re-reading HttpContext.Current.
    var httpContextWrapper = new HttpContextWrapper(httpContext);
    var routeData = RouteTable.Routes.GetRouteData(httpContextWrapper);
    if (routeData == null)
    {
        return null;
    }
    var urlHelper = new UrlHelper(new RequestContext(httpContextWrapper, routeData));
    var url = urlHelper.Action("Index", "Category", new { number = number, area = "Category" });
    return url == null ? null : ApplicationPath.FromAbsolutePath(url);
}
/// <summary>
/// Resolves the path of <paramref name="entity"/> by joining its own partial URL onto
/// the path of its parent (reached through <paramref name="parentEntityRelationship"/>),
/// or onto a site-marker page's path when the entity has no parent and
/// <paramref name="siteMarker"/> is given.
/// </summary>
/// <param name="context">Service context used for related-entity and site-marker lookups.</param>
/// <param name="entity">Entity whose path is being resolved.</param>
/// <param name="partialUrlLogicalName">Logical name of the attribute holding the entity's partial URL.</param>
/// <param name="parentEntityRelationship">Relationship to the parent entity whose path prefixes this one.</param>
/// <param name="getParentApplicationPath">Callback used to resolve the parent's path (allows the caller's own resolver to recurse).</param>
/// <param name="siteMarker">Optional site-marker name used as the path root when no parent exists.</param>
/// <returns>The combined partial path, or null when a required lookup fails.</returns>
private static ApplicationPath GetApplicationPath(
OrganizationServiceContext context,
Entity entity,
string partialUrlLogicalName,
Relationship parentEntityRelationship,
Func<OrganizationServiceContext, Entity, ApplicationPath> getParentApplicationPath,
string siteMarker = null)
{
var parentEntity = entity.GetRelatedEntity(context, parentEntityRelationship);
// Read as object then ToString(): the partial-URL attribute is not guaranteed to be a string column.
var partialUrlAttributeValue = entity.GetAttributeValue<object>(partialUrlLogicalName);
var partialUrl = partialUrlAttributeValue == null ? null : partialUrlAttributeValue.ToString();
if (parentEntity == null)
{
if (siteMarker == null)
{
// No parent and no site marker: the entity's own partial URL is the whole path.
return ApplicationPath.FromPartialPath(partialUrl);
}
// Root the path at the site-marker page of the entity's website.
var siteMarkerPage = context.GetPageBySiteMarkerName(context.GetWebsite(entity), siteMarker);
if (siteMarkerPage == null)
{
return null;
}
var siteMarkerUrl = context.GetApplicationPath(siteMarkerPage);
if (siteMarkerUrl == null)
{
return null;
}
return JoinApplicationPath(siteMarkerUrl.PartialPath, partialUrl);
}
// Parent exists: prefix this entity's partial URL with the parent's resolved path.
var parentUrl = getParentApplicationPath(context, parentEntity);
if (parentUrl == null)
{
return null;
}
var url = JoinApplicationPath(parentUrl.PartialPath, partialUrl);
return url;
}
/// <summary>
/// Joins two partial path segments with exactly one slash between them, rejecting
/// segments that contain query/scheme/ampersand characters or their percent-encoded
/// forms (defense against path-injection into generated URLs).
/// </summary>
/// <exception cref="ApplicationException">Thrown when either segment contains a forbidden token.</exception>
internal static ApplicationPath JoinApplicationPath(string basePath, string extendedPath)
{
    ValidatePathComponent(basePath, "Invalid base path");
    ValidatePathComponent(extendedPath, "Invalid extendedPath");
    var path = "{0}/{1}".FormatWith(basePath.TrimEnd('/'), extendedPath.TrimStart('/'));
    return ApplicationPath.FromPartialPath(path);
}

/// <summary>Throws when <paramref name="path"/> contains a forbidden token; shared by both JoinApplicationPath arguments.</summary>
private static void ValidatePathComponent(string path, string errorMessage)
{
    // Percent-encoded forms are matched case-insensitively: per RFC 3986, "%3F" and
    // "%3f" decode identically, so checking only the lowercase spelling would let
    // uppercase-encoded '?', '/', and '&' slip through.
    var forbidden = new[] { "?", ":", "//", "&", "%3f", "%2f%2f", "%26" };
    foreach (var token in forbidden)
    {
        if (path.IndexOf(token, StringComparison.OrdinalIgnoreCase) >= 0)
        {
            throw new ApplicationException(errorMessage);
        }
    }
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Reflection;
using Elasticsearch.Net;
using Newtonsoft.Json;
namespace Nest
{
/// <summary>
/// Fluent builder for the aggregation section of an Elasticsearch request for documents
/// of type <typeparamref name="T"/>. Each public method registers one named aggregation;
/// the registered set is held in <see cref="_Aggregations"/> and serialized by Json.NET.
/// NOTE(review): serialization relies on these members' JsonProperty attributes and
/// declaration order — do not reorder or rename without checking the wire format.
/// </summary>
public class AggregationDescriptor<T>
where T : class
{
// Named aggregations registered on this descriptor; each value is a wrapper
// descriptor carrying exactly one aggregation body (set via _SetInnerAggregation).
internal readonly IDictionary<string, AggregationDescriptor<T>> _Aggregations =
new Dictionary<string, AggregationDescriptor<T>>();
// Sub-aggregations of a bucket aggregation; Order = 100 forces the "aggs" object to
// serialize after the aggregation body properties below.
[JsonProperty("aggs", Order = 100)]
[JsonConverter(typeof(DictionaryKeysAreNotPropertyNamesJsonConverter))]
internal IDictionary<string, AggregationDescriptor<T>> _NestedAggregations;
[JsonProperty("avg")]
internal AverageAggregationDescriptor<T> _Average { get; set; }
public AggregationDescriptor<T> Average(string name, Func<AverageAggregationDescriptor<T>, AverageAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._Average = d);
}
[JsonProperty("date_histogram")]
internal DateHistogramAggregationDescriptor<T> _DateHistogram { get; set; }
public AggregationDescriptor<T> DateHistogram(string name,
Func<DateHistogramAggregationDescriptor<T>, DateHistogramAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._DateHistogram = d);
}
[JsonProperty("percentiles")]
internal PercentilesAggregationDescriptor<T> _Percentiles { get; set; }
public AggregationDescriptor<T> Percentiles(string name,
Func<PercentilesAggregationDescriptor<T>, PercentilesAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._Percentiles = d);
}
[JsonProperty("date_range")]
internal DateRangeAggregationDescriptor<T> _DateRange { get; set; }
public AggregationDescriptor<T> DateRange(string name,
Func<DateRangeAggregationDescriptor<T>, DateRangeAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._DateRange = d);
}
[JsonProperty("extended_stats")]
internal ExtendedStatsAggregationDescriptor<T> _ExtendedStats { get; set; }
public AggregationDescriptor<T> ExtendedStats(string name,
Func<ExtendedStatsAggregationDescriptor<T>, ExtendedStatsAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._ExtendedStats = d);
}
[JsonProperty("filter")]
internal FilterAggregationDescriptor<T> _Filter { get; set; }
public AggregationDescriptor<T> Filter(string name,
Func<FilterAggregationDescriptor<T>, FilterAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._Filter = d);
}
[JsonProperty("geo_distance")]
internal GeoDistanceAggregationDescriptor<T> _GeoDistance { get; set; }
public AggregationDescriptor<T> GeoDistance(string name,
Func<GeoDistanceAggregationDescriptor<T>, GeoDistanceAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._GeoDistance = d);
}
[JsonProperty("geohash_grid")]
internal GeoHashAggregationDescriptor<T> _GeoHash { get; set; }
public AggregationDescriptor<T> GeoHash(string name,
Func<GeoHashAggregationDescriptor<T>, GeoHashAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._GeoHash = d);
}
[JsonProperty("histogram")]
internal HistogramAggregationDescriptor<T> _Histogram { get; set; }
public AggregationDescriptor<T> Histogram(string name,
Func<HistogramAggregationDescriptor<T>, HistogramAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._Histogram = d);
}
[JsonProperty("global")]
internal GlobalAggregationDescriptor<T> _Global { get; set; }
public AggregationDescriptor<T> Global(string name,
Func<GlobalAggregationDescriptor<T>, GlobalAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._Global = d);
}
[JsonProperty("ip_range")]
internal Ip4RangeAggregationDescriptor<T> _IpRange { get; set; }
public AggregationDescriptor<T> IpRange(string name,
Func<Ip4RangeAggregationDescriptor<T>, Ip4RangeAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._IpRange = d);
}
[JsonProperty("max")]
internal MaxAggregationDescriptor<T> _Max { get; set; }
public AggregationDescriptor<T> Max(string name, Func<MaxAggregationDescriptor<T>, MaxAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._Max = d);
}
[JsonProperty("min")]
internal MinAggregationDescriptor<T> _Min { get; set; }
public AggregationDescriptor<T> Min(string name, Func<MinAggregationDescriptor<T>, MinAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._Min = d);
}
[JsonProperty("cardinality")]
internal CardinalityAggregationDescriptor<T> _Cardinality { get; set; }
public AggregationDescriptor<T> Cardinality(string name, Func<CardinalityAggregationDescriptor<T>, CardinalityAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._Cardinality = d);
}
[JsonProperty("missing")]
internal MissingAggregationDescriptor<T> _Missing { get; set; }
public AggregationDescriptor<T> Missing(string name, Func<MissingAggregationDescriptor<T>, MissingAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._Missing = d);
}
[JsonProperty("nested")]
internal NestedAggregationDescriptor<T> _Nested { get; set; }
public AggregationDescriptor<T> Nested(string name, Func<NestedAggregationDescriptor<T>, NestedAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._Nested = d);
}
[JsonProperty("range")]
internal RangeAggregationDescriptor<T> _Range { get; set; }
public AggregationDescriptor<T> Range(string name, Func<RangeAggregationDescriptor<T>, RangeAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._Range = d);
}
[JsonProperty("stats")]
internal StatsAggregationDescriptor<T> _Stats { get; set; }
public AggregationDescriptor<T> Stats(string name, Func<StatsAggregationDescriptor<T>, StatsAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._Stats = d);
}
[JsonProperty("sum")]
internal SumAggregationDescriptor<T> _Sum { get; set; }
public AggregationDescriptor<T> Sum(string name, Func<SumAggregationDescriptor<T>, SumAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._Sum = d);
}
[JsonProperty("terms")]
internal TermsAggregationDescriptor<T> _Terms { get; set; }
public AggregationDescriptor<T> Terms(string name, Func<TermsAggregationDescriptor<T>, TermsAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._Terms = d);
}
[JsonProperty("significant_terms")]
internal SignificantTermsAggregationDescriptor<T> _SignificantTerms { get; set; }
public AggregationDescriptor<T> SignificantTerms(string name, Func<SignificantTermsAggregationDescriptor<T>, SignificantTermsAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._SignificantTerms = d);
}
[JsonProperty("value_count")]
internal ValueCountAggregationDescriptor<T> _ValueCount { get; set; }
public AggregationDescriptor<T> ValueCount(string name,
Func<ValueCountAggregationDescriptor<T>, ValueCountAggregationDescriptor<T>> selector)
{
return _SetInnerAggregation(name, selector, (a, d) => a._ValueCount = d);
}
/// <summary>
/// Registers one named aggregation: builds the inner descriptor via
/// <paramref name="selector"/>, stores it on a fresh wrapper descriptor via
/// <paramref name="setter"/>, hoists any sub-aggregations declared by bucket
/// aggregations into the wrapper's "aggs", and files the wrapper under
/// <paramref name="key"/> (replacing any existing entry with the same name).
/// </summary>
private AggregationDescriptor<T> _SetInnerAggregation<TAggregation>(
string key,
Func<TAggregation, TAggregation> selector
, Action<AggregationDescriptor<T>, TAggregation> setter
)
where TAggregation : IAggregationDescriptor, new()
{
var innerDescriptor = selector(new TAggregation());
var descriptor = new AggregationDescriptor<T>();
setter(descriptor, innerDescriptor);
// Bucket aggregations can declare sub-aggregations; surface them on the wrapper
// so they serialize under this aggregation's "aggs" key.
var bucket = innerDescriptor as IBucketAggregationDescriptor<T>;
if (bucket != null && bucket.NestedAggregations.HasAny())
{
descriptor._NestedAggregations = bucket.NestedAggregations;
}
this._Aggregations[key] = descriptor;
return this;
}
}
}
| |
using System;
using System.IO;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Design;
using Microsoft.EntityFrameworkCore.Metadata;
using Microsoft.Extensions.Configuration;
using Toucan.Common;
using Toucan.Data.Model;
namespace Toucan.Data
{
/// <summary>
/// PostgreSQL (Npgsql) DbContext for the Toucan data model. Also serves as its own
/// design-time factory (<see cref="IDesignTimeDbContextFactory{TContext}"/>) so EF Core
/// tooling (migrations, scaffolding) can construct it without the application host.
/// </summary>
public sealed class NpgSqlContext : DbContextBase, IDesignTimeDbContextFactory<NpgSqlContext>
{
public NpgSqlContext() : base()
{
}
public NpgSqlContext(DbContextOptions<NpgSqlContext> options) : base(options)
{
}
/// <summary>
/// Design-time factory entry point. Reads the connection string from
/// DesignTimeConfig (base class; null-propagated, so tooling without config gets a
/// null connection string) and pins the migrations assembly to this context's assembly.
/// </summary>
public NpgSqlContext CreateDbContext(string[] args)
{
var optionsBuilder = new DbContextOptionsBuilder<NpgSqlContext>();
optionsBuilder.UseNpgsql(this.DesignTimeConfig?.ConnectionString, o =>
{
string assemblyName = typeof(NpgSqlContext).GetAssemblyName();
o.MigrationsAssembly(assemblyName);
});
return new NpgSqlContext(optionsBuilder.Options);
}
/// <summary>
/// Applies the Npgsql-specific model configuration between the base class's
/// before/after hooks, using the configured schema name (if any).
/// </summary>
protected sealed override void OnModelCreating(ModelBuilder modelBuilder)
{
base.BeforeModelCreated(modelBuilder);
CreateModel(modelBuilder, this.DesignTimeConfig?.SchemaName);
base.AfterModelCreated(modelBuilder);
}
/// <summary>
/// Fluent entity configuration for the PostgreSQL schema. Timestamp columns are
/// mapped as "timestamp WITH TIME ZONE" with a UTC current_timestamp default.
/// </summary>
private static void CreateModel(ModelBuilder modelBuilder, string schemaName)
{
modelBuilder.Entity<Provider>(entity =>
{
entity.Property(e => e.ProviderId).HasMaxLength(64);
entity.Property(e => e.Description)
.IsRequired()
.HasMaxLength(512);
entity.Property(e => e.Enabled);
entity.Property(e => e.Name)
.IsRequired()
.HasMaxLength(128);
});
modelBuilder.Entity<Role>(entity =>
{
entity.HasKey(e => e.RoleId)
.HasName("PK_RoleId");
entity.Property(e => e.RoleId)
.IsRequired()
.HasMaxLength(32);
entity.Property(e => e.ParentRoleId)
.HasMaxLength(32);
// Self-referencing hierarchy: a role may optionally have a parent role.
entity.HasOne(e => e.Parent)
.WithMany()
.HasForeignKey(o => o.ParentRoleId)
.IsRequired(false);
entity.Property(e => e.Enabled);
entity.Property(e => e.Name)
.IsRequired()
.HasMaxLength(64);
entity.Property(e => e.CreatedOn)
.IsRequired()
.HasColumnType("timestamp WITH TIME ZONE")
.HasDefaultValueSql("current_timestamp AT TIME ZONE 'UTC'");
entity.Property(e => e.LastUpdatedOn)
.HasColumnType("timestamp WITH TIME ZONE");
entity.HasOne(e => e.CreatedByUser)
.WithMany()
.HasForeignKey(o => o.CreatedBy)
.IsRequired();
entity.HasOne(e => e.LastUpdatedByUser)
.WithMany()
.HasForeignKey(o => o.LastUpdatedBy)
.IsRequired(false);
});
modelBuilder.Entity<SecurityClaim>(entity =>
{
entity.Property(e => e.SecurityClaimId)
.IsRequired()
.HasMaxLength(32);
entity.Property(e => e.Description)
.IsRequired()
.HasMaxLength(512);
entity.Property(e => e.CreatedOn)
.IsRequired()
.HasColumnType("timestamp WITH TIME ZONE")
.HasDefaultValueSql("current_timestamp AT TIME ZONE 'UTC'");
entity.HasOne(e => e.CreatedByUser)
.WithMany()
.HasForeignKey(o => o.CreatedBy)
.IsRequired();
entity.HasOne(e => e.LastUpdatedByUser)
.WithMany()
.HasForeignKey(o => o.LastUpdatedBy)
.IsRequired(false);
});
// Join table between Role and SecurityClaim with a composite primary key.
modelBuilder.Entity<RoleSecurityClaim>(entity =>
{
entity.HasKey(e => new { e.RoleId, e.SecurityClaimId })
.HasName("PK_RoleSecurityClaim");
entity.Property(e => e.RoleId)
.IsRequired()
.HasMaxLength(32);
entity.HasOne(e => e.Role)
.WithMany(p => p.SecurityClaims)
.HasForeignKey(o => o.RoleId);
entity.Property(e => e.SecurityClaimId)
.IsRequired()
.HasMaxLength(32);
entity.HasOne(e => e.SecurityClaim)
.WithMany(p => p.Roles)
.HasForeignKey(o => o.SecurityClaimId);
});
modelBuilder.Entity<User>(entity =>
{
// UserId values come from a Postgres sequence; CreateNpgSequence registers the
// sequence and returns the SQL default expression (presumably nextval — confirm).
string sql = modelBuilder.CreateNpgSequence("user_seq", schemaName);
entity.Property(e => e.UserId)
.IsRequired()
.HasDefaultValueSql(sql);
entity.Property(e => e.CreatedOn)
.IsRequired()
.HasColumnType("timestamp WITH TIME ZONE")
.HasDefaultValueSql("current_timestamp AT TIME ZONE 'UTC'");
entity.Property(e => e.CultureName)
.IsRequired();
entity.Property(e => e.DisplayName)
.IsRequired()
.HasMaxLength(128);
entity.Property(e => e.Enabled);
entity.Property(e => e.Username)
.IsRequired()
.HasMaxLength(128);
entity.Property(e => e.TimeZoneId)
.IsRequired()
.HasMaxLength(32);
entity.HasOne(e => e.CreatedByUser)
.WithMany()
.HasForeignKey(o => o.CreatedBy)
.IsRequired(false);
entity.HasOne(e => e.LastUpdatedByUser)
.WithMany()
.HasForeignKey(o => o.LastUpdatedBy)
.IsRequired(false);
});
modelBuilder.Entity<UserProvider>(entity =>
{
entity.HasKey(e => new { e.ProviderId, e.UserId })
.HasName("PK_UserProvider");
entity.HasIndex(e => e.UserId)
.HasName("IX_UserProvider_UserId");
entity.Property(e => e.ProviderId)
.HasMaxLength(64);
entity.Property(e => e.CreatedOn)
.IsRequired()
.HasColumnType("timestamp WITH TIME ZONE")
.HasDefaultValueSql("current_timestamp AT TIME ZONE 'UTC'");
entity.Property(e => e.ExternalId)
.HasMaxLength(64);
entity.HasOne(d => d.Provider)
.WithMany(p => p.Users)
.HasForeignKey(d => d.ProviderId)
.HasConstraintName("FK_UserProvider_Provider");
entity.HasOne(d => d.User)
.WithMany(p => p.Providers)
.HasForeignKey(d => d.UserId)
.HasConstraintName("FK_UserProvider_User");
// Table-per-hierarchy: UserProvider and UserProviderLocal share one table,
// discriminated by the "UserProviderType" column.
entity.HasDiscriminator<string>("UserProviderType")
.HasValue<UserProvider>("External")
.HasValue<UserProviderLocal>("Local");
});
modelBuilder.Entity<Verification>(entity =>
{
entity.HasKey(e => e.Code)
.HasName("PK_Verification");
entity.HasIndex(e => e.UserId)
.HasName("IX_Verification_UserId");
entity.Property(e => e.Code)
.IsRequired(true)
.HasMaxLength(64);
entity.Property(e => e.Fingerprint)
.IsRequired(true)
.HasMaxLength(256);
entity.Property(e => e.ProviderKey)
.IsRequired(true)
.HasMaxLength(64);
entity.Property(e => e.IssuedAt)
.IsRequired()
.HasColumnType("timestamp WITH TIME ZONE")
.HasDefaultValueSql("current_timestamp AT TIME ZONE 'UTC'");
entity.Property(e => e.RedeemedAt)
.IsRequired(false)
.HasColumnType("timestamp WITH TIME ZONE");
entity.HasOne(d => d.User)
.WithMany(d => d.Verifications)
.HasForeignKey(d => d.UserId)
.HasConstraintName("FK_Verification_User");
});
// TPH subtype of UserProvider (see discriminator above); adds local credential columns.
modelBuilder.Entity<UserProviderLocal>(entity =>
{
entity.Property(e => e.PasswordSalt)
.HasMaxLength(128);
entity.Property(e => e.PasswordHash)
.HasMaxLength(256);
});
// Join table between User and Role with a composite primary key.
modelBuilder.Entity<UserRole>(entity =>
{
entity.HasKey(e => new { e.RoleId, e.UserId })
.HasName("PK_UserRole");
entity.HasIndex(e => e.UserId)
.HasName("IX_UserRole_UserId");
entity.Property(e => e.RoleId).HasMaxLength(32);
entity.HasOne(d => d.Role)
.WithMany(p => p.Users)
.HasForeignKey(d => d.RoleId)
.OnDelete(DeleteBehavior.ClientSetNull);
entity.HasOne(d => d.User)
.WithMany(p => p.Roles)
.HasForeignKey(d => d.UserId)
.HasConstraintName("FK_UserRole_User");
});
}
}
}
| |
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
// <auto-generated />
namespace Northwind
{
/// <summary>
/// Strongly-typed collection for the Territory class.
/// </summary>
[Serializable]
public partial class TerritoryCollection : ActiveList<Territory, TerritoryCollection>
{
public TerritoryCollection() {}
/// <summary>
/// Filters an existing collection based on the set criteria. This is an in-memory filter.
/// Thanks to developingchris for this!
/// </summary>
/// <returns>This collection, with non-matching items removed.</returns>
public TerritoryCollection Filter()
{
// Iterate backwards so removing an item does not disturb the unvisited indexes.
for (int i = this.Count - 1; i > -1; i--)
{
Territory o = this[i];
foreach (SubSonic.Where w in this.wheres)
{
bool remove = false;
// Resolve the criteria column to a property via reflection to read its value.
System.Reflection.PropertyInfo pi = o.GetType().GetProperty(w.ColumnName);
if (pi.CanRead)
{
object val = pi.GetValue(o, null);
switch (w.Comparison)
{
// NOTE(review): only Comparison.Equals is evaluated in-memory; criteria
// using other comparison operators are silently ignored by this filter.
case SubSonic.Comparison.Equals:
if (!val.Equals(w.ParameterValue))
{
remove = true;
}
break;
}
}
if (remove)
{
this.Remove(o);
// Item already removed; stop evaluating the remaining criteria for it.
break;
}
}
}
return this;
}
}
/// <summary>
/// This is an ActiveRecord class which wraps the Territories table.
/// </summary>
[Serializable]
public partial class Territory : ActiveRecord<Territory>, IActiveRecord
{
#region .ctors and Default Settings
/// <summary>Creates a new, unsaved Territory with schema-defined defaults applied.</summary>
public Territory()
{
SetSQLProps();
InitSetDefaults();
MarkNew();
}
/// <summary>Applies the schema-defined default values to this record.</summary>
private void InitSetDefaults() { SetDefaults(); }
/// <summary>Creates a new, unsaved Territory, optionally forcing database-side default values.</summary>
public Territory(bool useDatabaseDefaults)
{
SetSQLProps();
if(useDatabaseDefaults)
ForceDefaults();
MarkNew();
}
/// <summary>Loads an existing Territory by its primary key value.</summary>
public Territory(object keyID)
{
SetSQLProps();
InitSetDefaults();
LoadByKey(keyID);
}
/// <summary>Loads an existing Territory by matching an arbitrary column against a value.</summary>
public Territory(string columnName, object columnValue)
{
SetSQLProps();
InitSetDefaults();
LoadByParam(columnName,columnValue);
}
/// <summary>Ensures the table schema is initialized before any SQL-backed members are used.</summary>
protected static void SetSQLProps() { GetTableSchema(); }
#endregion
#region Schema and Query Accessor
/// <summary>Creates a new Query over the Territories table schema.</summary>
public static Query CreateQuery() { return new Query(Schema); }
/// <summary>Gets the Territories table schema, lazily initializing it on first access.</summary>
public static TableSchema.Table Schema
{
get
{
if (BaseSchema == null)
SetSQLProps();
return BaseSchema;
}
}
/// <summary>
/// Builds the static schema description for dbo.Territories (TerritoryID PK string(20),
/// TerritoryDescription string(50), RegionID int FK to Region) and registers it with
/// the "Northwind" data provider. Runs only once per process (IsSchemaInitialized guard).
/// </summary>
private static void GetTableSchema()
{
if(!IsSchemaInitialized)
{
//Schema declaration
TableSchema.Table schema = new TableSchema.Table("Territories", TableType.Table, DataService.GetInstance("Northwind"));
schema.Columns = new TableSchema.TableColumnCollection();
schema.SchemaName = @"dbo";
//columns
TableSchema.TableColumn colvarTerritoryID = new TableSchema.TableColumn(schema);
colvarTerritoryID.ColumnName = "TerritoryID";
colvarTerritoryID.DataType = DbType.String;
colvarTerritoryID.MaxLength = 20;
colvarTerritoryID.AutoIncrement = false;
colvarTerritoryID.IsNullable = false;
colvarTerritoryID.IsPrimaryKey = true;
colvarTerritoryID.IsForeignKey = false;
colvarTerritoryID.IsReadOnly = false;
colvarTerritoryID.DefaultSetting = @"";
colvarTerritoryID.ForeignKeyTableName = "";
schema.Columns.Add(colvarTerritoryID);
TableSchema.TableColumn colvarTerritoryDescription = new TableSchema.TableColumn(schema);
colvarTerritoryDescription.ColumnName = "TerritoryDescription";
colvarTerritoryDescription.DataType = DbType.String;
colvarTerritoryDescription.MaxLength = 50;
colvarTerritoryDescription.AutoIncrement = false;
colvarTerritoryDescription.IsNullable = false;
colvarTerritoryDescription.IsPrimaryKey = false;
colvarTerritoryDescription.IsForeignKey = false;
colvarTerritoryDescription.IsReadOnly = false;
colvarTerritoryDescription.DefaultSetting = @"";
colvarTerritoryDescription.ForeignKeyTableName = "";
schema.Columns.Add(colvarTerritoryDescription);
TableSchema.TableColumn colvarRegionID = new TableSchema.TableColumn(schema);
colvarRegionID.ColumnName = "RegionID";
colvarRegionID.DataType = DbType.Int32;
colvarRegionID.MaxLength = 0;
colvarRegionID.AutoIncrement = false;
colvarRegionID.IsNullable = false;
colvarRegionID.IsPrimaryKey = false;
colvarRegionID.IsForeignKey = true;
colvarRegionID.IsReadOnly = false;
colvarRegionID.DefaultSetting = @"";
colvarRegionID.ForeignKeyTableName = "Region";
schema.Columns.Add(colvarRegionID);
BaseSchema = schema;
//add this schema to the provider
//so we can query it later
DataService.Providers["Northwind"].AddSchema("Territories",schema);
}
}
#endregion
#region Props
/// <summary>Gets or sets the TerritoryID column value (primary key; string, max length 20).</summary>
[XmlAttribute("TerritoryID")]
[Bindable(true)]
public string TerritoryID
{
get { return GetColumnValue<string>(Columns.TerritoryID); }
set { SetColumnValue(Columns.TerritoryID, value); }
}
/// <summary>Gets or sets the TerritoryDescription column value (string, max length 50).</summary>
[XmlAttribute("TerritoryDescription")]
[Bindable(true)]
public string TerritoryDescription
{
get { return GetColumnValue<string>(Columns.TerritoryDescription); }
set { SetColumnValue(Columns.TerritoryDescription, value); }
}
/// <summary>Gets or sets the RegionID column value (foreign key to the Region table).</summary>
[XmlAttribute("RegionID")]
[Bindable(true)]
public int RegionID
{
get { return GetColumnValue<int>(Columns.RegionID); }
set { SetColumnValue(Columns.RegionID, value); }
}
#endregion
#region PrimaryKey Methods
/// <summary>Sets the primary key value, then refreshes the cached PK parameter values.</summary>
protected override void SetPrimaryKey(object oValue)
{
base.SetPrimaryKey(oValue);
SetPKValues();
}
/// <summary>Loads all EmployeeTerritory rows whose TerritoryID references this Territory.</summary>
public Northwind.EmployeeTerritoryCollection EmployeeTerritories()
{
return new Northwind.EmployeeTerritoryCollection().Where(EmployeeTerritory.Columns.TerritoryID, TerritoryID).Load();
}
#endregion
#region ForeignKey Properties
/// <summary>
/// Returns a Region ActiveRecord object related to this Territory.
/// The getter fetches the Region by this row's RegionID on every access (no caching);
/// the setter copies the assigned Region's RegionID into this row's RegionID column.
/// </summary>
public Northwind.Region Region
{
get { return Northwind.Region.FetchByID(this.RegionID); }
set { SetColumnValue("RegionID", value.RegionID); }
}
#endregion
#region Many To Many Helpers
/// <summary>Returns the Employees mapped to this Territory through the EmployeeTerritories join table.</summary>
public Northwind.EmployeeCollection GetEmployeeCollection() { return Territory.GetEmployeeCollection(this.TerritoryID); }
public static Northwind.EmployeeCollection GetEmployeeCollection(string varTerritoryID)
{
SubSonic.QueryCommand cmd = new SubSonic.QueryCommand("SELECT * FROM [dbo].[Employees] INNER JOIN [EmployeeTerritories] ON [Employees].[EmployeeID] = [EmployeeTerritories].[EmployeeID] WHERE [EmployeeTerritories].[TerritoryID] = @TerritoryID", Territory.Schema.Provider.Name);
cmd.AddParameter("@TerritoryID", varTerritoryID, DbType.String);
IDataReader rdr = SubSonic.DataService.GetReader(cmd);
EmployeeCollection coll = new EmployeeCollection();
coll.LoadAndCloseReader(rdr);
return coll;
}
public static void SaveEmployeeMap(string varTerritoryID, EmployeeCollection items)
{
QueryCommandCollection coll = new SubSonic.QueryCommandCollection();
//delete out the existing
QueryCommand cmdDel = new QueryCommand("DELETE FROM [EmployeeTerritories] WHERE [EmployeeTerritories].[TerritoryID] = @TerritoryID", Territory.Schema.Provider.Name);
cmdDel.AddParameter("@TerritoryID", varTerritoryID, DbType.String);
coll.Add(cmdDel);
DataService.ExecuteTransaction(coll);
foreach (Employee item in items)
{
EmployeeTerritory varEmployeeTerritory = new EmployeeTerritory();
varEmployeeTerritory.SetColumnValue("TerritoryID", varTerritoryID);
varEmployeeTerritory.SetColumnValue("EmployeeID", item.GetPrimaryKeyValue());
varEmployeeTerritory.Save();
}
}
public static void SaveEmployeeMap(string varTerritoryID, System.Web.UI.WebControls.ListItemCollection itemList)
{
QueryCommandCollection coll = new SubSonic.QueryCommandCollection();
//delete out the existing
QueryCommand cmdDel = new QueryCommand("DELETE FROM [EmployeeTerritories] WHERE [EmployeeTerritories].[TerritoryID] = @TerritoryID", Territory.Schema.Provider.Name);
cmdDel.AddParameter("@TerritoryID", varTerritoryID, DbType.String);
coll.Add(cmdDel);
DataService.ExecuteTransaction(coll);
foreach (System.Web.UI.WebControls.ListItem l in itemList)
{
if (l.Selected)
{
EmployeeTerritory varEmployeeTerritory = new EmployeeTerritory();
varEmployeeTerritory.SetColumnValue("TerritoryID", varTerritoryID);
varEmployeeTerritory.SetColumnValue("EmployeeID", l.Value);
varEmployeeTerritory.Save();
}
}
}
public static void SaveEmployeeMap(string varTerritoryID , int[] itemList)
{
QueryCommandCollection coll = new SubSonic.QueryCommandCollection();
//delete out the existing
QueryCommand cmdDel = new QueryCommand("DELETE FROM [EmployeeTerritories] WHERE [EmployeeTerritories].[TerritoryID] = @TerritoryID", Territory.Schema.Provider.Name);
cmdDel.AddParameter("@TerritoryID", varTerritoryID, DbType.String);
coll.Add(cmdDel);
DataService.ExecuteTransaction(coll);
foreach (int item in itemList)
{
EmployeeTerritory varEmployeeTerritory = new EmployeeTerritory();
varEmployeeTerritory.SetColumnValue("TerritoryID", varTerritoryID);
varEmployeeTerritory.SetColumnValue("EmployeeID", item);
varEmployeeTerritory.Save();
}
}
public static void DeleteEmployeeMap(string varTerritoryID)
{
QueryCommand cmdDel = new QueryCommand("DELETE FROM [EmployeeTerritories] WHERE [EmployeeTerritories].[TerritoryID] = @TerritoryID", Territory.Schema.Provider.Name);
cmdDel.AddParameter("@TerritoryID", varTerritoryID, DbType.String);
DataService.ExecuteQuery(cmdDel);
}
#endregion
#region ObjectDataSource support
/// <summary>
/// Inserts a record, can be used with the Object Data Source
/// </summary>
public static void Insert(string varTerritoryID,string varTerritoryDescription,int varRegionID)
{
Territory item = new Territory();
item.TerritoryID = varTerritoryID;
item.TerritoryDescription = varTerritoryDescription;
item.RegionID = varRegionID;
// Audit the save with the current user's name: the web identity when inside
// an HTTP request, the thread principal otherwise.
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
/// <summary>
/// Updates a record, can be used with the Object Data Source
/// </summary>
public static void Update(string varTerritoryID,string varTerritoryDescription,int varRegionID)
{
Territory item = new Territory();
item.TerritoryID = varTerritoryID;
item.TerritoryDescription = varTerritoryDescription;
item.RegionID = varRegionID;
// IsNew = false marks the record as existing so Save issues an update rather
// than an insert (SubSonic convention).
item.IsNew = false;
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
#endregion
#region Typed Columns
/// <summary>Schema column for TerritoryID (index 0 in schema order).</summary>
public static TableSchema.TableColumn TerritoryIDColumn
{
get { return Schema.Columns[0]; }
}
/// <summary>Schema column for TerritoryDescription (index 1 in schema order).</summary>
public static TableSchema.TableColumn TerritoryDescriptionColumn
{
get { return Schema.Columns[1]; }
}
/// <summary>Schema column for RegionID (index 2 in schema order).</summary>
public static TableSchema.TableColumn RegionIDColumn
{
get { return Schema.Columns[2]; }
}
#endregion
#region Columns Struct
/// <summary>
/// Column-name constants for the Territories table, used as keys for
/// GetColumnValue/SetColumnValue and query helpers.
/// Declared as consts so the names cannot be reassigned at runtime
/// (previously they were mutable public static fields).
/// </summary>
public struct Columns
{
    public const string TerritoryID = @"TerritoryID";
    public const string TerritoryDescription = @"TerritoryDescription";
    public const string RegionID = @"RegionID";
}
#endregion
#region Update PK Collections
/// <summary>
/// Pushes the primary-key value into dependent child collections after a key
/// change. Intentionally empty: no dependent PK collections are generated for
/// this table.
/// </summary>
public void SetPKValues()
{
}
#endregion
#region Deep Save
/// <summary>
/// Saves this record. No child collections are generated for this table, so
/// this simply delegates to Save().
/// </summary>
public void DeepSave()
{
Save();
}
#endregion
}
}
| |
#region License
//
// Copyright (c) 2007-2009, Sean Chambers <schambers80@gmail.com>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
using System.IO;
using FluentMigrator.Runner;
using FluentMigrator.Runner.Announcers;
using FluentMigrator.Runner.Initialization;
using FluentMigrator.Runner.Processors;
using Mono.Options;
namespace FluentMigrator.Console
{
public class MigratorConsole
{
// Writer that receives announcer (progress/SQL) output; injected for testability.
private readonly TextWriter _announcerOutput;
// CLI option values populated by the OptionSet in the constructor.
public string ProcessorType; // --provider/--db: database/processor type
public string Connection; // --connection: connection string or its config name
public bool Verbose; // --verbose: echo generated SQL and timing to the console
public bool PreviewOnly; // --preview: generate SQL without executing it
public string Namespace; // --namespace: restrict migrations to one namespace
public string Task; // --task: task to run (defaults to "migrate")
public bool Output; // --output: also write generated SQL to a file
public string OutputFilename; // --outputFilename: target file for --output
public long Version; // --version: specific version to migrate to (0 = all)
public int Steps; // --steps: number of versions to roll back
public string TargetAssembly; // --assembly: assembly containing the migrations
public string WorkingDirectory; // --workingdirectory: base dir for SQL scripts
public string Profile; // --profile: profile to run after migrating
public int Timeout; // --timeout: SqlCommand timeout override (seconds)
public bool ShowHelp; // --help: print usage and exit
public string ConnectionStringConfigPath; // --configPath: machine.config holding the named connection
/// <summary>
/// Writes the usage banner, examples, and the option descriptions from the
/// given OptionSet to standard output.
/// </summary>
static void DisplayHelp(OptionSet p)
{
    const string hr = "-------------------------------------------------------------------------------";
    // Static banner text, printed line by line in order.
    string[] banner =
    {
        hr,
        "=============================== FluentMigrator ================================",
        hr,
        "Source Code:",
        " http://github.com/schambers/fluentmigrator/network",
        "Ask For Help:",
        " http://groups.google.com/group/fluentmigrator-google-group",
        hr,
        "Usage:",
        " migrate [OPTIONS]",
        "Example:",
        " migrate -a bin\\debug\\MyMigrations.dll -db SqlServer2008 -conn \"SEE_BELOW\" -profile \"Debug\"",
        hr,
        "Example Connection Strings:",
        " MySql: Data Source=172.0.0.1;Database=Foo;User Id=USERNAME;Password=BLAH",
        " Oracle: Server=172.0.0.1;Database=Foo;Uid=USERNAME;Pwd=BLAH",
        " SqlLite: Data Source=:memory:;Version=3;New=True",
        " SqlServer: server=127.0.0.1;database=Foo;user id=USERNAME;password=BLAH",
        " server=.\\SQLExpress;database=Foo;trusted_connection=true",
        " ",
        "OR use a named connection string from the machine.config:",
        " migrate -a bin\\debug\\MyMigrations.dll -db SqlServer2008 -connectionName \"namedConnection\" -profile \"Debug\"",
        hr,
        "Options:"
    };
    foreach (string line in banner)
    {
        System.Console.WriteLine(line);
    }
    p.WriteOptionDescriptions(System.Console.Out);
}
/// <summary>
/// Convenience constructor that announces to standard output.
/// </summary>
public MigratorConsole(params string[] args)
: this(System.Console.Out, args)
{
}
/// <summary>
/// Parses the command-line options and, when the required options are present,
/// executes the requested migration task. Failures set Environment.ExitCode = 1
/// instead of throwing out of the constructor.
/// </summary>
/// <param name="announcerOutput">Writer that receives announcer output.</param>
/// <param name="args">Raw command-line arguments.</param>
public MigratorConsole(TextWriter announcerOutput, params string[] args)
{
    _announcerOutput = announcerOutput;
    try
    {
        var optionSet = new OptionSet
        {
            {
                "assembly=|a=|target=",
                "REQUIRED. The assembly containing the migrations you want to execute.",
                v => { TargetAssembly = v; }
            },
            {
                "provider=|dbType=|db=",
                string.Format("REQUIRED. The kind of database you are migrating against. Available choices are: {0}.",
                ProcessorFactory.ListAvailableProcessorTypes()),
                v => { ProcessorType = v; }
            },
            {
                "connectionString=|connection=|conn=|c=",
                "The name of the connection string (falls back to machine name) or the connection string itself to the server and database you want to execute your migrations against.",
                v => { Connection = v; }
            },
            {
                "connectionStringConfigPath=|configPath=",
                string.Format("The path of the machine.config where the connection string named by connectionString"+
                " is found. If not specified, it defaults to the machine.config used by the currently running CLR version"),
                v => { ConnectionStringConfigPath = v; }
            },
            {
                "namespace=|ns=",
                "The namespace contains the migrations you want to run. Default is all migrations found within the Target Assembly will be run.",
                v => { Namespace = v; }
            },
            {
                "output|out|o",
                "Output generated SQL to a file. Default is no output. Use outputFilename to control the filename, otherwise [assemblyname].sql is the default.",
                v => { Output = true; }
            },
            {
                "outputFilename=|outfile=|of=",
                "The name of the file to output the generated SQL to. The output option must be included for output to be saved to the file.",
                v => { OutputFilename = v; }
            },
            {
                "preview|p",
                "Only output the SQL generated by the migration - do not execute it. Default is false.",
                v => { PreviewOnly = true; }
            },
            {
                "steps=",
                "The number of versions to rollback if the task is 'rollback'. Default is 1.",
                v => { Steps = int.Parse(v); }
            },
            {
                "task=|t=",
                "The task you want FluentMigrator to perform. Available choices are: migrate:up, migrate (same as migrate:up), migrate:down, rollback, rollback:toversion, rollback:all. Default is 'migrate'.",
                v => { Task = v; }
            },
            {
                "version=",
                "The specific version to migrate. Default is 0, which will run all migrations.",
                v => { Version = long.Parse(v); }
            },
            {
                // BUGFIX: was "verbose=" (a value-taking option whose value was
                // ignored); it is a boolean switch like "preview" and "output".
                "verbose",
                "Show the SQL statements generated and execution time in the console. Default is false.",
                v => { Verbose = true; }
            },
            {
                "workingdirectory=|wd=",
                "The directory to load SQL scripts specified by migrations from.",
                v => { WorkingDirectory = v; }
            },
            {
                "profile=",
                "The profile to run after executing migrations.",
                v => { Profile = v; }
            },
            {
                "timeout=",
                "Overrides the default SqlCommand timeout of 30 seconds.",
                v => { Timeout = int.Parse(v); }
            },
            {
                "help|h|?",
                "Displays this help menu.",
                v => { ShowHelp = true; }
            }
        };
        try
        {
            optionSet.Parse(args);
        }
        catch (OptionException e)
        {
            System.Console.WriteLine("FluentMigrator.Console:");
            System.Console.WriteLine(e.Message);
            System.Console.WriteLine("Try 'migrate --help' for more information.");
            return;
        }
        if (string.IsNullOrEmpty(Task))
            Task = "migrate";
        // BUGFIX: honor an explicit help request before validating required
        // options, so "migrate --help" no longer exits with code 1.
        if (ShowHelp)
        {
            DisplayHelp(optionSet);
            return;
        }
        if (string.IsNullOrEmpty(ProcessorType) ||
            string.IsNullOrEmpty(TargetAssembly))
        {
            DisplayHelp(optionSet);
            Environment.ExitCode = 1;
            return;
        }
        if (Output)
        {
            // Default the SQL output file to "<assembly>.sql" next to the target.
            if (string.IsNullOrEmpty(OutputFilename))
                OutputFilename = TargetAssembly + ".sql";
            ExecuteMigrations(OutputFilename);
        }
        else
            ExecuteMigrations();
    }
    catch (Exception ex)
    {
        System.Console.Error.WriteLine("!! An error has occurred. The error is:");
        System.Console.Error.WriteLine(ex);
        Environment.ExitCode = 1;
    }
}
/// <summary>
/// Runs the migrations announcing only to the console; SQL and timing output
/// follow the Verbose flag.
/// </summary>
private void ExecuteMigrations()
{
    var announcer = new TextWriterAnnouncer(_announcerOutput);
    announcer.ShowElapsedTime = Verbose;
    announcer.ShowSql = Verbose;
    ExecuteMigrations(announcer);
}
/// <summary>
/// Runs the migrations writing generated SQL both to the console and to the
/// given file. The file announcer always records SQL; console verbosity
/// follows the Verbose flag. The file stays open (via using) until the
/// migrations complete.
/// </summary>
private void ExecuteMigrations(string outputTo)
{
using (var sw = new StreamWriter(outputTo))
{
var fileAnnouncer = new TextWriterAnnouncer(sw)
{
ShowElapsedTime = false,
ShowSql = true
};
var consoleAnnouncer = new TextWriterAnnouncer(_announcerOutput)
{
ShowElapsedTime = Verbose,
ShowSql = Verbose
};
var announcer = new CompositeAnnouncer(new[]
{
consoleAnnouncer,
fileAnnouncer
});
ExecuteMigrations(announcer);
}
}
/// <summary>
/// Builds the runner context from the parsed options and executes the task.
/// </summary>
private void ExecuteMigrations(IAnnouncer announcer)
{
var runnerContext = new RunnerContext(announcer)
{
Database = ProcessorType,
Connection = Connection,
Target = TargetAssembly,
PreviewOnly = PreviewOnly,
Namespace = Namespace,
Task = Task,
Version = Version,
Steps = Steps,
WorkingDirectory = WorkingDirectory,
Profile = Profile,
Timeout = Timeout,
ConnectionStringConfigPath = ConnectionStringConfigPath,
};
new TaskExecutor(runnerContext).Execute();
}
}
}
| |
using UnityEngine;
using UnityEditor;
using System;
using System.IO;
using System.Linq;
using System.Collections.Generic;
using V1=AssetBundleGraph;
using Model=UnityEngine.AssetBundles.GraphTool.DataModel.Version2;
namespace UnityEngine.AssetBundles.GraphTool {
[CustomNode("Build/Build Asset Bundles", 90)]
public class BundleBuilder : Node, Model.NodeDataImporter {
// Snapshot of an AssetImporter's bundle name/variant so it can be restored
// after the build (see WriteBack, used from Build()).
struct AssetImporterSetting {
private AssetImporter importer;
private string assetBundleName;
private string assetBundleVariant;
public AssetImporterSetting(AssetImporter imp) {
importer = imp;
assetBundleName = importer.assetBundleName;
assetBundleVariant = importer.assetBundleVariant;
}
// Restores the captured name/variant and reimports the asset.
public void WriteBack() {
importer.SetAssetBundleNameAndVariant (assetBundleName, assetBundleVariant);
importer.SaveAndReimport ();
}
}
// How the output directory is resolved/created; see PrepareOutputDirectory().
public enum OutputOption : int {
BuildInCacheDirectory,
ErrorIfNoOutputDirectoryFound,
AutomaticallyCreateIfNoOutputDirectoryFound,
DeleteAndRecreateOutputDirectory
}
// Single output-group key used for this node's output asset groups.
private static readonly string key = "0";
[SerializeField] private SerializableMultiTargetInt m_enabledBundleOptions; // BuildAssetBundleOptions flags per platform group
[SerializeField] private SerializableMultiTargetString m_outputDir; // output directory per platform group
[SerializeField] private SerializableMultiTargetInt m_outputOption; // OutputOption per platform group
// When true, bundle name/variant values written to AssetImporters during
// Build() are left in place; when false they are captured and restored.
[SerializeField] private bool m_overwriteImporterSetting;
// GUI style name when the node is selected.
public override string ActiveStyle {
get {
return "node 5 on";
}
}
// GUI style name when the node is not selected.
public override string InactiveStyle {
get {
return "node 5";
}
}
// Menu category this node appears under.
public override string Category {
get {
return "Build";
}
}
// Accepts asset-bundle configurations as input.
public override Model.NodeOutputSemantics NodeInputType {
get {
return Model.NodeOutputSemantics.AssetBundleConfigurations;
}
}
// Emits built asset bundles as output.
public override Model.NodeOutputSemantics NodeOutputType {
get {
return Model.NodeOutputSemantics.AssetBundles;
}
}
// Initializes a fresh node with default settings (build into the cache
// directory) and the default input/output connection points.
public override void Initialize(Model.NodeData data) {
m_enabledBundleOptions = new SerializableMultiTargetInt();
m_outputDir = new SerializableMultiTargetString();
m_outputOption = new SerializableMultiTargetInt((int)OutputOption.BuildInCacheDirectory);
data.AddDefaultInputPoint();
data.AddDefaultOutputPoint();
}
// Imports settings from a v1 (AssetBundleGraph) node; only the bundle build
// options carry over, output settings are reset to the cache-directory default.
public void Import(V1.NodeData v1, Model.NodeData v2) {
m_enabledBundleOptions = new SerializableMultiTargetInt(v1.BundleBuilderBundleOptions);
m_outputDir = new SerializableMultiTargetString();
m_outputOption = new SerializableMultiTargetInt((int)OutputOption.BuildInCacheDirectory);
}
/// <summary>
/// Creates a copy of this node for the given node data, duplicating all
/// serialized settings.
/// </summary>
public override Node Clone(Model.NodeData newData) {
    var newNode = new BundleBuilder();
    newNode.m_enabledBundleOptions = new SerializableMultiTargetInt(m_enabledBundleOptions);
    newNode.m_outputDir = new SerializableMultiTargetString(m_outputDir);
    newNode.m_outputOption = new SerializableMultiTargetInt(m_outputOption);
    // BUGFIX: this flag was not copied before, so cloning silently reset
    // "Keep AssetImporter settings for variants" to false.
    newNode.m_overwriteImporterSetting = m_overwriteImporterSetting;
    newData.AddDefaultInputPoint();
    newData.AddDefaultOutputPoint();
    return newNode;
}
/// <summary>
/// Draws the node inspector: the importer-setting toggle, per-platform
/// override selection, output option/directory controls, and one toggle per
/// BuildAssetBundleOptions flag. All changes are recorded for undo and
/// reported through onValueChanged.
/// </summary>
public override void OnInspectorGUI(NodeGUI node, AssetReferenceStreamManager streamManager, NodeGUIEditor editor, Action onValueChanged) {
// Not initialized yet (e.g. node still loading) - nothing to draw.
if (m_enabledBundleOptions == null) {
return;
}
EditorGUILayout.HelpBox("Build Asset Bundles: Build asset bundles with given asset bundle settings.", MessageType.Info);
editor.UpdateNodeName(node);
// NOTE(review): the undo label below ("Remove Target Bundle Options") looks
// copy-pasted from the override toggle; it does not describe this change.
bool newOverwrite = EditorGUILayout.ToggleLeft ("Keep AssetImporter settings for variants", m_overwriteImporterSetting);
if (newOverwrite != m_overwriteImporterSetting) {
using(new RecordUndoScope("Remove Target Bundle Options", node, true)){
m_overwriteImporterSetting = newOverwrite;
onValueChanged();
}
}
GUILayout.Space(10f);
//Show target configuration tab
editor.DrawPlatformSelector(node);
using (new EditorGUILayout.VerticalScope(GUI.skin.box)) {
// Enabling an override seeds the group's settings from the defaults;
// disabling removes the group-specific entries entirely.
var disabledScope = editor.DrawOverrideTargetToggle(node, m_enabledBundleOptions.ContainsValueOf(editor.CurrentEditingGroup), (bool enabled) => {
using(new RecordUndoScope("Remove Target Bundle Options", node, true)){
if(enabled) {
m_enabledBundleOptions[editor.CurrentEditingGroup] = m_enabledBundleOptions.DefaultValue;
m_outputDir[editor.CurrentEditingGroup] = m_outputDir.DefaultValue;
m_outputOption[editor.CurrentEditingGroup] = m_outputOption.DefaultValue;
} else {
m_enabledBundleOptions.Remove(editor.CurrentEditingGroup);
m_outputDir.Remove(editor.CurrentEditingGroup);
m_outputOption.Remove(editor.CurrentEditingGroup);
}
onValueChanged();
}
} );
using (disabledScope) {
OutputOption opt = (OutputOption)m_outputOption[editor.CurrentEditingGroup];
var newOption = (OutputOption)EditorGUILayout.EnumPopup("Output Option", opt);
if(newOption != opt) {
using(new RecordUndoScope("Change Output Option", node, true)){
m_outputOption[editor.CurrentEditingGroup] = (int)newOption;
onValueChanged();
}
}
// The directory field is irrelevant when building into the cache directory.
using (new EditorGUI.DisabledScope (opt == OutputOption.BuildInCacheDirectory)) {
// The selector callback converts an absolute path under the project root
// into a project-relative path.
var newDirPath = editor.DrawFolderSelector ("Output Directory", "Select Output Folder",
m_outputDir[editor.CurrentEditingGroup],
Application.dataPath + "/../",
(string folderSelected) => {
var projectPath = Directory.GetParent(Application.dataPath).ToString();
if(projectPath == folderSelected) {
folderSelected = string.Empty;
} else {
var index = folderSelected.IndexOf(projectPath);
if(index >= 0 ) {
folderSelected = folderSelected.Substring(projectPath.Length + index);
if(folderSelected.IndexOf('/') == 0) {
folderSelected = folderSelected.Substring(1);
}
}
}
return folderSelected;
}
);
if (newDirPath != m_outputDir[editor.CurrentEditingGroup]) {
using(new RecordUndoScope("Change Output Directory", node, true)){
m_outputDir[editor.CurrentEditingGroup] = newDirPath;
onValueChanged();
}
}
// Offer to create the directory (and list siblings) when the configured
// directory is required to exist but does not.
if (opt == OutputOption.ErrorIfNoOutputDirectoryFound &&
!string.IsNullOrEmpty(m_outputDir [editor.CurrentEditingGroup]) &&
!Directory.Exists (m_outputDir [editor.CurrentEditingGroup]))
{
using (new EditorGUILayout.HorizontalScope()) {
EditorGUILayout.LabelField(m_outputDir[editor.CurrentEditingGroup] + " does not exist.");
if(GUILayout.Button("Create directory")) {
Directory.CreateDirectory(m_outputDir[editor.CurrentEditingGroup]);
}
}
EditorGUILayout.Space();
string parentDir = Path.GetDirectoryName(m_outputDir[editor.CurrentEditingGroup]);
if(Directory.Exists(parentDir)) {
EditorGUILayout.LabelField("Available Directories:");
string[] dirs = Directory.GetDirectories(parentDir);
foreach(string s in dirs) {
EditorGUILayout.LabelField(s);
}
}
EditorGUILayout.Space();
}
var outputDir = PrepareOutputDirectory (BuildTargetUtility.GroupToTarget(editor.CurrentEditingGroup), node.Data, false, false);
using (new EditorGUI.DisabledScope (!Directory.Exists (outputDir)))
{
using (new EditorGUILayout.HorizontalScope ()) {
GUILayout.FlexibleSpace ();
#if UNITY_EDITOR_OSX
string buttonName = "Reveal in Finder";
#else
string buttonName = "Show in Explorer";
#endif
if (GUILayout.Button (buttonName)) {
EditorUtility.RevealInFinder (outputDir);
}
}
}
}
int bundleOptions = m_enabledBundleOptions[editor.CurrentEditingGroup];
bool isDisableWriteTypeTreeEnabled = 0 < (bundleOptions & (int)BuildAssetBundleOptions.DisableWriteTypeTree);
bool isIgnoreTypeTreeChangesEnabled = 0 < (bundleOptions & (int)BuildAssetBundleOptions.IgnoreTypeTreeChanges);
// buildOptions are validated during loading. Two flags should not be true at the same time.
UnityEngine.Assertions.Assert.IsFalse(isDisableWriteTypeTreeEnabled && isIgnoreTypeTreeChangesEnabled);
bool isSomethingDisabled = isDisableWriteTypeTreeEnabled || isIgnoreTypeTreeChangesEnabled;
foreach (var option in Model.Settings.BundleOptionSettings) {
// contains keyword == enabled. if not, disabled.
bool isEnabled = (bundleOptions & (int)option.option) != 0;
// DisableWriteTypeTree and IgnoreTypeTreeChanges are mutually exclusive,
// so the other toggle is disabled while one is on.
bool isToggleDisabled =
(option.option == BuildAssetBundleOptions.DisableWriteTypeTree && isIgnoreTypeTreeChangesEnabled) ||
(option.option == BuildAssetBundleOptions.IgnoreTypeTreeChanges && isDisableWriteTypeTreeEnabled);
using(new EditorGUI.DisabledScope(isToggleDisabled)) {
var result = EditorGUILayout.ToggleLeft(option.description, isEnabled);
if (result != isEnabled) {
using(new RecordUndoScope("Change Bundle Options", node, true)){
bundleOptions = (result) ?
((int)option.option | bundleOptions) :
(((~(int)option.option)) & bundleOptions);
m_enabledBundleOptions[editor.CurrentEditingGroup] = bundleOptions;
onValueChanged();
}
}
}
}
if(isSomethingDisabled) {
EditorGUILayout.HelpBox("'Disable Write Type Tree' and 'Ignore Type Tree Changes' can not be used together.", MessageType.Info);
}
}
}
}
/// <summary>
/// Graph "Prepare" pass: projects the incoming bundle configuration into the
/// list of files (one bundle plus one .manifest per name/variant, plus the
/// top-level manifest) that Build() will produce, without building anything.
/// </summary>
public override void Prepare (BuildTarget target,
Model.NodeData node,
IEnumerable<PerformGraph.AssetGroups> incoming,
IEnumerable<Model.ConnectionData> connectionsToOutput,
PerformGraph.Output Output)
{
// BundleBuilder do nothing without incoming connections
if(incoming == null) {
return;
}
var bundleOutputDir = PrepareOutputDirectory (target, node, false, true);
var bundleNames = incoming.SelectMany(v => v.assetGroups.Keys).Distinct().ToList();
var bundleVariants = new Dictionary<string, List<string>>();
// get all variant name for bundles
foreach(var ag in incoming) {
foreach(var name in ag.assetGroups.Keys) {
if(!bundleVariants.ContainsKey(name)) {
bundleVariants[name] = new List<string>();
}
var assets = ag.assetGroups[name];
foreach(var a in assets) {
var variantName = a.variantName;
if(!bundleVariants[name].Contains(variantName)) {
bundleVariants[name].Add(variantName);
}
}
}
}
// add manifest file
var manifestName = GetManifestName(target);
bundleNames.Add( manifestName );
bundleVariants[manifestName] = new List<string>() {""};
if(connectionsToOutput != null && Output != null) {
UnityEngine.Assertions.Assert.IsTrue(connectionsToOutput.Any());
var outputDict = new Dictionary<string, List<AssetReference>>();
outputDict[key] = new List<AssetReference>();
foreach (var name in bundleNames) {
foreach(var v in bundleVariants[name]) {
// Variant bundles are named "<bundle>.<variant>"; each bundle file is
// accompanied by a "<bundle>.manifest" file.
string bundleName = (string.IsNullOrEmpty(v))? name : name + "." + v;
AssetReference bundle = AssetReferenceDatabase.GetAssetBundleReference( FileUtility.PathCombine(bundleOutputDir, bundleName) );
AssetReference manifest = AssetReferenceDatabase.GetAssetBundleReference( FileUtility.PathCombine(bundleOutputDir, bundleName + Model.Settings.MANIFEST_FOOTER) );
outputDict[key].Add(bundle);
outputDict[key].Add(manifest);
}
}
var dst = (connectionsToOutput == null || !connectionsToOutput.Any())?
null : connectionsToOutput.First();
Output(dst, outputDict);
}
}
/// <summary>
/// Graph "Build" pass: aggregates the incoming asset groups, invokes
/// BuildPipeline.BuildAssetBundles with the configured options, outputs
/// references to the generated bundle/manifest files, restores any
/// AssetImporter settings it had to change for variants, and records a
/// build report.
/// </summary>
public override void Build (BuildTarget target,
Model.NodeData node,
IEnumerable<PerformGraph.AssetGroups> incoming,
IEnumerable<Model.ConnectionData> connectionsToOutput,
PerformGraph.Output Output,
Action<Model.NodeData, string, float> progressFunc)
{
if(incoming == null) {
return;
}
// Merge all incoming groups by bundle name.
var aggregatedGroups = new Dictionary<string, List<AssetReference>>();
aggregatedGroups[key] = new List<AssetReference>();
if(progressFunc != null) progressFunc(node, "Collecting all inputs...", 0f);
foreach(var ag in incoming) {
foreach(var name in ag.assetGroups.Keys) {
if(!aggregatedGroups.ContainsKey(name)) {
aggregatedGroups[name] = new List<AssetReference>();
}
aggregatedGroups[name].AddRange(ag.assetGroups[name].AsEnumerable());
}
}
var bundleOutputDir = PrepareOutputDirectory (target, node, true, true);
var bundleNames = aggregatedGroups.Keys.ToList();
var bundleVariants = new Dictionary<string, List<string>>();
if(progressFunc != null) progressFunc(node, "Building bundle variants map...", 0.2f);
// get all variant name for bundles
foreach(var name in aggregatedGroups.Keys) {
if(!bundleVariants.ContainsKey(name)) {
bundleVariants[name] = new List<string>();
}
var assets = aggregatedGroups[name];
foreach(var a in assets) {
var variantName = a.variantName;
if(!bundleVariants[name].Contains(variantName)) {
bundleVariants[name].Add(variantName);
}
}
}
// Count one AssetBundleBuild entry per (bundle, variant) pair, skipping
// bundles that have no assets.
int validNames = 0;
foreach (var name in bundleNames) {
var assets = aggregatedGroups[name];
// we do not build bundle without any asset
if( assets.Count > 0 ) {
validNames += bundleVariants[name].Count;
}
}
AssetBundleBuild[] bundleBuild = new AssetBundleBuild[validNames];
// When not overwriting, capture the importer settings we are about to
// change so they can be restored after the build.
List<AssetImporterSetting> importerSetting = null;
if (!m_overwriteImporterSetting) {
importerSetting = new List<AssetImporterSetting> ();
}
int bbIndex = 0;
foreach(var name in bundleNames) {
foreach(var v in bundleVariants[name]) {
var assets = aggregatedGroups[name];
if(assets.Count <= 0) {
continue;
}
bundleBuild[bbIndex].assetBundleName = name;
bundleBuild[bbIndex].assetBundleVariant = v;
bundleBuild[bbIndex].assetNames = assets.Where(x => x.variantName == v).Select(x => x.importFrom).ToArray();
/**
* WORKAROND: This will be unnecessary in future version
* Unity currently have issue in configuring variant assets using AssetBundleBuild[] that
* internal identifier does not match properly unless you configure value in AssetImporter.
*/
if (!string.IsNullOrEmpty (v)) {
foreach (var path in bundleBuild[bbIndex].assetNames) {
AssetImporter importer = AssetImporter.GetAtPath (path);
if (importer.assetBundleName != name || importer.assetBundleVariant != v) {
if (!m_overwriteImporterSetting) {
importerSetting.Add (new AssetImporterSetting(importer));
}
importer.SetAssetBundleNameAndVariant (name, v);
importer.SaveAndReimport ();
}
}
}
++bbIndex;
}
}
if(progressFunc != null) progressFunc(node, "Building Asset Bundles...", 0.7f);
AssetBundleManifest m = BuildPipeline.BuildAssetBundles(bundleOutputDir, bundleBuild, (BuildAssetBundleOptions)m_enabledBundleOptions[target], target);
var output = new Dictionary<string, List<AssetReference>>();
output[key] = new List<AssetReference>();
var generatedFiles = FileUtility.GetAllFilePathsInFolder(bundleOutputDir);
var manifestName = GetManifestName (target);
// add manifest file
bundleVariants.Add( manifestName.ToLower(), new List<string> { null } );
// Only emit files that correspond to a requested bundle/variant (or the
// top-level manifest); other files in the directory are ignored.
foreach (var path in generatedFiles) {
var fileName = path.Substring(bundleOutputDir.Length+1);
if( IsFileIntendedItem(fileName, bundleVariants) ) {
if (fileName == manifestName) {
output[key].Add( AssetReferenceDatabase.GetAssetBundleManifestReference(path) );
} else {
output[key].Add( AssetReferenceDatabase.GetAssetBundleReference(path) );
}
}
}
if(Output != null) {
var dst = (connectionsToOutput == null || !connectionsToOutput.Any())?
null : connectionsToOutput.First();
Output(dst, output);
}
// Restore any AssetImporter settings changed for the variant workaround.
if (importerSetting != null) {
importerSetting.ForEach (i => i.WriteBack ());
}
AssetBundleBuildReport.AddBuildReport(new AssetBundleBuildReport(node, m, manifestName, bundleBuild, output[key], aggregatedGroups, bundleVariants));
}
// Returns the name of the top-level manifest bundle: the platform name when
// no custom output directory is configured (cache-directory build), otherwise
// the output folder's own name.
private string GetManifestName(BuildTarget target) {
    var dir = m_outputDir [target];
    return string.IsNullOrEmpty (dir)
        ? BuildTargetUtility.TargetToAssetBundlePlatformName (target)
        : Path.GetFileName (dir);
}
/// <summary>
/// Resolves the output directory for the given target according to the
/// node's OutputOption. When throwException is set, missing configuration
/// raises a NodeException; when autoCreate is set, the directory is created
/// (and first deleted for DeleteAndRecreateOutputDirectory).
/// </summary>
private string PrepareOutputDirectory(BuildTarget target, Model.NodeData node, bool autoCreate, bool throwException) {
var outputOption = (OutputOption)m_outputOption [target];
var outputDir = m_outputDir [target];
// Cache-directory builds ignore the configured path entirely.
if(outputOption == OutputOption.BuildInCacheDirectory) {
return FileUtility.EnsureAssetBundleCacheDirExists (target, node);
}
if (throwException) {
if(string.IsNullOrEmpty(outputDir)) {
throw new NodeException (node.Name + ":Output directory is empty.", node.Id);
}
if(outputOption == OutputOption.ErrorIfNoOutputDirectoryFound) {
if (!Directory.Exists (outputDir)) {
throw new NodeException (node.Name + ":Output directory not found.", node.Id);
}
}
}
if (autoCreate) {
if(outputOption == OutputOption.DeleteAndRecreateOutputDirectory) {
if (Directory.Exists(outputDir)) {
FileUtility.DeleteDirectory(outputDir, true);
}
}
if (!Directory.Exists(outputDir)) {
Directory.CreateDirectory(outputDir);
}
}
return outputDir;
}
// Decides whether a generated file in the output directory belongs to this
// build: its (lower-cased) name, after stripping a trailing ".manifest",
// must match either a variant-less bundle name, or "<bundle>.<variant>" for a
// known bundle/variant pair.
private bool IsFileIntendedItem(string filename, Dictionary<string, List<string>> bundleVariants) {
    var name = filename.ToLower();
    int manifestAt = name.LastIndexOf(".manifest");
    if (manifestAt > 0) {
        name = name.Substring(0, manifestAt);
    }
    // A direct match is intended only when the bundle has a null (no-variant) entry.
    List<string> direct;
    if (bundleVariants.TryGetValue(name, out direct) && direct.Contains(null)) {
        return true;
    }
    // Otherwise split "<bundle>.<variant>" on the last dot and look the pair up.
    int dotAt = name.LastIndexOf('.');
    string bundlePart = name;
    string variantPart = null;
    if (dotAt > 0) {
        bundlePart = name.Substring(0, dotAt);
        variantPart = name.Substring(dotAt + 1);
    }
    List<string> variants;
    return bundleVariants.TryGetValue(bundlePart, out variants) && variants.Contains(variantPart);
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
using Xunit;
public static class PathTests
{
// Path.ChangeExtension should handle null paths/extensions, empty strings,
// extensions given with or without a leading dot, and multi-dot names.
[Theory]
[InlineData(null, null, null)]
[InlineData(null, null, "exe")]
[InlineData("", "", "")]
[InlineData("file", "file.exe", null)]
[InlineData("file.", "file.exe", "")]
[InlineData("file.exe", "file", "exe")]
[InlineData("file.exe", "file", ".exe")]
[InlineData("file.exe", "file.txt", "exe")]
[InlineData("file.exe", "file.txt", ".exe")]
[InlineData("file.txt.exe", "file.txt.bin", "exe")]
[InlineData("dir/file.exe", "dir/file.t", "exe")]
[InlineData("dir/file.t", "dir/file.exe", "t")]
[InlineData("dir/file.exe", "dir/file", "exe")]
public static void ChangeExtension(string expected, string path, string newExtension)
{
    // Normalize '/' in the test data to the platform separator before comparing.
    expected = expected == null ? null : expected.Replace('/', Path.DirectorySeparatorChar);
    path = path == null ? null : path.Replace('/', Path.DirectorySeparatorChar);
    Assert.Equal(expected, Path.ChangeExtension(path, newExtension));
}
// Path.GetDirectoryName: null in, null out; relative combos; and null for a
// bare path root.
[Fact]
public static void GetDirectoryName()
{
Assert.Null(Path.GetDirectoryName(null));
Assert.Equal("dir", Path.GetDirectoryName(Path.Combine("dir", "baz")));
Assert.Equal(Path.GetDirectoryName("."), Path.GetDirectoryName("dir"));
Assert.Equal(null, Path.GetDirectoryName(Path.GetPathRoot(Directory.GetCurrentDirectory())));
}
// Path.GetExtension returns the extension including the dot ("" when absent);
// Path.HasExtension must agree with whether the expected extension is non-empty.
[Theory]
[InlineData(".exe", "file.exe")]
[InlineData("", "file")]
[InlineData(null, null)]
[InlineData("", "file.")]
[InlineData(".s", "file.s")]
[InlineData("", "test/file")]
[InlineData(".extension", "test/file.extension")]
public static void GetExtension(string expected, string path)
{
if (path != null)
path = path.Replace('/', Path.DirectorySeparatorChar);
Assert.Equal(expected, Path.GetExtension(path));
Assert.Equal(!string.IsNullOrEmpty(expected), Path.HasExtension(path));
}
// Path.GetFileName: the last segment, or empty when the path ends in a separator.
[Fact]
public static void GetFileName()
{
Assert.Equal("file.exe", Path.GetFileName(Path.Combine("bar", "baz", "file.exe")));
Assert.Equal(string.Empty, Path.GetFileName(Path.Combine("bar", "baz") + Path.DirectorySeparatorChar));
}
// Path.GetFileNameWithoutExtension mirrors GetFileName minus the extension;
// null in, null out.
[Fact]
public static void GetFileNameWithoutExtension()
{
Assert.Equal("file", Path.GetFileNameWithoutExtension(Path.Combine("bar","baz","file.exe")));
Assert.Equal(string.Empty, Path.GetFileNameWithoutExtension(Path.Combine("bar","baz") + Path.DirectorySeparatorChar));
Assert.Null(Path.GetFileNameWithoutExtension(null));
}
// Path.GetPathRoot/IsPathRooted for null, the current directory, relative
// names, and (Windows only) UNC paths.
[Fact]
public static void GetPathRoot()
{
Assert.Null(Path.GetPathRoot(null));
string cwd = Directory.GetCurrentDirectory();
Assert.Equal(cwd.Substring(0, cwd.IndexOf(Path.DirectorySeparatorChar) + 1), Path.GetPathRoot(cwd));
Assert.True(Path.IsPathRooted(cwd));
Assert.Equal(string.Empty, Path.GetPathRoot(@"file.exe"));
Assert.False(Path.IsPathRooted("file.exe"));
if (Interop.IsWindows) // UNC paths
{
Assert.Equal(@"\\test\unc", Path.GetPathRoot(@"\\test\unc\path\to\something"));
Assert.True(Path.IsPathRooted(@"\\test\unc\path\to\something"));
}
}
[Fact]
public static void GetRandomFileName()
{
    var fileNames = new HashSet<string>();
    for (int i = 0; i < 100; i++)
    {
        // Random names use the 8.3 form: 8 chars, a dot, then a 3-char extension.
        // xUnit's Assert.Equal takes the EXPECTED value first (xUnit2000); the
        // original calls had the arguments swapped, which makes failure
        // messages report the actual value as the expectation.
        string s = Path.GetRandomFileName();
        Assert.Equal(8 + 1 + 3, s.Length);
        Assert.Equal('.', s[8]);
        // Names should effectively never repeat within a small sample.
        Assert.True(fileNames.Add(s));
    }
}
[Fact]
public static void GetInvalidPathChars()
{
    char[] invalidChars = Path.GetInvalidPathChars();
    Assert.NotNull(invalidChars);
    // Each call returns a fresh defensive copy with identical contents.
    Assert.NotSame(invalidChars, Path.GetInvalidPathChars());
    Assert.Equal((IEnumerable<char>)invalidChars, (IEnumerable<char>)Path.GetInvalidPathChars());
    Assert.True(invalidChars.Length > 0);
    // Every validating Path API must reject each invalid character.
    Assert.All(invalidChars, c =>
    {
        string bad = c.ToString();
        Assert.Throws<ArgumentException>(() => Path.ChangeExtension(bad, "ok"));
        Assert.Throws<ArgumentException>(() => Path.Combine(bad, "ok"));
        Assert.Throws<ArgumentException>(() => Path.Combine("ok", "ok", bad));
        Assert.Throws<ArgumentException>(() => Path.Combine("ok", "ok", bad, "ok"));
        Assert.Throws<ArgumentException>(() => Path.Combine(bad, bad, bad, bad, bad));
        Assert.Throws<ArgumentException>(() => Path.GetDirectoryName(bad));
        Assert.Throws<ArgumentException>(() => Path.GetExtension(bad));
        Assert.Throws<ArgumentException>(() => Path.GetFileName(bad));
        Assert.Throws<ArgumentException>(() => Path.GetFileNameWithoutExtension(bad));
        Assert.Throws<ArgumentException>(() => Path.GetFullPath(bad));
        Assert.Throws<ArgumentException>(() => Path.GetPathRoot(bad));
        Assert.Throws<ArgumentException>(() => Path.IsPathRooted(bad));
    });
}
[Fact]
public static void GetInvalidFileNameChars()
{
    char[] invalidChars = Path.GetInvalidFileNameChars();
    Assert.NotNull(invalidChars);
    // Each call hands back a fresh copy with the same contents.
    Assert.NotSame(invalidChars, Path.GetInvalidFileNameChars());
    Assert.Equal((IEnumerable<char>)invalidChars, (IEnumerable<char>)Path.GetInvalidFileNameChars());
    Assert.True(invalidChars.Length > 0);
}
[Fact]
public static void GetTempPath()
{
    string tmpPath = Path.GetTempPath();
    Assert.False(string.IsNullOrEmpty(tmpPath));
    // The temp path is stable across calls, ends in a directory separator,
    // and points at a directory that actually exists.
    Assert.Equal(tmpPath, Path.GetTempPath());
    char lastChar = tmpPath[tmpPath.Length - 1];
    Assert.Equal(Path.DirectorySeparatorChar, lastChar);
    Assert.True(Directory.Exists(tmpPath));
}
[PlatformSpecific(PlatformID.Windows)]
[Theory]
// Windows resolves the temp directory from the TMP environment variable and
// always reports it with a trailing backslash.
[InlineData(@"C:\Users\someuser\AppData\Local\Temp\", @"C:\Users\someuser\AppData\Local\Temp")]
[InlineData(@"C:\Users\someuser\AppData\Local\Temp\", @"C:\Users\someuser\AppData\Local\Temp\")]
[InlineData(@"C:\", @"C:\")]
[InlineData(@"C:\tmp\", @"C:\tmp")]
[InlineData(@"C:\tmp\", @"C:\tmp\")]
public static void GetTempPath_SetEnvVar_Windows(string expected, string newTempPath)
{
    // Delegates to the shared helper, overriding the Windows-specific TMP variable.
    GetTempPath_SetEnvVar("TMP", expected, newTempPath);
}
[PlatformSpecific(PlatformID.AnyUnix)]
[Theory]
// Unix resolves the temp directory from TMPDIR and always reports it with a
// trailing slash; relative ("./tmp") and home-relative ("~") values are
// expected to round-trip via GetFullPath in the shared helper below.
[InlineData("/tmp/", "/tmp")]
[InlineData("/tmp/", "/tmp/")]
[InlineData("/", "/")]
[InlineData("/var/tmp/", "/var/tmp")]
[InlineData("/var/tmp/", "/var/tmp/")]
[InlineData("~/", "~")]
[InlineData("~/", "~/")]
[InlineData(".tmp/", ".tmp")]
[InlineData("./tmp/", "./tmp")]
[InlineData("/home/someuser/sometempdir/", "/home/someuser/sometempdir/")]
public static void GetTempPath_SetEnvVar_Unix(string expected, string newTempPath)
{
    // Delegates to the shared helper, overriding the Unix-specific TMPDIR variable.
    GetTempPath_SetEnvVar("TMPDIR", expected, newTempPath);
}
/// <summary>
/// Overrides the given environment variable, verifies Path.GetTempPath picks the
/// override up, then restores the previous environment so later tests are unaffected.
/// </summary>
/// <param name="envVar">The environment variable controlling the temp path (TMP/TMPDIR).</param>
/// <param name="expected">The temp path expected after the override.</param>
/// <param name="newTempPath">The value to place in the environment variable.</param>
private static void GetTempPath_SetEnvVar(string envVar, string expected, string newTempPath)
{
    string originalTempPath = Path.GetTempPath();
    Assert.NotNull(originalTempPath);
    // Remember the variable's prior value: the previous code restored it to null
    // unconditionally, which clobbered any pre-existing TMP/TMPDIR setting on the
    // machine running the tests.
    string originalEnvValue = Environment.GetEnvironmentVariable(envVar);
    try
    {
        Environment.SetEnvironmentVariable(envVar, newTempPath);
        // Compare fully-qualified forms so relative inputs (e.g. "~", ".tmp") match.
        Assert.Equal(
            Path.GetFullPath(expected),
            Path.GetFullPath(Path.GetTempPath()));
    }
    finally
    {
        Environment.SetEnvironmentVariable(envVar, originalEnvValue);
        Assert.Equal(originalTempPath, Path.GetTempPath());
    }
}
[Fact]
public static void GetTempFileName()
{
    string tmpFile = Path.GetTempFileName();
    try
    {
        // The file is created on disk, empty, with a ".tmp" extension,
        // directly inside the temp directory.
        Assert.True(File.Exists(tmpFile));
        Assert.Equal(".tmp", Path.GetExtension(tmpFile), ignoreCase: true);
        using (FileStream fs = File.OpenRead(tmpFile))
        {
            Assert.Equal(0, fs.Length);
        }
        string recombined = Path.Combine(Path.GetTempPath(), Path.GetFileName(tmpFile));
        Assert.Equal(recombined, tmpFile);
    }
    finally
    {
        // Never leave the temp file behind, even when an assertion fails.
        File.Delete(tmpFile);
    }
}
[Fact]
public static void GetFullPath()
{
    // Exercises Path.GetFullPath end to end: argument validation, normalization
    // of "." / ".." segments, long-path limits, and (on Windows only) device
    // paths, UNC paths, and 8.3 short-name expansion.
    // Basic invalid arg checks
    Assert.Throws<ArgumentNullException>(() => Path.GetFullPath(null));
    Assert.Throws<ArgumentException>(() => Path.GetFullPath(""));
    // URI schemes are not file-system paths.
    Assert.Throws<ArgumentException>(() => Path.GetFullPath("http://www.microsoft.com"));
    Assert.Throws<ArgumentException>(() => Path.GetFullPath("file://www.microsoft.com"));
    // Basic expansions (e.g. self to self, period to self, normalization of lots of periods, etc.)
    string curDir = Directory.GetCurrentDirectory();
    Assert.Equal(curDir, Path.GetFullPath(curDir));
    Assert.Equal(curDir, Path.GetFullPath("."));
    Assert.Equal(curDir, Path.GetFullPath(Path.Combine(curDir, ".", ".", ".", ".", ".")));
    Assert.Equal(curDir, Path.GetFullPath(curDir + Path.DirectorySeparatorChar + Path.DirectorySeparatorChar + Path.DirectorySeparatorChar + "."));
    // "dir\..\dir" round-trips back to "dir".
    Assert.Equal(curDir, Path.GetFullPath(Path.Combine(curDir, "..", Path.GetFileName(curDir), ".", "..", Path.GetFileName(curDir))));
    // ".." at the root collapses to the root itself.
    Assert.Equal(Path.GetPathRoot(curDir), Path.GetFullPath(Path.Combine(Path.GetPathRoot(curDir), "somedir", "..")));
    Assert.Equal(Path.GetPathRoot(curDir), Path.GetFullPath(Path.Combine(Path.GetPathRoot(curDir), ".")));
    // Try out a long path that normalizes down to less than MaxPath
    var longPath = new StringBuilder(curDir);
    for (int i = 0; i < 1000; i++)
        longPath.Append(Path.DirectorySeparatorChar).Append('.');
    Assert.Equal(curDir, Path.GetFullPath(longPath.ToString()));
    // Some Windows-only checks
    if (Interop.IsWindows)
    {
        // Try out a long path that normalizes down to more than MaxPath
        for (int i = 0; i < 500; i++)
            longPath.Append(Path.DirectorySeparatorChar).Append('a').Append(Path.DirectorySeparatorChar).Append('.');
        Assert.Throws<PathTooLongException>(() => Path.GetFullPath(longPath.ToString()));
        // alternate data streams aren't supported
        Assert.Throws<NotSupportedException>(() => Path.GetFullPath(@"C:\some\bad:path"));
        Assert.Throws<NotSupportedException>(() => Path.GetFullPath(@"bad:path"));
        // Some Windows-specific bad paths
        Assert.Throws<ArgumentException>(() => Path.GetFullPath(Path.DirectorySeparatorChar + ".. ." + Path.DirectorySeparatorChar));
        Assert.Throws<ArgumentException>(() => Path.GetFullPath(Path.DirectorySeparatorChar + ". ." + Path.DirectorySeparatorChar));
        Assert.Throws<ArgumentException>(() => Path.GetFullPath(Path.DirectorySeparatorChar + " ." + Path.DirectorySeparatorChar));
        Assert.Throws<ArgumentException>(() => Path.GetFullPath("C:..."));
        Assert.Throws<ArgumentException>(() => Path.GetFullPath(@"C:...\somedir"));
        Assert.Throws<ArgumentException>(() => Path.GetFullPath(@"C :"));
        Assert.Throws<ArgumentException>(() => Path.GetFullPath(@"C :\somedir"));
        Assert.Throws<ArgumentException>(() => Path.GetFullPath(@"bad::$DATA"));
        // An overlong single path component also triggers PathTooLongException.
        Assert.Throws<PathTooLongException>(() => Path.GetFullPath(@"C:\" + new string('a', 255) + @"\"));
        // Some Windows-specific strange but legal paths
        // Trailing ". ", "...", ".. " segments are trimmed/normalized rather than rejected.
        Assert.Equal(
            Path.GetFullPath(curDir + Path.DirectorySeparatorChar),
            Path.GetFullPath(curDir + Path.DirectorySeparatorChar + ". " + Path.DirectorySeparatorChar));
        Assert.Equal(
            Path.GetFullPath(Path.GetDirectoryName(curDir) + Path.DirectorySeparatorChar),
            Path.GetFullPath(curDir + Path.DirectorySeparatorChar + "..." + Path.DirectorySeparatorChar));
        Assert.Equal(
            Path.GetFullPath(Path.GetDirectoryName(curDir) + Path.DirectorySeparatorChar),
            Path.GetFullPath(curDir + Path.DirectorySeparatorChar + ".. " + Path.DirectorySeparatorChar));
        // Windows-specific UNC paths
        Assert.Equal(@"\\server\share", Path.GetFullPath(@"\\server\share"));
        Assert.Equal(@"\\server\share", Path.GetFullPath(@" \\server\share"));
        Assert.Equal(@"\\server\share\dir", Path.GetFullPath(@"\\server\share\dir"));
        // "." and ".." cannot navigate above the \\server\share root.
        Assert.Equal(@"\\server\share", Path.GetFullPath(@"\\server\share\."));
        Assert.Equal(@"\\server\share", Path.GetFullPath(@"\\server\share\.."));
        Assert.Equal(@"\\server\share\", Path.GetFullPath(@"\\server\share\ "));
        Assert.Equal(@"\\server\ share\", Path.GetFullPath(@"\\server\ share\"));
        // A UNC path must carry both a server and a share name.
        Assert.Throws<ArgumentException>(() => Path.GetFullPath(@"\\"));
        Assert.Throws<ArgumentException>(() => Path.GetFullPath(@"\\server"));
        Assert.Throws<ArgumentException>(() => Path.GetFullPath(@"\\server\"));
        Assert.Throws<ArgumentException>(() => Path.GetFullPath(@"\\server\.."));
        Assert.Throws<ArgumentException>(() => Path.GetFullPath(@"\\?\GLOBALROOT\"));
        // Windows short paths
        string tempFilePath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N") + ".txt");
        File.Create(tempFilePath).Dispose();
        try
        {
            // Validate a short name can be expanded
            var sb = new StringBuilder(260);
            if (GetShortPathName(tempFilePath, sb, sb.Capacity) > 0) // only proceed if we could successfully create the short name
            {
                Assert.Equal(tempFilePath, Path.GetFullPath(sb.ToString()));
                // Validate case where short name doesn't expand to a real file
                string invalidShortName = @"S:\DOESNT~1\USERNA~1.RED\LOCALS~1\Temp\bg3ylpzp";
                Assert.Equal(invalidShortName, Path.GetFullPath(invalidShortName));
                // Same thing, but with a long path that normalizes down to a short enough one
                var shortLongName = new StringBuilder(invalidShortName);
                for (int i = 0; i < 1000; i++)
                    shortLongName.Append(Path.DirectorySeparatorChar).Append('.');
                Assert.Equal(invalidShortName, Path.GetFullPath(shortLongName.ToString()));
            }
        }
        finally
        {
            File.Delete(tempFilePath);
        }
    }
}
// Windows-only P/Invoke to create 8.3 short names from long names.
// CharSet.Unicode targets GetShortPathNameW: marshaling paths through the ANSI
// code page (the previous CharSet.Ansi) can corrupt characters that have no
// ANSI representation, silently skipping the short-path assertions above.
[DllImport("kernel32.dll", CharSet = CharSet.Unicode)]
private static extern uint GetShortPathName(string lpszLongPath, StringBuilder lpszShortPath, int cchBuffer);
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Xml.Serialization;
namespace Wyam.Feeds.Syndication.Rss
{
/// <summary>
/// Really Simple Syndication (RSS 2.0)
/// http://www.rssboard.org/rss-specification
/// http://blogs.law.harvard.edu/tech/rss
/// </summary>
[XmlRoot(RootElement, Namespace=Namespace)]
public class RssFeed : RssBase, IFeed
{
    /// <summary>URL of the RSS 2.0 specification this implementation follows.</summary>
    public const string SpecificationUrl = "http://blogs.law.harvard.edu/tech/rss";
    // Root element name and (empty) namespace consumed by the XmlRoot attribute above.
    protected internal const string RootElement = "rss";
    protected internal const string Namespace = "";
    /// <summary>Standard MIME type for RSS feeds.</summary>
    public const string MimeType = "application/rss+xml";
    // Backing fields: the channel is created lazily on first access;
    // the RSS version defaults to 2.0.
    private RssChannel _channel = null;
    private Version _version = new Version(2,0);
    /// <summary>
    /// Initializes an empty RSS 2.0 feed (used by the XML serializer).
    /// </summary>
    public RssFeed()
    {
    }
    /// <summary>
    /// Initializes the feed by copying metadata and items from another feed.
    /// </summary>
    /// <param name="source">The feed to copy from. Must not be null.</param>
    public RssFeed(IFeed source)
    {
        // ** IFeedMetadata
        // ID
        // RSS has no dedicated ID element; the source ID is stored as the channel
        // link and is overwritten below when the source also supplies a Link.
        // NOTE(review): confirm this fallback mapping is intentional.
        Channel.Link = source.ID.ToString();
        // Title
        string title = source.Title;
        if (!string.IsNullOrWhiteSpace(title))
        {
            Channel.Title = title;
        }
        // Description
        string description = source.Description;
        if (!string.IsNullOrEmpty(description))
        {
            Channel.Description = description;
        }
        // Author
        string author = source.Author;
        if (!string.IsNullOrEmpty(author))
        {
            Channel.ManagingEditor = new RssPerson
            {
                Name = author
            };
        }
        // Published
        DateTime? published = source.Published;
        if (published.HasValue)
        {
            Channel.PubDate = new RssDate(published.Value);
        }
        // Updated
        DateTime? updated = source.Updated;
        if (updated.HasValue)
        {
            Channel.LastBuildDate = new RssDate(updated.Value);
        }
        // Link
        Uri link = source.Link;
        if (link != null)
        {
            Channel.Link = link.ToString();
        }
        // ImageLink
        Uri imageLink = source.ImageLink;
        if (imageLink != null)
        {
            Channel.Image = new RssImage
            {
                Url = imageLink.ToString()
            };
        }
        // ** IFeed
        // Copyright
        string copyright = source.Copyright;
        if (!string.IsNullOrEmpty(copyright))
        {
            Channel.Copyright = copyright;
        }
        // Items
        IList<IFeedItem> sourceItems = source.Items;
        if (sourceItems != null)
        {
            Channel.Items.AddRange(sourceItems.Select(x => new RssItem(x)));
        }
    }
    /// <summary>
    /// Gets or sets the single channel of this feed; created lazily on first get.
    /// </summary>
    [XmlElement("channel")]
    public RssChannel Channel
    {
        get { return _channel ?? (_channel = new RssChannel()); }
        set { _channel = value; }
    }
    /// <summary>
    /// Gets or sets the RSS version attribute ("2.0" by default).
    /// Setting null or empty clears the version so the attribute is omitted.
    /// </summary>
    [XmlAttribute("version")]
    public string Version
    {
        get { return _version?.ToString(); }
        set { _version = string.IsNullOrEmpty(value) ? null : new Version(value); }
    }
    // Explicit IFeed/IFeedMetadata implementations expose the channel's data
    // through the feed-format-agnostic abstraction; all are XML-ignored.
    [XmlIgnore]
    FeedType IFeed.FeedType => FeedType.Rss;
    string IFeed.MimeType => MimeType;
    string IFeed.Copyright => Channel.Copyright;
    IList<IFeedItem> IFeed.Items => Channel.Items.Cast<IFeedItem>().ToArray();
    Uri IFeedMetadata.ID => ((IUriProvider)Channel).Uri;
    string IFeedMetadata.Title => Channel.Title;
    string IFeedMetadata.Description => Channel.Description;
    // Prefers the managing editor, falling back to the webmaster;
    // within each person, a name takes precedence over an email address.
    string IFeedMetadata.Author
    {
        get
        {
            if (!Channel.ManagingEditorSpecified)
            {
                if (!Channel.WebMasterSpecified)
                {
                    return null;
                }
                if (string.IsNullOrEmpty(Channel.WebMaster.Name))
                {
                    return Channel.WebMaster.Email;
                }
                return Channel.WebMaster.Name;
            }
            if (string.IsNullOrEmpty(Channel.ManagingEditor.Name))
            {
                return Channel.ManagingEditor.Email;
            }
            return Channel.ManagingEditor.Name;
        }
    }
    // Falls back to the last build date when no publish date is present.
    DateTime? IFeedMetadata.Published
    {
        get
        {
            if (!Channel.PubDate.HasValue)
            {
                return ((IFeedMetadata)this).Updated;
            }
            return Channel.PubDate.Value;
        }
    }
    DateTime? IFeedMetadata.Updated
    {
        get
        {
            if (!Channel.LastBuildDate.HasValue)
            {
                return null;
            }
            return Channel.LastBuildDate.Value;
        }
    }
    Uri IFeedMetadata.Link => ((IUriProvider)Channel).Uri;
    Uri IFeedMetadata.ImageLink
    {
        get
        {
            if (!Channel.ImageSpecified)
            {
                return null;
            }
            return ((IUriProvider)Channel.Image).Uri;
        }
    }
    /// <summary>
    /// Registers the feed's (empty) default namespace plus any namespaces
    /// required by the channel and the base class.
    /// </summary>
    /// <param name="namespaces">The namespace collection to add to.</param>
    public override void AddNamespaces(XmlSerializerNamespaces namespaces)
    {
        namespaces.Add("", Namespace);
        Channel.AddNamespaces(namespaces);
        base.AddNamespaces(namespaces);
    }
}
}
| |
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
namespace Infoplus.Model
{
/// <summary>
///
/// </summary>
[DataContract]
public partial class Shipment : IEquatable<Shipment>
{
/// <summary>
/// Initializes a new instance of the <see cref="Shipment" /> class.
/// </summary>
/// <param name="Id">Id.</param>
/// <param name="WarehouseId">WarehouseId (required).</param>
/// <param name="CartonNo">CartonNo.</param>
/// <param name="NumberOfCartons">NumberOfCartons.</param>
/// <param name="Shipped">Shipped (default to false).</param>
/// <param name="CarrierServiceId">CarrierServiceId.</param>
/// <param name="Dim1In">Dim1In.</param>
/// <param name="Dim2In">Dim2In.</param>
/// <param name="Dim3In">Dim3In.</param>
/// <param name="EstimatedZone">EstimatedZone.</param>
/// <param name="CustomFields">CustomFields.</param>
public Shipment(int? Id = null, int? WarehouseId = null, int? CartonNo = null, int? NumberOfCartons = null, bool? Shipped = null, int? CarrierServiceId = null, double? Dim1In = null, double? Dim2In = null, double? Dim3In = null, string EstimatedZone = null, Dictionary<string, Object> CustomFields = null)
{
    // "WarehouseId" is the only required property; reject a missing value up front.
    if (WarehouseId == null)
    {
        throw new InvalidDataException("WarehouseId is a required property for Shipment and cannot be null");
    }
    this.WarehouseId = WarehouseId;
    this.Id = Id;
    this.CartonNo = CartonNo;
    this.NumberOfCartons = NumberOfCartons;
    // "Shipped" falls back to its documented default (false) when not provided.
    this.Shipped = Shipped ?? false;
    this.CarrierServiceId = CarrierServiceId;
    this.Dim1In = Dim1In;
    this.Dim2In = Dim2In;
    this.Dim3In = Dim3In;
    this.EstimatedZone = EstimatedZone;
    this.CustomFields = CustomFields;
}
/// <summary>
/// Gets or Sets Id
/// </summary>
[DataMember(Name="id", EmitDefaultValue=false)]
public int? Id { get; set; }
/// <summary>
/// Gets or Sets CreateDate
/// </summary>
[DataMember(Name="createDate", EmitDefaultValue=false)]
public DateTime? CreateDate { get; private set; }
/// <summary>
/// Gets or Sets ModifyDate
/// </summary>
[DataMember(Name="modifyDate", EmitDefaultValue=false)]
public DateTime? ModifyDate { get; private set; }
/// <summary>
/// Gets or Sets ShipDate
/// </summary>
[DataMember(Name="shipDate", EmitDefaultValue=false)]
public DateTime? ShipDate { get; private set; }
/// <summary>
/// Gets or Sets DeliveredDate
/// </summary>
[DataMember(Name="deliveredDate", EmitDefaultValue=false)]
public DateTime? DeliveredDate { get; private set; }
/// <summary>
/// Gets or Sets TrackingNo
/// </summary>
[DataMember(Name="trackingNo", EmitDefaultValue=false)]
public string TrackingNo { get; private set; }
/// <summary>
/// Gets or Sets WarehouseId
/// </summary>
[DataMember(Name="warehouseId", EmitDefaultValue=false)]
public int? WarehouseId { get; set; }
/// <summary>
/// Gets or Sets LobId
/// </summary>
[DataMember(Name="lobId", EmitDefaultValue=false)]
public int? LobId { get; private set; }
/// <summary>
/// Gets or Sets OrderNo
/// </summary>
[DataMember(Name="orderNo", EmitDefaultValue=false)]
public double? OrderNo { get; private set; }
/// <summary>
/// Gets or Sets CartonNo
/// </summary>
[DataMember(Name="cartonNo", EmitDefaultValue=false)]
public int? CartonNo { get; set; }
/// <summary>
/// Gets or Sets NumberOfCartons
/// </summary>
[DataMember(Name="numberOfCartons", EmitDefaultValue=false)]
public int? NumberOfCartons { get; set; }
/// <summary>
/// Gets or Sets Status
/// </summary>
[DataMember(Name="status", EmitDefaultValue=false)]
public string Status { get; private set; }
/// <summary>
/// Gets or Sets Shipped
/// </summary>
[DataMember(Name="shipped", EmitDefaultValue=false)]
public bool? Shipped { get; set; }
/// <summary>
/// Gets or Sets CarrierServiceId
/// </summary>
[DataMember(Name="carrierServiceId", EmitDefaultValue=false)]
public int? CarrierServiceId { get; set; }
/// <summary>
/// Gets or Sets Dim1In
/// </summary>
[DataMember(Name="dim1In", EmitDefaultValue=false)]
public double? Dim1In { get; set; }
/// <summary>
/// Gets or Sets Dim2In
/// </summary>
[DataMember(Name="dim2In", EmitDefaultValue=false)]
public double? Dim2In { get; set; }
/// <summary>
/// Gets or Sets Dim3In
/// </summary>
[DataMember(Name="dim3In", EmitDefaultValue=false)]
public double? Dim3In { get; set; }
/// <summary>
/// Gets or Sets EstimatedZone
/// </summary>
[DataMember(Name="estimatedZone", EmitDefaultValue=false)]
public string EstimatedZone { get; set; }
/// <summary>
/// Gets or Sets ParcelAccountNo
/// </summary>
[DataMember(Name="parcelAccountNo", EmitDefaultValue=false)]
public string ParcelAccountNo { get; private set; }
/// <summary>
/// Gets or Sets ThirdPartyParcelAccountNo
/// </summary>
[DataMember(Name="thirdPartyParcelAccountNo", EmitDefaultValue=false)]
public string ThirdPartyParcelAccountNo { get; private set; }
/// <summary>
/// Gets or Sets ManifestId
/// </summary>
[DataMember(Name="manifestId", EmitDefaultValue=false)]
public int? ManifestId { get; private set; }
/// <summary>
/// Gets or Sets Residential
/// </summary>
[DataMember(Name="residential", EmitDefaultValue=false)]
public bool? Residential { get; private set; }
/// <summary>
/// Gets or Sets BillingOption
/// </summary>
[DataMember(Name="billingOption", EmitDefaultValue=false)]
public string BillingOption { get; private set; }
/// <summary>
/// Gets or Sets WeightLbs
/// </summary>
[DataMember(Name="weightLbs", EmitDefaultValue=false)]
public double? WeightLbs { get; private set; }
/// <summary>
/// Gets or Sets DimWeight
/// </summary>
[DataMember(Name="dimWeight", EmitDefaultValue=false)]
public double? DimWeight { get; private set; }
/// <summary>
/// Gets or Sets LicensePlateNumber
/// </summary>
[DataMember(Name="licensePlateNumber", EmitDefaultValue=false)]
public string LicensePlateNumber { get; private set; }
/// <summary>
/// Gets or Sets ChargedFreightAmount
/// </summary>
[DataMember(Name="chargedFreightAmount", EmitDefaultValue=false)]
public double? ChargedFreightAmount { get; private set; }
/// <summary>
/// Gets or Sets PublishedFreightAmount
/// </summary>
[DataMember(Name="publishedFreightAmount", EmitDefaultValue=false)]
public double? PublishedFreightAmount { get; private set; }
/// <summary>
/// Gets or Sets RetailFreightAmount
/// </summary>
[DataMember(Name="retailFreightAmount", EmitDefaultValue=false)]
public double? RetailFreightAmount { get; private set; }
/// <summary>
/// Gets or Sets ExternalShippingSystemId
/// </summary>
[DataMember(Name="externalShippingSystemId", EmitDefaultValue=false)]
public int? ExternalShippingSystemId { get; private set; }
/// <summary>
/// Gets or Sets ShipmentType
/// </summary>
[DataMember(Name="shipmentType", EmitDefaultValue=false)]
public string ShipmentType { get; private set; }
/// <summary>
/// Gets or Sets CustomFields
/// </summary>
[DataMember(Name="customFields", EmitDefaultValue=false)]
public Dictionary<string, Object> CustomFields { get; set; }
/// <summary>
/// Returns the string presentation of the object
/// </summary>
/// <returns>String presentation of the object</returns>
public override string ToString()
{
    // Line-per-member dump in declaration order; null members render as empty,
    // and CustomFields prints its type name (Dictionary has no content ToString).
    var sb = new StringBuilder();
    sb.Append("class Shipment {\n");
    sb.Append(" Id: ").Append(Id).Append("\n");
    sb.Append(" CreateDate: ").Append(CreateDate).Append("\n");
    sb.Append(" ModifyDate: ").Append(ModifyDate).Append("\n");
    sb.Append(" ShipDate: ").Append(ShipDate).Append("\n");
    sb.Append(" DeliveredDate: ").Append(DeliveredDate).Append("\n");
    sb.Append(" TrackingNo: ").Append(TrackingNo).Append("\n");
    sb.Append(" WarehouseId: ").Append(WarehouseId).Append("\n");
    sb.Append(" LobId: ").Append(LobId).Append("\n");
    sb.Append(" OrderNo: ").Append(OrderNo).Append("\n");
    sb.Append(" CartonNo: ").Append(CartonNo).Append("\n");
    sb.Append(" NumberOfCartons: ").Append(NumberOfCartons).Append("\n");
    sb.Append(" Status: ").Append(Status).Append("\n");
    sb.Append(" Shipped: ").Append(Shipped).Append("\n");
    sb.Append(" CarrierServiceId: ").Append(CarrierServiceId).Append("\n");
    sb.Append(" Dim1In: ").Append(Dim1In).Append("\n");
    sb.Append(" Dim2In: ").Append(Dim2In).Append("\n");
    sb.Append(" Dim3In: ").Append(Dim3In).Append("\n");
    sb.Append(" EstimatedZone: ").Append(EstimatedZone).Append("\n");
    sb.Append(" ParcelAccountNo: ").Append(ParcelAccountNo).Append("\n");
    sb.Append(" ThirdPartyParcelAccountNo: ").Append(ThirdPartyParcelAccountNo).Append("\n");
    sb.Append(" ManifestId: ").Append(ManifestId).Append("\n");
    sb.Append(" Residential: ").Append(Residential).Append("\n");
    sb.Append(" BillingOption: ").Append(BillingOption).Append("\n");
    sb.Append(" WeightLbs: ").Append(WeightLbs).Append("\n");
    sb.Append(" DimWeight: ").Append(DimWeight).Append("\n");
    sb.Append(" LicensePlateNumber: ").Append(LicensePlateNumber).Append("\n");
    sb.Append(" ChargedFreightAmount: ").Append(ChargedFreightAmount).Append("\n");
    sb.Append(" PublishedFreightAmount: ").Append(PublishedFreightAmount).Append("\n");
    sb.Append(" RetailFreightAmount: ").Append(RetailFreightAmount).Append("\n");
    sb.Append(" ExternalShippingSystemId: ").Append(ExternalShippingSystemId).Append("\n");
    sb.Append(" ShipmentType: ").Append(ShipmentType).Append("\n");
    sb.Append(" CustomFields: ").Append(CustomFields).Append("\n");
    sb.Append("}\n");
    return sb.ToString();
}
/// <summary>
/// Returns the JSON string presentation of the object
/// </summary>
/// <returns>JSON string presentation of the object</returns>
public string ToJson()
{
    // Indented formatting keeps the output human-readable for logging/debugging.
    string json = JsonConvert.SerializeObject(this, Formatting.Indented);
    return json;
}
/// <summary>
/// Returns true if objects are equal
/// </summary>
/// <param name="obj">Object to be compared</param>
/// <returns>Boolean</returns>
public override bool Equals(object obj)
{
    // Defer to the strongly-typed overload; a non-Shipment argument becomes
    // null via "as" and is rejected there.
    Shipment other = obj as Shipment;
    return this.Equals(other);
}
/// <summary>
/// Returns true if Shipment instances are equal
/// </summary>
/// <param name="other">Instance of Shipment to be compared</param>
/// <returns>Boolean</returns>
public bool Equals(Shipment other)
{
    // credit: http://stackoverflow.com/a/10454552/677735
    if (other == null)
        return false;
    // Scalar members: object.Equals(a, b) handles null/null, null/value and
    // value/value identically to the generated pattern
    // (a == b || a != null && a.Equals(b)), including NaN for the doubles.
    bool scalarsEqual =
        object.Equals(this.Id, other.Id) &&
        object.Equals(this.CreateDate, other.CreateDate) &&
        object.Equals(this.ModifyDate, other.ModifyDate) &&
        object.Equals(this.ShipDate, other.ShipDate) &&
        object.Equals(this.DeliveredDate, other.DeliveredDate) &&
        object.Equals(this.TrackingNo, other.TrackingNo) &&
        object.Equals(this.WarehouseId, other.WarehouseId) &&
        object.Equals(this.LobId, other.LobId) &&
        object.Equals(this.OrderNo, other.OrderNo) &&
        object.Equals(this.CartonNo, other.CartonNo) &&
        object.Equals(this.NumberOfCartons, other.NumberOfCartons) &&
        object.Equals(this.Status, other.Status) &&
        object.Equals(this.Shipped, other.Shipped) &&
        object.Equals(this.CarrierServiceId, other.CarrierServiceId) &&
        object.Equals(this.Dim1In, other.Dim1In) &&
        object.Equals(this.Dim2In, other.Dim2In) &&
        object.Equals(this.Dim3In, other.Dim3In) &&
        object.Equals(this.EstimatedZone, other.EstimatedZone) &&
        object.Equals(this.ParcelAccountNo, other.ParcelAccountNo) &&
        object.Equals(this.ThirdPartyParcelAccountNo, other.ThirdPartyParcelAccountNo) &&
        object.Equals(this.ManifestId, other.ManifestId) &&
        object.Equals(this.Residential, other.Residential) &&
        object.Equals(this.BillingOption, other.BillingOption) &&
        object.Equals(this.WeightLbs, other.WeightLbs) &&
        object.Equals(this.DimWeight, other.DimWeight) &&
        object.Equals(this.LicensePlateNumber, other.LicensePlateNumber) &&
        object.Equals(this.ChargedFreightAmount, other.ChargedFreightAmount) &&
        object.Equals(this.PublishedFreightAmount, other.PublishedFreightAmount) &&
        object.Equals(this.RetailFreightAmount, other.RetailFreightAmount) &&
        object.Equals(this.ExternalShippingSystemId, other.ExternalShippingSystemId) &&
        object.Equals(this.ShipmentType, other.ShipmentType);
    if (!scalarsEqual)
        return false;
    // CustomFields compares by content (ordered key/value pairs). Guard BOTH
    // sides for null before calling SequenceEqual: the previous implementation
    // threw ArgumentNullException when this.CustomFields was non-null but
    // other.CustomFields was null.
    return this.CustomFields == other.CustomFields ||
           (this.CustomFields != null && other.CustomFields != null &&
            this.CustomFields.SequenceEqual(other.CustomFields));
}
/// <summary>
/// Gets the hash code
/// </summary>
/// <returns>Hash code</returns>
public override int GetHashCode()
{
    // credit: http://stackoverflow.com/a/263416/677735
    unchecked // Overflow is fine, just wrap
    {
        // Same accumulation as before: start at 41 and fold in each non-null
        // member in declaration order. Boxing a Nullable<T> yields the
        // underlying value's hash code, so member.GetHashCode() on the boxed
        // object matches calling it on the nullable directly.
        object[] members =
        {
            this.Id,
            this.CreateDate,
            this.ModifyDate,
            this.ShipDate,
            this.DeliveredDate,
            this.TrackingNo,
            this.WarehouseId,
            this.LobId,
            this.OrderNo,
            this.CartonNo,
            this.NumberOfCartons,
            this.Status,
            this.Shipped,
            this.CarrierServiceId,
            this.Dim1In,
            this.Dim2In,
            this.Dim3In,
            this.EstimatedZone,
            this.ParcelAccountNo,
            this.ThirdPartyParcelAccountNo,
            this.ManifestId,
            this.Residential,
            this.BillingOption,
            this.WeightLbs,
            this.DimWeight,
            this.LicensePlateNumber,
            this.ChargedFreightAmount,
            this.PublishedFreightAmount,
            this.RetailFreightAmount,
            this.ExternalShippingSystemId,
            this.ShipmentType,
            this.CustomFields,
        };
        int hash = 41;
        foreach (object member in members)
        {
            if (member != null)
                hash = hash * 59 + member.GetHashCode();
        }
        return hash;
    }
}
}
}
| |
// Copyright 2009 Auxilium B.V. - http://www.auxilium.nl/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
namespace JelloScrum.Model.Entities
{
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using Castle.ActiveRecord;
using Castle.Components.Validator;
using Enumerations;
using Helpers;
using Interfaces;
/// <summary>
/// Represents a Task
/// </summary>
[ActiveRecord]
public class Task : ModelBase, ILoggable
{
#region Fields
private string title = string.Empty;
private Story story;
private string description = string.Empty;
private State state = State.Open;
private SprintUser assignedUser;
private DateTime? dateClosed;
private TimeSpan estimation;
private IList<TimeRegistration> timeRegistrations = new List<TimeRegistration>();
private IList<TaskLogMessage> logMessages = new List<TaskLogMessage>();
private IList<TaskComment> comments = new List<TaskComment>();
#endregion
#region Constructors
/// <summary>
/// Initializes a new instance of the <see cref="Task"/> class.
/// </summary>
public Task()
{
}
/// <summary>
/// Initializes a new instance of the <see cref="Task"/> class for the given story.
/// </summary>
/// <param name="story">The story.</param>
public Task(Story story)
{
story.AddTask(this);
}
/// <summary>
/// Initializes a new instance of the <see cref="Task"/> class with the given description.
/// </summary>
/// <param name="description">The description.</param>
public Task(string description)
{
description = description;
}
#endregion
#region Properties
/// <summary>
/// Title of the task
/// </summary>
[Property]
public virtual string Title
{
get { return title; }
set { title = value; }
}
/// <summary>
/// The story this task belongs to.
/// </summary>
/// <value>The story.</value>
[BelongsTo]
public virtual Story Story
{
get { return story; }
set { story = value; }
}
/// <summary>
/// Description of this task
/// </summary>
/// <value>The description.</value>
[Property(SqlType = "ntext")]
public virtual string Description
{
get { return description; }
set { description = value; }
}
/// <summary>
/// The status of this task.
/// </summary>
/// <value>The status.</value>
[Property]
public virtual State State
{
get { return state; }
set { state = value; }
}
/// <summary>
/// The user this task is assigned to.
/// </summary>
/// <value>The assigned user.</value>
[BelongsTo]
public virtual SprintUser AssignedUser
{
get { return assignedUser; }
set { assignedUser = value; }
}
/// <summary>
/// Estimated time for this task.
/// </summary>
/// <value>Estimated time.</value>
[Property(ColumnType = "TimeSpan"), ValidateNonEmpty("Estimate the time.")]
public virtual TimeSpan Estimation
{
get { return estimation; }
set { estimation = value; }
}
///// <summary>
///// Hulp property voor schatting
///// </summary>
//public virtual string SchattingString
//{
// get { return schattingString; }
// set { schattingString = value; }
//}
/// <summary>
/// Gets a readonly list of all timeregistrations belonging to this task.
/// To add a timeregistration, use <see cref="RegisterTime(Gebruiker, DateTime, Sprint, TimeSpan)"/>.
/// </summary>
/// <value>The time registrations.</value>
[HasMany(Cascade = ManyRelationCascadeEnum.AllDeleteOrphan, Inverse = true, Lazy = true, Access = PropertyAccess.FieldCamelcase)]
public virtual IList<TimeRegistration> TimeRegistrations
{
get { return new ReadOnlyCollection<TimeRegistration>(timeRegistrations); }
}
/// <summary>
/// The logmessages
/// </summary>
/// <value>the logmessages.</value>
[HasMany(Table = "LogMessage", Cascade = ManyRelationCascadeEnum.AllDeleteOrphan, Lazy = true, Inverse = true)]
public virtual IList<TaskLogMessage> LogMessages
{
get { return logMessages; }
set { logMessages = value; }
}
/// <summary>
/// Gets or sets the comments.
/// </summary>
/// <value>The comments.</value>
[HasMany(Table = "Comments", Cascade = ManyRelationCascadeEnum.AllDeleteOrphan, Lazy = true, Inverse = true)]
public virtual IList<TaskComment> Comments
{
get { return comments; }
set { comments = value; }
}
/// <summary>
/// The date this task was closed.
/// </summary>
[Property(Access = PropertyAccess.NosetterCamelcase)]
public virtual DateTime? DateClosed
{
get { return dateClosed; }
}
#endregion
#region derived properties
/// <summary>
/// Gets the name of the user this task is assigned to.
/// </summary>
public virtual string AssignedUserName
{
get
{
return AssignedUser != null ? AssignedUser.User.Name : string.Empty;
}
}
/// <summary>
/// Gets the available time left for this task.
/// </summary>
public virtual TimeSpan RemainingTime
{
get
{
return (Estimation - TotalTimeSpent());
}
}
#endregion
#region Methods
/// <summary>
/// Register time spent on this task.
/// </summary>
/// <param name="user">The user.</param>
/// <param name="date">The date.</param>
/// <param name="sprint">De sprint.</param>
/// <param name="time">The time.</param>
public virtual void RegisterTime(User user, DateTime date, Sprint sprint, TimeSpan time)
{
if (!story.Project.Sprints.Contains(sprint))
{
throw new ArgumentException("The given sprint does not belong to this project.", "sprint");
}
foreach (TimeRegistration registratie in GetTimeRegistrationsFor(user, sprint, date))
{
RemoveTimeRegistration(registratie);
}
//only add timeregistrations that actually contain time
if (time.TotalSeconds == 0)
return;
TimeRegistration timeRegistration = new TimeRegistration(user, date, sprint, this, time);
AddTimeRegistration(timeRegistration);
}
/// <summary>
/// Add a comment
/// </summary>
/// <param name="comment">The comment.</param>
public virtual void AddComment(TaskComment comment)
{
if (!comments.Contains(comment))
{
comments.Add(comment);
}
}
/// <summary>
/// Remove a comment
/// </summary>
/// <param name="comment">The comment.</param>
public virtual void RemoveComment(TaskComment comment)
{
if (comments.Contains(comment))
{
comments.Remove(comment);
}
}
/// <summary>
/// Calculates the total time spent on this task.
/// </summary>
/// <returns></returns>
public virtual TimeSpan TotalTimeSpent()
{
TimeSpan total = new TimeSpan(0);
foreach (TimeRegistration timeRegistration in timeRegistrations)
{
total = total.Add(timeRegistration.Time);
}
return total;
}
/// <summary>
/// Calculates the total time spent on this task by the given user in the given daterange.
/// </summary>
/// <param name="user">The user.</param>
/// <param name="dateRange">The date range.</param>
/// <returns></returns>
public virtual TimeSpan TotalTimeSpent(User user, DateRange? dateRange)
{
TimeSpan total = new TimeSpan(0);
foreach (TimeRegistration timeRegistration in timeRegistrations)
{
if ((user != null || timeRegistration.User == user) && (dateRange == null || dateRange.Value.Overlap(timeRegistration.Date.Date)))
{
total = total.Add(timeRegistration.Time);
}
}
return total;
}
/// <summary>
/// Calculates the total time spent on this task on the given date.
/// </summary>
/// <param name="date">The date.</param>
/// <returns></returns>
public virtual TimeSpan TotalTimeSpent(DateTime date)
{
TimeSpan total = new TimeSpan(0);
foreach (TimeRegistration timeRegistration in timeRegistrations)
{
if (timeRegistration.Date.Date == date.Date)
{
total = total.Add(timeRegistration.Time);
}
}
return total;
}
/// <summary>
/// Calculates the total time spent on this task between the given start and end date.
/// </summary>
/// <param name="startDate">The start date.</param>
/// <param name="endDate">The end date.</param>
/// <returns></returns>
public virtual TimeSpan TotalTimeSpent(DateTime startDate, DateTime endDate)
{
TimeSpan total = new TimeSpan(0);
foreach (TimeRegistration timeRegistration in timeRegistrations)
{
if (timeRegistration.Date.Date >= startDate.Date && timeRegistration.Date.Date <= endDate.Date)
{
total = total.Add(timeRegistration.Time);
}
}
return total;
}
/// <summary>
/// Calculates the total time spent on this task by the given user on the given date.
/// </summary>
/// <param name="user">The user.</param>
/// <param name="date">The date.</param>
/// <returns></returns>
public virtual TimeSpan TotaalBestedeTijd(User user, DateTime date)
{
TimeSpan total = new TimeSpan(0);
foreach (TimeRegistration timeRegistration in timeRegistrations)
{
if (timeRegistration.User == user && timeRegistration.Date.Date == date.Date)
{
total = total.Add(timeRegistration.Time);
}
}
return total;
}
/// <summary>
/// Gets all timeregistrations of the given user.
/// </summary>
/// <param name="user">The user.</param>
/// <returns></returns>
public virtual IList<TimeRegistration> GetTimeRegistrationsFor(User user)
{
IList<TimeRegistration> userTimeRegistrations = new List<TimeRegistration>();
foreach (TimeRegistration timeRegistration in timeRegistrations)
{
if (timeRegistration.User == user)
{
userTimeRegistrations.Add(timeRegistration);
}
}
return userTimeRegistrations;
}
/// <summary>
/// Gets all timeregistrations of the given user for the given sprint and date.
/// </summary>
/// <param name="user">The user.</param>
/// <param name="sprint">The sprint.</param>
/// <param name="date">The date.</param>
/// <returns></returns>
public virtual IList<TimeRegistration> GetTimeRegistrationsFor(User user, Sprint sprint, DateTime date)
{
IList<TimeRegistration> userTimeRegistrations = new List<TimeRegistration>();
foreach (TimeRegistration timeRegistration in timeRegistrations)
{
if (timeRegistration.User == user && timeRegistration.Sprint == sprint && timeRegistration.Date.ToShortDateString() == date.ToShortDateString())
{
userTimeRegistrations.Add(timeRegistration);
}
}
return userTimeRegistrations;
}
/// <summary>
/// Close this task.
/// </summary>
public virtual void Close()
{
state = State.Closed;
dateClosed = DateTime.Now;
}
/// <summary>
/// Sets this task as not-taken.
/// </summary>
public virtual void SetAsNotTaken()
{
if (assignedUser != null)
{
assignedUser.UnAssignTask(this);
}
}
//todo: refactoren / combine with above and create logmessage?
/// <summary>
/// Decouple this task from the user it was assigned to, set status as open and create a logmessage.
/// </summary>
public virtual void UnassignTaskAndSetSatusAsOpen(string logTitle, string logText)
{
if (assignedUser != null)
{
logText = logText + " \nWas assigned to: " + assignedUser.User.FullName;
assignedUser.UnAssignTask(this);
}
CreateLogmessage(logTitle, logText);
}
/// <summary>
/// Create a logmessage
/// </summary>
/// <param name="title">The title.</param>
/// <param name="text">The text.</param>
private void CreateLogmessage(string title, string text)
{
TaskLogMessage logMessage = new TaskLogMessage(this, title, text);
if (!logMessages.Contains(logMessage))
{
logMessages.Add(logMessage);
}
}
/// <summary>
/// Adds a timeregistration.
/// </summary>
/// <param name="timeRegistration">The time registration.</param>
private void AddTimeRegistration(TimeRegistration timeRegistration)
{
if (!timeRegistrations.Contains(timeRegistration))
{
timeRegistrations.Add(timeRegistration);
}
timeRegistration.Task = this;
}
/// <summary>
/// Removes a timeregistration.
/// </summary>
/// <param name="timeRegistration">The time registration.</param>
public virtual void RemoveTimeRegistration(TimeRegistration timeRegistration)
{
if (timeRegistrations.Contains(timeRegistration))
{
timeRegistrations.Remove(timeRegistration);
}
}
#endregion
}
}
| |
namespace Simple.Data.SqlTest
{
using System.Collections.Generic;
using System.Dynamic;
using System.Linq;
using NUnit.Framework;
using Resources;
[TestFixture]
public class UpsertTests
{
[TestFixtureSetUp]
public void Setup()
{
DatabaseHelper.Reset();
}
[Test]
public void TestUpsertWithNamedArgumentsAndExistingObject()
{
var db = DatabaseHelper.Open();
db.Users.UpsertById(Id: 1, Name: "Ford Prefect");
var user = db.Users.Get(1);
Assert.IsNotNull(user);
Assert.AreEqual(1, user.Id);
Assert.AreEqual("Ford Prefect", user.Name);
}
[Test]
public void TestUpsertWithNamedArgumentsAndExistingObjectUsingTransaction()
{
using (var tx = DatabaseHelper.Open().BeginTransaction())
{
tx.Users.UpsertById(Id: 1, Name: "Ford Prefect");
var user = tx.Users.Get(1);
tx.Commit();
Assert.IsNotNull(user);
Assert.AreEqual(1, user.Id);
Assert.AreEqual("Ford Prefect", user.Name);
}
}
[Test]
public void TestUpsertWithNamedArgumentsAndNewObject()
{
var db = DatabaseHelper.Open();
var user = db.Users.UpsertById(Id: 0, Name: "Ford Prefect", Password: "Foo", Age: 42);
Assert.IsNotNull(user);
Assert.AreNotEqual(0, user.Id);
Assert.AreEqual("Ford Prefect", user.Name);
Assert.AreEqual("Foo", user.Password);
Assert.AreEqual(42, user.Age);
}
[Test]
public void TestUpsertWithStaticTypeObject()
{
var db = DatabaseHelper.Open();
var user = new User {Id = 2, Name = "Charlie", Password = "foobar", Age = 42};
var actual = db.Users.Upsert(user);
Assert.IsNotNull(user);
Assert.AreEqual(2, actual.Id);
Assert.AreEqual("Charlie", actual.Name);
Assert.AreEqual("foobar", actual.Password);
Assert.AreEqual(42, actual.Age);
}
[Test]
public void TestUpsertByWithStaticTypeObject()
{
var db = DatabaseHelper.Open();
var user = new User {Id = 2, Name = "Charlie", Password = "foobar", Age = 42};
var actual = db.Users.UpsertById(user);
Assert.IsNotNull(user);
Assert.AreEqual(2, actual.Id);
Assert.AreEqual("Charlie", actual.Name);
Assert.AreEqual("foobar", actual.Password);
Assert.AreEqual(42, actual.Age);
}
[Test]
public void TestMultiUpsertWithStaticTypeObjectsForExistingRecords()
{
var db = DatabaseHelper.Open();
var users = new[]
{
new User { Id = 1, Name = "Slartibartfast", Password = "bistromathics", Age = 777 },
new User { Id = 2, Name = "Wowbagger", Password = "teatime", Age = int.MaxValue }
};
IList<User> actuals = db.Users.Upsert(users).ToList<User>();
Assert.AreEqual(2, actuals.Count);
Assert.AreEqual(1, actuals[0].Id);
Assert.AreEqual("Slartibartfast", actuals[0].Name);
Assert.AreEqual("bistromathics", actuals[0].Password);
Assert.AreEqual(777, actuals[0].Age);
Assert.AreEqual(2, actuals[1].Id);
Assert.AreEqual("Wowbagger", actuals[1].Name);
Assert.AreEqual("teatime", actuals[1].Password);
Assert.AreEqual(int.MaxValue, actuals[1].Age);
}
[Test]
public void TestMultiUpsertWithStaticTypeObjectsForNewRecords()
{
var db = DatabaseHelper.Open();
var users = new[]
{
new User { Name = "Slartibartfast", Password = "bistromathics", Age = 777 },
new User { Name = "Wowbagger", Password = "teatime", Age = int.MaxValue }
};
IList<User> actuals = db.Users.Upsert(users).ToList<User>();
Assert.AreEqual(2, actuals.Count);
Assert.AreNotEqual(0, actuals[0].Id);
Assert.AreEqual("Slartibartfast", actuals[0].Name);
Assert.AreEqual("bistromathics", actuals[0].Password);
Assert.AreEqual(777, actuals[0].Age);
Assert.AreNotEqual(0, actuals[1].Id);
Assert.AreEqual("Wowbagger", actuals[1].Name);
Assert.AreEqual("teatime", actuals[1].Password);
Assert.AreEqual(int.MaxValue, actuals[1].Age);
}
[Test]
public void TestMultiUpsertWithStaticTypeObjectsForMixedRecords()
{
var db = DatabaseHelper.Open();
var users = new[]
{
new User { Id = 1, Name = "Slartibartfast", Password = "bistromathics", Age = 777 },
new User { Name = "Wowbagger", Password = "teatime", Age = int.MaxValue }
};
IList<User> actuals = db.Users.Upsert(users).ToList<User>();
Assert.AreEqual(2, actuals.Count);
Assert.AreEqual(1, actuals[0].Id);
Assert.AreEqual("Slartibartfast", actuals[0].Name);
Assert.AreEqual("bistromathics", actuals[0].Password);
Assert.AreEqual(777, actuals[0].Age);
Assert.AreNotEqual(0, actuals[1].Id);
Assert.AreEqual("Wowbagger", actuals[1].Name);
Assert.AreEqual("teatime", actuals[1].Password);
Assert.AreEqual(int.MaxValue, actuals[1].Age);
}
[Test]
public void TestMultiUpsertWithStaticTypeObjectsAndNoReturn()
{
var db = DatabaseHelper.Open();
var users = new[]
{
new User { Name = "Slartibartfast", Password = "bistromathics", Age = 777 },
new User { Name = "Wowbagger", Password = "teatime", Age = int.MaxValue }
};
//IList<User> actuals = db.Users.Upsert(users).ToList<User>();
db.Users.Upsert(users);
var slartibartfast = db.Users.FindByName("Slartibartfast");
Assert.IsNotNull(slartibartfast);
Assert.AreNotEqual(0, slartibartfast.Id);
Assert.AreEqual("Slartibartfast", slartibartfast.Name);
Assert.AreEqual("bistromathics", slartibartfast.Password);
Assert.AreEqual(777, slartibartfast.Age);
var wowbagger = db.Users.FindByName("Wowbagger");
Assert.IsNotNull(wowbagger);
Assert.AreNotEqual(0, wowbagger.Id);
Assert.AreEqual("Wowbagger", wowbagger.Name);
Assert.AreEqual("teatime", wowbagger.Password);
Assert.AreEqual(int.MaxValue, wowbagger.Age);
}
[Test]
public void TestUpsertWithDynamicTypeObject()
{
var db = DatabaseHelper.Open();
dynamic user = new ExpandoObject();
user.Name = "Marvin";
user.Password = "diodes";
user.Age = 42000000;
var actual = db.Users.Upsert(user);
Assert.IsNotNull(user);
Assert.AreEqual("Marvin", actual.Name);
Assert.AreEqual("diodes", actual.Password);
Assert.AreEqual(42000000, actual.Age);
}
[Test]
public void TestMultiUpsertWithDynamicTypeObjects()
{
var db = DatabaseHelper.Open();
dynamic user1 = new ExpandoObject();
user1.Name = "Slartibartfast";
user1.Password = "bistromathics";
user1.Age = 777;
dynamic user2 = new ExpandoObject();
user2.Name = "Wowbagger";
user2.Password = "teatime";
user2.Age = int.MaxValue;
var users = new[] { user1, user2 };
IList<dynamic> actuals = db.Users.Upsert(users).ToList();
Assert.AreEqual(2, actuals.Count);
Assert.AreNotEqual(0, actuals[0].Id);
Assert.AreEqual("Slartibartfast", actuals[0].Name);
Assert.AreEqual("bistromathics", actuals[0].Password);
Assert.AreEqual(777, actuals[0].Age);
Assert.AreNotEqual(0, actuals[1].Id);
Assert.AreEqual("Wowbagger", actuals[1].Name);
Assert.AreEqual("teatime", actuals[1].Password);
Assert.AreEqual(int.MaxValue, actuals[1].Age);
}
[Test]
public void TestMultiUpsertWithErrorCallback()
{
var db = DatabaseHelper.Open();
dynamic user1 = new ExpandoObject();
user1.Name = "Slartibartfast";
user1.Password = "bistromathics";
user1.Age = 777;
dynamic user2 = new ExpandoObject();
user2.Name = null;
user2.Password = null;
user2.Age = null;
dynamic user3 = new ExpandoObject();
user3.Name = "Wowbagger";
user3.Password = "teatime";
user3.Age = int.MaxValue;
var users = new[] { user1, user2, user3 };
bool passed = false;
ErrorCallback onError = (o, exception) => passed = true;
IList<dynamic> actuals = db.Users.Upsert(users,onError).ToList();
Assert.IsTrue(passed, "Callback was not called.");
Assert.AreEqual(2, actuals.Count);
Assert.AreNotEqual(0, actuals[0].Id);
Assert.AreEqual("Slartibartfast", actuals[0].Name);
Assert.AreEqual("bistromathics", actuals[0].Password);
Assert.AreEqual(777, actuals[0].Age);
Assert.AreNotEqual(0, actuals[1].Id);
Assert.AreEqual("Wowbagger", actuals[1].Name);
Assert.AreEqual("teatime", actuals[1].Password);
Assert.AreEqual(int.MaxValue, actuals[1].Age);
}
[Test]
public void TestMultiUpsertWithErrorCallbackUsingTransaction()
{
IList<dynamic> actuals;
bool passed = false;
using (var tx = DatabaseHelper.Open().BeginTransaction())
{
dynamic user1 = new ExpandoObject();
user1.Name = "Slartibartfast";
user1.Password = "bistromathics";
user1.Age = 777;
dynamic user2 = new ExpandoObject();
user2.Name = null;
user2.Password = null;
user2.Age = null;
dynamic user3 = new ExpandoObject();
user3.Name = "Wowbagger";
user3.Password = "teatime";
user3.Age = int.MaxValue;
var users = new[] {user1, user2, user3};
ErrorCallback onError = (o, exception) => passed = true;
actuals = tx.Users.Upsert(users, onError).ToList();
}
Assert.IsTrue(passed, "Callback was not called.");
Assert.AreEqual(2, actuals.Count);
Assert.AreNotEqual(0, actuals[0].Id);
Assert.AreEqual("Slartibartfast", actuals[0].Name);
Assert.AreEqual("bistromathics", actuals[0].Password);
Assert.AreEqual(777, actuals[0].Age);
Assert.AreNotEqual(0, actuals[1].Id);
Assert.AreEqual("Wowbagger", actuals[1].Name);
Assert.AreEqual("teatime", actuals[1].Password);
Assert.AreEqual(int.MaxValue, actuals[1].Age);
}
[Test]
public void TestTransactionMultiUpsertWithErrorCallback()
{
var db = DatabaseHelper.Open();
IList<dynamic> actuals;
bool passed = false;
using (var tx = db.BeginTransaction())
{
dynamic user1 = new ExpandoObject();
user1.Name = "Slartibartfast";
user1.Password = "bistromathics";
user1.Age = 777;
dynamic user2 = new ExpandoObject();
user2.Name = null;
user2.Password = null;
user2.Age = null;
dynamic user3 = new ExpandoObject();
user3.Name = "Wowbagger";
user3.Password = "teatime";
user3.Age = int.MaxValue;
var users = new[] {user1, user2, user3};
ErrorCallback onError = (o, exception) => passed = true;
actuals = db.Users.Upsert(users, onError).ToList();
tx.Commit();
}
Assert.IsTrue(passed, "Callback was not called.");
Assert.AreEqual(2, actuals.Count);
Assert.AreNotEqual(0, actuals[0].Id);
Assert.AreEqual("Slartibartfast", actuals[0].Name);
Assert.AreEqual("bistromathics", actuals[0].Password);
Assert.AreEqual(777, actuals[0].Age);
Assert.AreNotEqual(0, actuals[1].Id);
Assert.AreEqual("Wowbagger", actuals[1].Name);
Assert.AreEqual("teatime", actuals[1].Password);
Assert.AreEqual(int.MaxValue, actuals[1].Age);
}
[Test]
public void TestWithImageColumn()
{
var db = DatabaseHelper.Open();
try
{
var image = GetImage.Image;
db.Images.Upsert(Id: 1, TheImage: image);
var img = (DbImage)db.Images.FindById(1);
Assert.IsTrue(image.SequenceEqual(img.TheImage));
}
finally
{
db.Images.DeleteById(1);
}
}
[Test]
public void TestUpsertWithVarBinaryMaxColumn()
{
var db = DatabaseHelper.Open();
var image = GetImage.Image;
var blob = new Blob
{
Id = 1,
Data = image
};
db.Blobs.Upsert(blob);
blob = db.Blobs.FindById(1);
Assert.IsTrue(image.SequenceEqual(blob.Data));
}
[Test]
public void TestUpsertWithSingleArgumentAndExistingObject()
{
var db = DatabaseHelper.Open();
var actual = db.Users.UpsertById(Id: 1);
Assert.IsNotNull(actual);
Assert.AreEqual(1, actual.Id);
Assert.IsNotNull(actual.Name);
}
[Test]
public void TestUpsertUserBySecondaryField()
{
var db = DatabaseHelper.Open();
var id = db.Users.UpsertByName(new User() { Age = 20, Name = "Black sheep", Password = "Bah" }).Id;
User actual = db.Users.FindById(id);
Assert.AreEqual(id, actual.Id);
Assert.AreEqual("Black sheep", actual.Name);
Assert.AreEqual("Bah", actual.Password);
Assert.AreEqual(20, actual.Age);
}
[Test]
public void TestUpsertUserByTwoSecondaryFields()
{
var db = DatabaseHelper.Open();
var id = db.Users.UpsertByNameAndPassword(new User() { Age = 20, Name = "Black sheep", Password = "Bah" }).Id;
User actual = db.Users.FindById(id);
Assert.AreEqual(id, actual.Id);
Assert.AreEqual("Black sheep", actual.Name);
Assert.AreEqual("Bah", actual.Password);
Assert.AreEqual(20, actual.Age);
}
[Test]
public void TestUpsertExisting()
{
var db = DatabaseHelper.Open();
var id = db.Users.UpsertByNameAndPassword(new User() { Age = 20, Name = "Black sheep", Password = "Bah" }).Id;
db.Users.UpsertById(new User() { Id = id, Age = 12, Name = "Dog", Password = "Bark" });
User actual = db.Users.FindById(id);
Assert.AreEqual(id, actual.Id);
Assert.AreEqual("Dog", actual.Name);
Assert.AreEqual("Bark", actual.Password);
Assert.AreEqual(12, actual.Age);
}
}
}
| |
using Desharp.Producers;
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Runtime.Remoting.Messaging;
using System.Threading;
using System.Web;
using System.Web.Hosting;
using System.Web.SessionState;
using static Desharp.Core.AppExitWatcher;
namespace Desharp.Core {
internal class Dispatcher {
internal static bool StaticInitialized = false;
internal static ReaderWriterLockSlim StaticInitLock = new ReaderWriterLockSlim();
internal static EnvType EnvType;
internal static string AppRoot;
internal static string SourcesRoot;
internal static string Directory;
internal static int LogWriteMilisecond = 0;
internal static List<string> WebDebugIps = null;
internal static int DumpDepth = 3;
internal static int DumpMaxLength = 1024;
internal static bool SourceLocation = false;
internal static bool? EnabledGlobal = null;
internal static bool DumpCompillerGenerated = false;
internal static LogFormat? OutputGlobal = null;
internal static Dictionary<string, int> Levels;
internal static string WebStaticErrorPage;
internal static readonly VirtualPathProvider VirtualPathProvider;
protected static List<string> webHtmlXmlMimeTypes = new List<string>() {
"text/html", "application/xhtml+xml", "text/xml",
"application/xml", "image/svg+xml", "application/rss+xml",
};
//protected static ReaderWriterLockSlim dispatchersLock = new ReaderWriterLockSlim();
internal static object dispatchersLock = new object { };
internal static volatile Dictionary<string, Dispatcher> dispatchers = new Dictionary<string, Dispatcher>();
protected static string callContextKey = typeof(Dispatcher).FullName;
protected static Dictionary<string, Type> webBarRegisteredPanels = new Dictionary<string, Type>();
internal Exception LastError = null;
internal int DumperSequence = 0;
internal string CurrentlyRendererView = "";
internal LogFormat Output;
internal bool? Enabled = null;
internal Dictionary<string, double> Timers = new Dictionary<string, double>();
internal bool WebAssetsInserted = false;
internal int WebRequestState = 0;
internal double WebRequestEndTime = 0;
internal FireDump FireDump = null;
protected bool? webRedirect = false;
// -1 - it will be never rendered, 0 - it shoud be rendered but will not by default, 1 - it will be rendered
protected int webRenderDesharpBar = 0;
protected bool webTransmitErrorPage = false;
protected List<List<RenderedPanel>> webReqEndSession = null;
protected Dictionary<string, Panels.IPanel> webBarPanels = null;
protected List<string> webExceptions = null;
// Static initializer: computes all process-wide settings exactly once per AppDomain —
// dump limits, environment type (web vs. windows), application root, sources root,
// log output directory — and wires process-exit hooks for the file logger.
static Dispatcher () {
    lock (Dispatcher.dispatchersLock) {
        //try {
        // Double-checked initialization guard: a concurrent caller that loses the
        // race exits early once StaticInitialized is observed as true.
        // NOTE(review): StaticInitialized is never assigned true within this
        // constructor — confirm it is set elsewhere, otherwise the guard is inert.
        Dispatcher.StaticInitLock.EnterUpgradeableReadLock();
        if (Dispatcher.StaticInitialized) {
            Dispatcher.StaticInitLock.ExitUpgradeableReadLock();
            return;
        }
        // Upgrade to a write lock before mutating the static fields.
        Dispatcher.StaticInitLock.EnterWriteLock();
        Dispatcher.StaticInitLock.ExitUpgradeableReadLock();
        // Dump limits: config values override defaults only when positive.
        int cfgDepth = Config.GetDepth();
        if (cfgDepth > 0) Dispatcher.DumpDepth = cfgDepth;
        int cfgMaxLength = Config.GetMaxLength();
        if (cfgMaxLength > 0) Dispatcher.DumpMaxLength = cfgMaxLength;
        bool? cfgSourceLoc = Config.GetSourceLocation();
        if (cfgSourceLoc.HasValue) Dispatcher.SourceLocation = cfgSourceLoc.Value;
        Dispatcher.Levels = Config.GetLevels();
        Dispatcher.LogWriteMilisecond = Config.GetLogWriteMilisecond();
        Dispatcher.VirtualPathProvider = HostingEnvironment.VirtualPathProvider;
        bool appRootInitialized = false;
        // A non-null AppDomainAppId plus a hosted environment means ASP.NET hosting.
        if (HttpRuntime.AppDomainAppId != null && HostingEnvironment.IsHosted) {
            Dispatcher.EnvType = EnvType.Web;
            if (HttpContext.Current != null) {
                try {
                    // Normalize the web app root to forward slashes, no trailing slash.
                    Dispatcher.AppRoot = HttpContext.Current.Server
                        .MapPath("~").Replace('\\', '/').TrimEnd('/');
                    appRootInitialized = true;
                } catch {
                }
            }
            Dispatcher.WebDebugIps = Config.GetDebugIps();
            // Built-in panels first, then any panels declared in configuration.
            Dispatcher.staticInitWebRegisterPanels(typeof(Panels.Exceptions), typeof(Panels.Dumps));
            Dispatcher.staticInitWebRegisterPanels(Config.GetDebugPanels());
            Dispatcher.staticInitWebErrorPage(Config.GetErrorPage());
        } else {
            Dispatcher.EnvType = EnvType.Windows;
            try {
                // Desktop app root = directory of the main executable, normalized.
                Dispatcher.AppRoot = System.IO.Path.GetDirectoryName(
                    System.Diagnostics.Process.GetCurrentProcess().MainModule.FileName
                ).Replace('\\', '/').TrimEnd('/');
                appRootInitialized = true;
            } catch {
            }
            bool exited = false;
            /*
            var bgThread = new System.Threading.Thread(new ThreadStart(delegate () {
                while (true) {
                    System.Threading.Thread.Sleep(1000);
                    if (exited) break;
                }
            }));
            bgThread.IsBackground = true;
            bgThread.Priority = ThreadPriority.Lowest;
            bgThread.Start();
            */
            // Log any unhandled exception, flush the logger if the process is dying,
            // and terminate with a non-zero exit code.
            AppDomain.CurrentDomain.UnhandledException += delegate (object o, UnhandledExceptionEventArgs e1) {
                Debug.Log(e1.ExceptionObject as Exception);
                exited = true;
                if (e1.IsTerminating) Dispatcher.Disposed();
                Environment.Exit(1);
            };
            // Buffered logging is active only when a write interval is configured;
            // in that case the buffer must be flushed on every exit path.
            if (Dispatcher.LogWriteMilisecond > 0) {
                AppDomain.CurrentDomain.ProcessExit += delegate (object o, EventArgs e2) {
                    Dispatcher.Disposed();
                    exited = true;
                };
                // Also catch console close / Ctrl+C via the Win32 control handler.
                AppExitWatcher.SetConsoleCtrlHandler(new HandlerRoutine((type) => {
                    if (exited) return true;
                    Dispatcher.Disposed();
                    return true;
                }), true);
            }
        }
        bool isWindows = Tools.IsWindows();
        // On Windows, capitalize the drive letter for consistent path comparisons.
        if (appRootInitialized && isWindows)
            Dispatcher.AppRoot = Dispatcher.AppRoot.Substring(0, 1).ToUpper()
                + Dispatcher.AppRoot.Substring(1);
        // When running from bin/Debug or bin/Release, the project sources sit two
        // directories above the binaries; remember that root for source links.
        if (
            appRootInitialized && isWindows && (
                Dispatcher.AppRoot.IndexOf("/bin/Debug") == Dispatcher.AppRoot.Length - 10 ||
                Dispatcher.AppRoot.IndexOf("/bin/Release") == Dispatcher.AppRoot.Length - 12
            )
        ) {
            Dispatcher.SourcesRoot = System.IO.Path.GetFullPath(
                Dispatcher.AppRoot + "/../.."
            ).Replace('\\', '/');
        } else {
            Dispatcher.SourcesRoot = "";
        }
        Dispatcher.staticInitEnabledGlobal();
        Dispatcher.staticInitOutputGlobal();
        Dispatcher.staticInitDumpCompillerGenerated();
        if (appRootInitialized)
            Dispatcher.staticInitDirectory(Config.GetDirectory());
        FileLog.StaticInit();
        Dispatcher.StaticInitLock.ExitWriteLock();
        //} catch (Exception e3) {
        //	Debug.InitErrors.Add(e3);
        //}
    }
}
/// <summary>
/// Returns the Dispatcher instance bound to the current unit of work —
/// keyed by request id in a web environment, by "processId:threadId" otherwise.
/// </summary>
/// <param name="createIfNecessary">When true (default), a missing dispatcher is created and registered.</param>
/// <returns>The dispatcher for the current key, or null when absent and <paramref name="createIfNecessary"/> is false.</returns>
internal static Dispatcher GetCurrent (bool createIfNecessary = true) {
    string dispatchedKey = Dispatcher.EnvType == EnvType.Web
        ? Tools.GetRequestId().ToString()
        : $"{Tools.GetProcessId()}:{Tools.GetThreadId()}";
    Dispatcher result;
    lock (Dispatcher.dispatchersLock) {
        // Single TryGetValue instead of the original ContainsKey + indexer
        // pair, which performed the hash lookup twice.
        if (!Dispatcher.dispatchers.TryGetValue(dispatchedKey, out result) && createIfNecessary) {
            result = new Dispatcher();
            Dispatcher.dispatchers[dispatchedKey] = result;
        }
    }
    return result;
}
/// <summary>
/// Unregisters the dispatcher bound to the current unit of work
/// (request id on the web, "processId:threadId" elsewhere).
/// </summary>
/// <returns>True when a dispatcher was registered under the current key and got removed.</returns>
internal static bool Remove () {
    string currentKey = Dispatcher.EnvType == EnvType.Web
        ? Tools.GetRequestId().ToString()
        : $"{Tools.GetProcessId()}:{Tools.GetThreadId()}";
    lock (Dispatcher.dispatchersLock) {
        // Dictionary.Remove already reports whether an entry existed.
        return Dispatcher.dispatchers.Remove(currentKey);
    }
}
// Flushes and shuts down the background file logger. Invoked from the
// process-exit paths wired in the static constructor (unhandled exception,
// ProcessExit, console control handler).
internal static void Disposed () {
    FileLog.Disposed();
}
/// <summary>
/// Decides whether the current HTTP request prefers an HTML response by
/// matching the request's first Accept type against the HTML entries of
/// <see cref="webHtmlXmlMimeTypes"/>.
/// </summary>
/// <returns>True when the first Accept header value contains one of the HTML mime types.</returns>
internal static bool WebCheckIfRequestIsForHtml () {
    string[] acceptTypes = HttpContext.Current.Request.AcceptTypes;
    string firstAcceptType = acceptTypes.Length > 0 ? acceptTypes[0].ToLower() : "";
    foreach (string mimeType in Dispatcher.webHtmlXmlMimeTypes) {
        // The list is ordered with the HTML types first; stop scanning at the
        // first non-HTML entry, mirroring the original early break.
        if (mimeType.IndexOf("html") == -1) break;
        if (firstAcceptType.IndexOf(mimeType) > -1) return true;
    }
    return false;
}
/// <summary>
/// Decides whether the current HTTP response's Content-Type is HTML
/// (default mode) or XML (when <paramref name="checkForXmlOnly"/> is true),
/// using the registered mime types in <see cref="webHtmlXmlMimeTypes"/>.
/// </summary>
/// <param name="checkForXmlOnly">When true, only the XML mime types are matched.</param>
/// <returns>True when the response content type contains a matching mime type.</returns>
internal static bool WebCheckIfResponseIsHtmlOrXml (bool checkForXmlOnly = false) {
    string contentType = HttpContext.Current.Response.ContentType.ToLower();
    if (checkForXmlOnly) {
        // XML mode: every list entry containing "xml" is a candidate.
        foreach (string mimeType in Dispatcher.webHtmlXmlMimeTypes) {
            if (mimeType.IndexOf("xml") > -1 && contentType.IndexOf(mimeType) > -1) return true;
        }
        return false;
    }
    // HTML mode: the list is ordered HTML-first, so scanning stops at the
    // first non-HTML entry, mirroring the original early break.
    foreach (string mimeType in Dispatcher.webHtmlXmlMimeTypes) {
        if (mimeType.IndexOf("html") == -1) break;
        if (contentType.IndexOf(mimeType) > -1) return true;
    }
    return false;
}
// Determines the global enabled flag once. Precedence: explicit config value,
// otherwise any of (entry assembly built as Debug, web.config compilation
// debug="true", attached Visual Studio debugger). Desktop apps additionally
// track debugger attach/detach at runtime.
protected static void staticInitEnabledGlobal () {
    if (Dispatcher.EnabledGlobal.HasValue) return;
    // determine the enabled flag globally, by the main process compilation mode
    bool webEnvironment = Dispatcher.EnvType == EnvType.Web;
    // first - look into config whether debug mode is strictly defined on or off
    bool? configScrictValue = Config.GetEnabled();
    if (configScrictValue.HasValue) {
        Dispatcher.EnabledGlobal = configScrictValue.Value;
    } else {
        // try to determine debug mode by the entry assembly's compilation type:
        bool entryAssemblyBuildedAsDebug = Tools.IsAssemblyBuildAsDebug(
            webEnvironment ? Tools.GetWebEntryAssembly() : Tools.GetWindowsEntryAssembly()
        );
        // try to determine debug mode by (app|web).config, via the attribute below:
        //<configuration>
        //	<system.web>
        //		<compilation
        //			debug ="true" <----- THIS BOOLEAN
        //			targetFramework ="4.5"/>
        //	</system.web>
        //</configuration>
        bool debugModeByConfig = webEnvironment && HttpContext.Current.IsDebuggingEnabled;
        // try to determine whether a Visual Studio debugger is currently attached
        bool vsDebuggerAttached = System.Diagnostics.Debugger.IsAttached;
        // now set the enabled flag to true if any of these values is true
        Dispatcher.EnabledGlobal = entryAssemblyBuildedAsDebug || debugModeByConfig || vsDebuggerAttached;
    }
    if (!webEnvironment) {
        // for desktop apps - re-evaluate every second whether a debugger is attached
        WinDebuggerAttaching.GetInstance().Changed += (o, e) => {
            Dispatcher.EnabledGlobal = ((WinDebuggerAttachingEventArgs)e).Attached;
        };
    }
}
protected static void staticInitOutputGlobal () {
LogFormat? strictConfigValue = Config.GetLogFormat();
if (strictConfigValue.HasValue) {
Dispatcher.OutputGlobal = strictConfigValue;
} else {
Dispatcher.OutputGlobal = LogFormat.Text;
}
}
protected static void staticInitDumpCompillerGenerated () {
bool? dumpCompillerGenerated = Config.GetDumpCompillerGenerated();
if (dumpCompillerGenerated.HasValue) Dispatcher.DumpCompillerGenerated = dumpCompillerGenerated.Value;
}
protected static void staticInitDirectory (string dirRelOrFullPath = "") {
string fullPath;
if (dirRelOrFullPath.Length > 0) {
fullPath = dirRelOrFullPath;
if (fullPath.IndexOf("~") > -1) {
fullPath = fullPath.Replace("~", Dispatcher.AppRoot);
}
fullPath = Path.GetFullPath(fullPath);
fullPath = fullPath.Replace('\\', '/').TrimEnd('/');
} else {
fullPath = Dispatcher.AppRoot;
}
if (Tools.IsWindows()) fullPath = fullPath.Substring(0, 1).ToUpper() + fullPath.Substring(1);
Dispatcher.Directory = fullPath;
// create the directory if doesn't exists and if ot's not a root dir
if (Dispatcher.AppRoot != Dispatcher.Directory) {
if (!(System.IO.Directory.Exists(Dispatcher.Directory))) {
try {
System.IO.Directory.CreateDirectory(Dispatcher.Directory);
} catch (Exception e) {
Dispatcher.Directory = Dispatcher.AppRoot;
Debug.Dump(e);
}
}
}
}
protected static void staticInitWebRegisterPanels (params Type[] panels) {
Type panel;
for (int i = 0; i < panels.Length; i++) {
panel = panels[i];
if (!Dispatcher.webBarRegisteredPanels.ContainsKey(panel.FullName)) {
Dispatcher.webBarRegisteredPanels.Add(panel.FullName, panel);
}
}
Type sysInfoPanelType = typeof(Panels.SystemInfo);
string sysInfoPanelName = sysInfoPanelType.FullName;
if (Dispatcher.webBarRegisteredPanels.ContainsKey(sysInfoPanelName)) {
Dispatcher.webBarRegisteredPanels.Remove(sysInfoPanelName);
Dispatcher.webBarRegisteredPanels = (
new Dictionary<string, Type> { { sysInfoPanelName, sysInfoPanelType } }
).Concat(Dispatcher.webBarRegisteredPanels)
.ToDictionary(k => k.Key, v => v.Value);
}
}
protected static void staticInitWebErrorPage (string cfgErrorPage) {
string errorPage = "";
if (cfgErrorPage.Length > 0) {
if (cfgErrorPage.IndexOf("~") > -1) {
cfgErrorPage = cfgErrorPage.Replace("~", Dispatcher.AppRoot);
}
cfgErrorPage = Path.GetFullPath(cfgErrorPage);
cfgErrorPage = cfgErrorPage.Replace('\\', '/');
if (File.Exists(cfgErrorPage)) {
errorPage = File.ReadAllText(cfgErrorPage);
}
}
if (errorPage.Length == 0) errorPage = Assets.error_html;
Dispatcher.WebStaticErrorPage = errorPage;
}
protected static bool webCheckIfResponseIsRedirect () {
HttpResponse response = HttpContext.Current.Response;
int httpStatusCode = response.StatusCode;
bool redirectCode = (httpStatusCode >= 300 && httpStatusCode < 400);
if (redirectCode) return true;
bool redirectHeader = false;
string[] headerNames = response.Headers.AllKeys;
string header;
for (int i = 0, l = headerNames.Length; i < l; i += 1) {
header = headerNames[i].Trim().ToLower();
if (header.IndexOf("location", StringComparison.OrdinalIgnoreCase) == 0 || header.IndexOf("refresh", StringComparison.OrdinalIgnoreCase) == 0) {
redirectHeader = true;
break;
}
}
return redirectHeader;
}
protected static bool webIsRequestToFileWithDotNetExecExtension () {
string ext = HttpContext.Current.Request.CurrentExecutionFilePathExtension.ToLower();
// empty string for example in all MVC apps controller/action requests...
return ext == "" || ext == ".aspx" || ext == ".asp" || ext == ".cshtml" || ext == ".cshtm" || ext == ".vbhtml" || ext == ".vbhtm" || ext == ".ashx" || ext == ".asmx";
}
protected static bool webIsRequestToHomepageOrDefaultHomepageFile () {
string relativeRequestPath = HttpContext.Current.Request.AppRelativeCurrentExecutionFilePath.ToLower();
return relativeRequestPath == "~/" || relativeRequestPath == "~/default.aspx" || relativeRequestPath == "~/index.aspx";
}
protected static List<List<RenderedPanel>> webGetSessionStorrage () {
List<List<RenderedPanel>> result = new List<List<RenderedPanel>>();
HttpSessionState session = HttpContext.Current.Session;
if (session is HttpSessionState && session[Debug.SESSION_STORAGE_KEY] is List<List<RenderedPanel>>) {
result = (List<List<RenderedPanel>>)session[Debug.SESSION_STORAGE_KEY];
}
return result;
}
		/// <summary>
		/// Creates a dispatcher instance: resolves the local Enabled flag (per
		/// client IP in web apps, from the global flag otherwise), forces
		/// debugging on when a debugger is attached and copies the global
		/// output format.
		/// </summary>
		internal Dispatcher () {
			if (Dispatcher.EnvType == EnvType.Web) {
				this.webInitEnabled();
			} else {
				this.Enabled = Dispatcher.EnabledGlobal;
			}
			// an attached debugger always wins over configuration
			if (System.Diagnostics.Debugger.IsAttached) this.Enabled = true;
			this.Output = Dispatcher.OutputGlobal.Value;
		}
internal FireDump GetFireDump () {
if (this.FireDump == null) this.FireDump = new FireDump(this.Enabled == true);
return this.FireDump;
}
		/// <summary>
		/// Applies a runtime configuration object over static and per-dispatcher
		/// settings; guarded by the global dispatchers lock. Only explicitly set
		/// (non-Auto / non-null / positive) config members overwrite current state.
		/// </summary>
		/// <param name="cfg">Configuration values; unset members are ignored.</param>
		internal void Configure (DebugConfig cfg) {
			lock (Dispatcher.dispatchersLock) {
				if (cfg.EnvType != EnvType.Auto)
					Dispatcher.EnvType = cfg.EnvType;
				if (cfg.Enabled.HasValue)
					this.Enabled = cfg.Enabled.Value;
				if (cfg.LogFormat != LogFormat.Auto)
					this.Output = cfg.LogFormat;
				if (cfg.Directory != null && cfg.Directory.Length > 0)
					Dispatcher.staticInitDirectory(cfg.Directory);
				if (cfg.ErrorPage != null && cfg.ErrorPage.Length > 0)
					Dispatcher.staticInitWebErrorPage(cfg.ErrorPage);
				if (cfg.Depth != null && cfg.Depth.Value > 0)
					Dispatcher.DumpDepth = cfg.Depth.Value;
				if (cfg.LogWriteMilisecond != null && cfg.LogWriteMilisecond.Value > 0) {
					Dispatcher.LogWriteMilisecond = cfg.LogWriteMilisecond.Value;
					// (re)start background log writing with the new interval if needed
					FileLog.InitBackgroundWritingIfNecessary();
				}
				if (cfg.SourceLocation.HasValue)
					Dispatcher.SourceLocation = cfg.SourceLocation.Value;
				if (cfg.Panels != null && cfg.Panels.Length > 0)
					Dispatcher.staticInitWebRegisterPanels(cfg.Panels);
			}
		}
internal void WriteDumpToOutput (string dumpedCode) {
if (Dispatcher.EnvType == EnvType.Web) {
if (!this.Enabled.HasValue) this.webInitEnabled();
if (this.Enabled.Value != true) {
FileLog.Log(dumpedCode, LevelValues.Values[Level.DEBUG]);
} else {
if (this.webBarPanels == null) {
this.webRenderDesharpBar = 1; // forcely change to render web bar
this.webInitWebBarPanels();
}
if (this.webBarPanels.ContainsKey(Panels.Dumps.PanelName)) {
Panels.Dumps dumpsPanel = this.webBarPanels[Panels.Dumps.PanelName] as Panels.Dumps;
dumpsPanel.AddRenderedDump(dumpedCode);
}
}
} else {
Console.Write(dumpedCode);
}
}
internal void WriteExceptionToOutput (List<string> dumpedExceptions) {
if (Dispatcher.EnvType == EnvType.Web) {
if (!this.Enabled.HasValue) this.webInitEnabled();
if (this.Enabled.Value != true) {
foreach (string dumpedException in dumpedExceptions) {
FileLog.Log(dumpedException, "exception");
}
} else {
if (this.webBarPanels == null) {
this.webRenderDesharpBar = 1; // forcely change to render web bar
this.webInitWebBarPanels();
}
if (this.webBarPanels.ContainsKey(Panels.Dumps.PanelName)) {
Panels.Exceptions exceptionsPanel = this.webBarPanels[Panels.Exceptions.PanelName] as Panels.Exceptions;
foreach (string dumpedException in dumpedExceptions) {
exceptionsPanel.AddRenderedException(dumpedException);
}
}
}
} else {
Console.WriteLine(String.Join(Environment.NewLine, dumpedExceptions.ToArray()));
}
}
		/// <summary>
		/// Terminates the current request (web) or process (desktop). For web
		/// requests, all remaining lifecycle steps are executed first so the
		/// debug bar / logs are completely written before the response ends.
		/// </summary>
		internal void Stop () {
			if (Dispatcher.EnvType == EnvType.Web) {
				// run any lifecycle steps not reached yet, in order
				if (this.WebRequestState == 0) this.WebRequestBegin();
				if (this.WebRequestState == 1) this.WebRequestSessionBegin();
				if (this.WebRequestState == 2) this.WebRequestSessionEnd();
				this.WebRequestPreSendHeaders();
				this.WebRequestPreSendBody(); // HttpContext.Current.Response.Flush();
				// drop this dispatcher from the static registry before ending the response
				Dispatcher.Remove();
				HttpContext.Current.Response.Flush();
				// NOTE(review): Response.End() aborts the request thread by design - confirm callers expect that
				HttpContext.Current.Response.End();
			} else {
				Environment.Exit(Environment.ExitCode);
			}
		}
internal void WebRequestBegin () {
if (this.Enabled == true) {
this.webInitBarRendering();
this.webInitWebBarPanels();
}
this.WebRequestState = 1;
}
internal void WebRequestSessionBegin () {
if (this.WebRequestState < 2) {
if (this.Enabled == true && this.webBarPanels != null) {
Desharp.Panels.ISessionPanel sessionPanel;
foreach (var item in this.webBarPanels) {
if (item.Value is Desharp.Panels.ISessionPanel) {
sessionPanel = item.Value as Desharp.Panels.ISessionPanel;
try {
sessionPanel.SessionBegin();
} catch { }
}
}
}
this.WebRequestState = 2;
}
}
		/// <summary>
		/// Request lifecycle step 3 - called when the HTTP session is being
		/// released. For redirect responses the rendered panels are parked in the
		/// session (to be displayed by the follow-up request); for normal
		/// responses any previously parked panels are pulled out of the session
		/// to be rendered at request end.
		/// </summary>
		internal void WebRequestSessionEnd () {
			if (this.WebRequestState < 3) {
				if (this.Enabled == true && this.webBarPanels != null) {
					this.WebRequestEndTime = Debug.GetProcessingTime();
					HttpSessionState session = HttpContext.Current.Session;
					List<List<RenderedPanel>> sessionStorrage = Dispatcher.webGetSessionStorrage();
					this.webRedirect = Dispatcher.webCheckIfResponseIsRedirect();
					if (this.webRedirect == true) {
						// redirect: render the panels now and store them into the session
						// so the redirect target request can display them
						this.webRequestSessionEndCallBarPanelsSessionEnd();
						List<RenderedPanel> renderedPanels = HtmlResponse.RenderDebugPanels(this.webBarPanels);
						sessionStorrage.Insert(0, renderedPanels);
						if (session is HttpSessionState) {
							session[Debug.SESSION_STORAGE_KEY] = sessionStorrage;
						}
						this.webBarPanels = null; // frees memory
					} else {
						// normal response: take over panels stored by previous redirected requests
						this.webReqEndSession = sessionStorrage;
						// clear session storage, panels will be rendered in this request end event
						if (session is HttpSessionState && session[Debug.SESSION_STORAGE_KEY] != null) {
							session.Remove(Debug.SESSION_STORAGE_KEY);
						}
						this.webRequestSessionEndCallBarPanelsSessionEnd();
					}
				}
				this.WebRequestState = 3;
			}
		}
internal void WebRequestPreSendHeaders () {
this.GetFireDump().CloseHeaders();
if (this.Enabled == true) {
if (!this.webRedirect.HasValue) this.webRedirect = Dispatcher.webCheckIfResponseIsRedirect();
// add possible rendered exceptions and debug bar if necessary
if (this.webRenderDesharpBar > -1 && Dispatcher.WebCheckIfResponseIsHtmlOrXml()) {
this.webRenderDesharpBar = 1;
}
if (this.webRenderDesharpBar == 1 && this.webRedirect != true) {
string responseContentType = HttpContext.Current.Response.ContentType.ToLower();
if (!Dispatcher.WebCheckIfResponseIsHtmlOrXml()) {
// if there was necessary to render in output anything (by response type change to text/html)
// change response to that type if it is not any proper type to render any html code
HttpContext.Current.Response.ContentType = "text/html";
}
// manage Content-Security-Policy http header
this.webManageContentSecurityPolicyHeader();
}
} else {
if (this.webTransmitErrorPage && Dispatcher.WebStaticErrorPage.Length > 0)
HtmlResponse.TransmitStaticErrorPagePrepareHeaders();
}
}
internal void WebRequestPreSendBody() {
if (this.Enabled == true) {
if (this.webRenderDesharpBar == 1 && this.webRedirect != true) {
// render debug bar for current request with any previous redirect records from session
List<List<RenderedPanel>> renderedPanels = this.webReqEndSession
?? Dispatcher.webGetSessionStorrage();
if (this.webBarPanels != null) {
renderedPanels.Insert(0, HtmlResponse.RenderDebugPanels(this.webBarPanels));
this.webBarPanels = null;
}
HtmlResponse.WriteDebugBarToResponse(renderedPanels);
} else {
//HttpContext.Current.Response.Flush();
}
} else {
if (this.webTransmitErrorPage && Dispatcher.WebStaticErrorPage.Length > 0)
HtmlResponse.TransmitStaticErrorPageSendContent();
//HttpContext.Current.Response.Flush();
}
}
protected void webManageContentSecurityPolicyHeader () {
NameValueCollection rawHeaders = HttpContext.Current.Response.Headers;
List<string> headers = rawHeaders.AllKeys.ToList<string>();
string headerName = "";
string headerValue = "";
if (headers.Contains("Content-Security-Policy")) headerName = "Content-Security-Policy";
if (headers.Contains("X-Content-Security-Policy")) headerName = "X-Content-Security-Policy";
if (headerName.Length == 0) return;
headerValue = rawHeaders[headerName];
List<string> headerValueExploded = headerValue.Split(';').ToList<string>();
string explodedItem;
List<string> resultValues = new List<string>();
bool scriptSrcCatched = false;
bool styleSrcCatched = false;
bool imgSrcCatched = false;
bool fontSrcCatched = false;
for (int i = 0, l = headerValueExploded.Count; i < l; i += 1) {
explodedItem = headerValueExploded[i].Trim();
if (explodedItem.IndexOf("script-src") > -1) {
scriptSrcCatched = true;
if (explodedItem.IndexOf("'unsafe-inline'") == -1) explodedItem += " 'unsafe-inline'";
if (explodedItem.IndexOf("'unsafe-eval'") == -1) explodedItem += " 'unsafe-eval'";
}
if (explodedItem.IndexOf("style-src") > -1) {
styleSrcCatched = true;
if (explodedItem.IndexOf("'unsafe-inline'") == -1) explodedItem += " 'unsafe-inline'";
}
if (explodedItem.IndexOf("img-src") > -1) {
imgSrcCatched = true;
if (explodedItem.IndexOf("'self'") == -1) explodedItem += " 'self'";
if (explodedItem.IndexOf("data:") == -1) explodedItem += " data:";
}
if (explodedItem.IndexOf("font-src") > -1) {
fontSrcCatched = true;
if (explodedItem.IndexOf("'self'") == -1) explodedItem += " 'self'";
if (explodedItem.IndexOf("data:") == -1) explodedItem += " data:";
}
resultValues.Add(explodedItem);
}
if (!scriptSrcCatched) resultValues.Add("script-src 'unsafe-inline' 'unsafe-eval'");
if (!styleSrcCatched) resultValues.Add("style-src 'unsafe-inline'");
if (!imgSrcCatched) resultValues.Add("img-src 'self' data:");
if (!fontSrcCatched) resultValues.Add("font-src 'self' data:");
HttpContext.Current.Response.Headers.Set(headerName, String.Join("; ", resultValues.ToArray()));
}
internal void WebRequestError () {
// get causing exception object
Exception lastException = HttpContext.Current.Server.GetLastError();
if (lastException == null) return;
// clear stupid microsoft error screen
HttpContext.Current.Server.ClearError();
// get request id
long crt = Tools.GetRequestId();
if (this.Enabled == true) {
// render exception and store it for request end to send to into client
Debug.Dump(lastException, new DumpOptions {
CatchedException = false,
SourceLocation = true
});
// keep everything bad, what should be written in response
} else {
// write exception into hard drive
Debug.Log(lastException);
// clear everything bad, what shoud be written in response
HttpContext.Current.Response.Clear();
// transmit error page at request end
this.webTransmitErrorPage = true;
}
HttpContext.Current.Response.Flush();
Dispatcher.Remove();
}
protected void webInitEnabled () {
this.Enabled = Dispatcher.EnabledGlobal == true;
// if there are defined any debug ips - then allow globaly allowed debug mode only for listed client ips
if (this.Enabled == true && Dispatcher.WebDebugIps.Count > 0) {
string clientIpAddress = Tools.GetClientIpAddress().ToLower();
this.Enabled = Dispatcher.WebDebugIps.Contains(clientIpAddress);
}
}
protected void webRequestSessionEndCallBarPanelsSessionEnd () {
if (this.webBarPanels != null) {
Desharp.Panels.ISessionPanel sessionPanel;
foreach (var item in this.webBarPanels) {
if (item.Value is Desharp.Panels.ISessionPanel) {
sessionPanel = item.Value as Desharp.Panels.ISessionPanel;
try {
sessionPanel.SessionEnd();
} catch { }
}
}
}
}
protected void webInitWebBarPanels (long crt = -1) {
if (this.webRenderDesharpBar < 1) return; // do not register any panels for non html/xml outputs
this.webBarPanels = new Dictionary<string, Panels.IPanel>();
Panels.IPanel panel;
foreach (var item in Dispatcher.webBarRegisteredPanels) {
panel = (Panels.IPanel)Activator.CreateInstance(item.Value);
if (this.webBarPanels.ContainsKey(panel.Name)) {
throw new Exception(String.Format(
"Panel with name: '{0}' has been already registered, use different panel name.", panel.Name
));
}
this.webBarPanels.Add(panel.Name, panel);
}
}
protected void webInitBarRendering () {
string relativeRequestPath = HttpContext.Current.Request.AppRelativeCurrentExecutionFilePath;
if (
Dispatcher.webIsRequestToHomepageOrDefaultHomepageFile() == false &&
Dispatcher.webIsRequestToFileWithDotNetExecExtension() == false && (
Dispatcher.VirtualPathProvider.FileExists(relativeRequestPath) ||
Dispatcher.VirtualPathProvider.DirectoryExists(relativeRequestPath)
)
) {
this.webRenderDesharpBar = -1;
} else if (Dispatcher.WebCheckIfRequestIsForHtml()) {
this.webRenderDesharpBar = 1;
} else {
this.webRenderDesharpBar = 0;
}
}
}
}
| |
//*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the Microsoft Public License.
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Json;
using System.Security.Cryptography;
using System.Text;
using System.Web;
namespace TwitterClient.Common
{
    /// <summary>
    /// Immutable bundle of OAuth 1.0a credentials and filter settings used to
    /// open the Twitter streaming connection (see Tweet.ReadTweets).
    /// </summary>
    public struct TwitterConfig
    {
        // OAuth 1.0a access token for the account reading the stream.
        public readonly string OAuthToken;
        // Secret paired with OAuthToken; used to sign requests.
        public readonly string OAuthTokenSecret;
        // Application (consumer) key.
        public readonly string OAuthConsumerKey;
        // Application (consumer) secret; part of the signing key.
        public readonly string OAuthConsumerSecret;
        // Keywords sent as the 'track' parameter to statuses/filter.
        public readonly string Keywords;
        // NOTE(review): not used inside this file - presumably consumed by callers; verify.
        public readonly string SearchGroups;
        /// <summary>Initializes all credential and filter fields.</summary>
        public TwitterConfig(string oauthToken, string oauthTokenSecret, string oauthConsumerKey, string oauthConsumerSecret, string keywords, string searchGroups)
        {
            OAuthToken = oauthToken;
            OAuthTokenSecret = oauthTokenSecret;
            OAuthConsumerKey = oauthConsumerKey;
            OAuthConsumerSecret = oauthConsumerSecret;
            Keywords = keywords;
            SearchGroups = searchGroups;
        }
    }
    /// <summary>Subset of the Twitter user object embedded in each streamed tweet.</summary>
    [DataContract]
    public class TwitterUser
    {
        // User's configured time zone name.
        [DataMember(Name = "time_zone")] public string TimeZone;
        // User's display name.
        [DataMember(Name = "name")] public string Name;
        // URL of the user's avatar image.
        [DataMember(Name = "profile_image_url")] public string ProfileImageUrl;
    }
    /// <summary>
    /// A single tweet deserialized from the Twitter streaming API, plus the
    /// streaming logic itself (OAuth 1.0a signed request to statuses/filter).
    /// </summary>
    [DataContract]
    public class Tweet
    {
        [DataMember(Name = "id")] public Int64 Id;
        [DataMember(Name = "in_reply_to_status_id")] public Int64? ReplyToStatusId;
        [DataMember(Name = "in_reply_to_user_id")] public Int64? ReplyToUserId;
        [DataMember(Name = "in_reply_to_screen_name")] public string ReplyToScreenName;
        [DataMember(Name = "retweeted")] public bool Retweeted;
        [DataMember(Name = "text")] public string Text;
        [DataMember(Name = "lang")] public string Language;
        [DataMember(Name = "source")] public string Source;
        [DataMember(Name = "retweet_count")] public string RetweetCount;
        [DataMember(Name = "user")] public TwitterUser User;
        [DataMember(Name = "created_at")] public string CreatedAt;
        // Original JSON line this tweet was parsed from; not part of the data contract.
        [IgnoreDataMember] public string RawJson;
        public Tweet()
        {
            keepRunning = true;
        }
        // Loop flag for StreamStatuses; set to false to stop streaming.
        public bool keepRunning { get; set; }
        /// <summary>
        /// Lazily streams tweets from statuses/filter until keepRunning becomes
        /// false. Blank keep-alive lines and delete notices (lines starting with
        /// {"delete") are skipped; on a read error or end of stream (line == null)
        /// the connection is reopened.
        /// </summary>
        public IEnumerable<Tweet> StreamStatuses(TwitterConfig config)
        {
            DataContractJsonSerializer jsonSerializer = new DataContractJsonSerializer(typeof(Tweet));
            var streamReader = ReadTweets(config);
            while (keepRunning)
            {
                string line = null;
                // best-effort read: any stream error leaves line == null and triggers the reconnect below
                try { line = streamReader.ReadLine(); }
                catch (Exception) { }
                if (!string.IsNullOrWhiteSpace(line) && !line.StartsWith("{\"delete\""))
                {
                    var result = (Tweet)jsonSerializer.ReadObject(new MemoryStream(Encoding.UTF8.GetBytes(line)));
                    result.RawJson = line;
                    yield return result;
                }
                // The stream has ended - or, more likely, some error occurred.
                // Reconnect to the twitter feed.
                if (line == null)
                {
                    streamReader = ReadTweets(config);
                }
            }
        }
        // NOTE(review): never assigned inside this class - presumably set by callers; verify before relying on it.
        public HttpWebRequest Request { get; set;}
        /// <summary>
        /// Opens one OAuth 1.0a signed POST request to the streaming
        /// statuses/filter endpoint and returns a reader over the response
        /// stream, or StreamReader.Null when no response arrives within 5 seconds.
        /// </summary>
        static TextReader ReadTweets(TwitterConfig config)
        {
            var oauth_version = "1.0";
            var oauth_signature_method = "HMAC-SHA1";
            // unique request details
            var oauth_nonce = Convert.ToBase64String(new ASCIIEncoding().GetBytes(DateTime.Now.Ticks.ToString()));
            // unix timestamp (seconds since epoch)
            var oauth_timestamp = Convert.ToInt64(
                (DateTime.UtcNow - new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc))
                .TotalSeconds).ToString();
            var resource_url = "https://stream.twitter.com/1.1/statuses/filter.json";
            // create oauth signature base string (parameters in alphabetical order)
            var baseString = string.Format(
                "oauth_consumer_key={0}&oauth_nonce={1}&oauth_signature_method={2}&" +
                "oauth_timestamp={3}&oauth_token={4}&oauth_version={5}&track={6}",
                config.OAuthConsumerKey,
                oauth_nonce,
                oauth_signature_method,
                oauth_timestamp,
                config.OAuthToken,
                oauth_version,
                Uri.EscapeDataString(config.Keywords));
            baseString = string.Concat("POST&", Uri.EscapeDataString(resource_url), "&", Uri.EscapeDataString(baseString));
            // signing key = escaped consumer secret + '&' + escaped token secret
            var compositeKey = string.Concat(Uri.EscapeDataString(config.OAuthConsumerSecret),
                "&", Uri.EscapeDataString(config.OAuthTokenSecret));
            string oauth_signature;
            using (var hasher = new HMACSHA1(ASCIIEncoding.ASCII.GetBytes(compositeKey)))
            {
                oauth_signature = Convert.ToBase64String(
                    hasher.ComputeHash(ASCIIEncoding.ASCII.GetBytes(baseString)));
            }
            // create the request header
            var authHeader = string.Format(
                "OAuth oauth_nonce=\"{0}\", oauth_signature_method=\"{1}\", " +
                "oauth_timestamp=\"{2}\", oauth_consumer_key=\"{3}\", " +
                "oauth_token=\"{4}\", oauth_signature=\"{5}\", " +
                "oauth_version=\"{6}\"",
                Uri.EscapeDataString(oauth_nonce),
                Uri.EscapeDataString(oauth_signature_method),
                Uri.EscapeDataString(oauth_timestamp),
                Uri.EscapeDataString(config.OAuthConsumerKey),
                Uri.EscapeDataString(config.OAuthToken),
                Uri.EscapeDataString(oauth_signature),
                Uri.EscapeDataString(oauth_version)
            );
            // make the request
            ServicePointManager.Expect100Continue = false;
            // updating TLS version "TLS < 1.2 is unsupported by Tweeter starting July 15, 2019"
            ServicePointManager.SecurityProtocol |= SecurityProtocolType.Tls12;
            // track keywords are also appended to the URL query string
            var postBody = "track=" + HttpUtility.UrlEncode(config.Keywords);
            resource_url += "?" + postBody;
            HttpWebRequest request = (HttpWebRequest)WebRequest.Create(resource_url);
            request.Headers.Add("Authorization", authHeader);
            request.Method = "POST";
            request.ContentType = "application/x-www-form-urlencoded";
            request.PreAuthenticate = true;
            request.AllowWriteStreamBuffering = true;
            request.CachePolicy = new System.Net.Cache.RequestCachePolicy(System.Net.Cache.RequestCacheLevel.BypassCache);
            // bail out and retry after 5 seconds
            var tresponse = request.GetResponseAsync();
            if (tresponse.Wait(5000))
                return new StreamReader(tresponse.Result.GetResponseStream());
            else
            {
                request.Abort();
                return StreamReader.Null;
            }
        }
    }
    /// <summary>
    /// Flattened tweet record combining tweet, user and sentiment fields.
    /// </summary>
    public class TwitterPayload
    {
        public Int64 ID;
        public DateTime CreatedAt;
        public string UserName;
        public string TimeZone;
        public string ProfileImageUrl;
        public string Text;
        public string Language;
        public string Topic;
        public int SentimentScore;
        // Original JSON the record was built from; excluded from ToString.
        public string RawJson;
        /// <summary>Anonymous-type style rendering of all fields except RawJson.</summary>
        public override string ToString()
        {
            return new { ID, CreatedAt, UserName, TimeZone, ProfileImageUrl, Text, Language, Topic, SentimentScore }.ToString();
        }
    }
public class Payload
{
public DateTime CreatedAt { get; set; }
public string Topic { get; set; }
public int SentimentScore { get; set; }
public string Author { get; set; }
public string Text { get; set; }
public bool SendExtended { get; set; }
public override string ToString()
{
return SendExtended ? new { CreatedAt, Topic, SentimentScore, Author, Text }.ToString() : new { CreatedAt, Topic, SentimentScore }.ToString();
}
}
    /// <summary>
    /// Minimal variant of TwitterPayload with only the essential fields;
    /// the commented-out fields document what was trimmed from the full shape.
    /// </summary>
    public class TwitterMin
    {
        public Int64 ID;
        public DateTime CreatedAt;
        public string UserName;
        // public string TimeZone;
        // public string ProfileImageUrl;
        public string Text;
        // public string Language;
        public string Topic;
        public int SentimentScore;
        // public string RawJson;
        /// <summary>Anonymous-type style rendering of all active fields.</summary>
        public override string ToString()
        {
            return new { ID, CreatedAt, UserName, Text, Topic, SentimentScore }.ToString();
        }
    }
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections;
using System.Xml;
using System.IO;
using System.Globalization;
using Microsoft.Build.BuildEngine.Shared;
using error = Microsoft.Build.BuildEngine.Shared.ErrorUtilities;
namespace Microsoft.Build.BuildEngine
{
    /// <summary>
    /// Class representing a When block (also used to represent the Otherwise
    /// block on a Choose).
    /// </summary>
    internal class When
    {
        #region Member Data
        /// <summary>
        /// Parsing mode: ProcessWhen requires a sole Condition attribute,
        /// ProcessOtherwise forbids all attributes.
        /// </summary>
        public enum Options
        {
            ProcessWhen,
            ProcessOtherwise,
        };
        // All child groups (property groups, item groups, nested Chooses) in document order.
        private GroupingCollection propertyAndItemLists = null;
        // Project that owns this When/Otherwise block.
        private Project parentProject = null;
        // This is the "Condition" attribute on the <When> element; null for <Otherwise>.
        private XmlAttribute conditionAttribute = null;
        #endregion
        #region Constructors
        /// <summary>
        /// Constructor for the When block. Parses the contents of the When block (property
        /// groups, item groups, and nested chooses) and stores them.
        /// </summary>
        /// <remarks>
        /// Also used for Otherwise elements - the options parameter selects the mode.
        /// </remarks>
        /// <owner>DavidLe</owner>
        /// <param name="parentProject"></param>
        /// <param name="parentGroupingCollection"></param>
        /// <param name="whenElement"></param>
        /// <param name="importedFromAnotherProject"></param>
        /// <param name="options"></param>
        /// <param name="nestingDepth">stack overflow guard</param>
        internal When(
            Project parentProject,
            GroupingCollection parentGroupingCollection,
            XmlElement whenElement,
            bool importedFromAnotherProject,
            Options options,
            int nestingDepth
            )
        {
            // Make sure the <When> node has been given to us.
            error.VerifyThrow(whenElement != null, "Need valid (non-null) <When> element.");
            // Make sure this really is the <When> node.
            error.VerifyThrow(whenElement.Name == XMakeElements.when || whenElement.Name == XMakeElements.otherwise,
                "Expected <{0}> or <{1}> element; received <{2}> element.",
                XMakeElements.when, XMakeElements.otherwise, whenElement.Name);
            this.propertyAndItemLists = new GroupingCollection(parentGroupingCollection);
            this.parentProject = parentProject;
            // NOTE(review): elementName is computed but never used below - candidate for removal.
            string elementName = ((options == Options.ProcessWhen) ? XMakeElements.when : XMakeElements.otherwise);
            if (options == Options.ProcessWhen)
            {
                // <When> must carry exactly one attribute: Condition.
                conditionAttribute = ProjectXmlUtilities.GetConditionAttribute(whenElement, /*verify sole attribute*/ true);
                ProjectErrorUtilities.VerifyThrowInvalidProject(conditionAttribute != null, whenElement, "MissingCondition", XMakeElements.when);
            }
            else
            {
                // <Otherwise> may not carry any attributes at all.
                ProjectXmlUtilities.VerifyThrowProjectNoAttributes(whenElement);
            }
            ProcessWhenChildren(whenElement, parentProject, importedFromAnotherProject, nestingDepth);
        }
        #endregion
        #region Properties
        /// <summary>
        /// Property containing the condition for the When clause; empty string
        /// for Otherwise blocks (never null).
        /// </summary>
        /// <remarks>
        /// </remarks>
        /// <owner>DavidLe</owner>
        /// <returns>string</returns>
        internal string Condition
        {
            get
            {
                return (this.conditionAttribute == null) ? String.Empty : this.conditionAttribute.Value;
            }
        }
        /// <summary>
        /// The XML attribute backing the condition for the When clause;
        /// null for Otherwise blocks.
        /// </summary>
        /// <remarks>
        /// </remarks>
        /// <owner>DavidLe</owner>
        /// <returns>XmlAttribute</returns>
        internal XmlAttribute ConditionAttribute
        {
            get
            {
                return this.conditionAttribute;
            }
        }
        #endregion
        /// <summary>
        /// The collection of all sub-groups (item/property groups and chooses) inside this When
        /// </summary>
        internal GroupingCollection PropertyAndItemLists
        {
            get
            {
                return this.propertyAndItemLists;
            }
        }
        #region Methods
        /// <summary>
        /// Helper method for processing the children of a When. Only parses Choose,
        /// PropertyGroup, and ItemGroup. All other tags result in an error.
        /// </summary>
        /// <remarks>
        /// </remarks>
        /// <owner>DavidLe</owner>
        /// <param name="parentNode"></param>
        /// <param name="parentProjectForChildren"></param>
        /// <param name="importedFromAnotherProject"></param>
        /// <param name="nestingDepth">Number of parent <Choose> elements this is nested inside</param>
        private void ProcessWhenChildren
        (
            XmlElement parentNode,
            Project parentProjectForChildren, bool importedFromAnotherProject,
            int nestingDepth
        )
        {
            // Loop through the child nodes of the <When> element.
            foreach (XmlNode whenChildNode in parentNode)
            {
                switch (whenChildNode.NodeType)
                {
                    // Handle XML comments under the <When> node (just ignore them).
                    case XmlNodeType.Comment:
                        // fall through
                    case XmlNodeType.Whitespace:
                        // ignore whitespace
                        break;
                    case XmlNodeType.Element:
                        {
                            // Make sure this element doesn't have a custom namespace
                            ProjectXmlUtilities.VerifyThrowProjectValidNamespace((XmlElement)whenChildNode);
                            // The only three types of child nodes that a <When> element can contain
                            // are <PropertyGroup>, <ItemGroup> and <Choose>.
                            switch (whenChildNode.Name)
                            {
                                case XMakeElements.itemGroup:
                                    BuildItemGroup newItemGroup = new BuildItemGroup((XmlElement)whenChildNode, importedFromAnotherProject, parentProjectForChildren);
                                    this.propertyAndItemLists.InsertAtEnd(newItemGroup);
                                    break;
                                // Process the <PropertyGroup> element.
                                case XMakeElements.propertyGroup:
                                    BuildPropertyGroup newPropertyGroup = new BuildPropertyGroup(parentProjectForChildren, (XmlElement)whenChildNode, importedFromAnotherProject);
                                    newPropertyGroup.EnsureNoReservedProperties();
                                    this.propertyAndItemLists.InsertAtEnd(newPropertyGroup);
                                    break;
                                // Process the <Choose> element.
                                case XMakeElements.choose:
                                    Choose newChoose = new Choose(parentProjectForChildren, this.PropertyAndItemLists, (XmlElement)whenChildNode,
                                        importedFromAnotherProject, nestingDepth);
                                    this.propertyAndItemLists.InsertAtEnd(newChoose);
                                    break;
                                default:
                                    {
                                        ProjectXmlUtilities.ThrowProjectInvalidChildElement(whenChildNode);
                                        break;
                                    }
                            }
                        }
                        break;
                    default:
                        {
                            ProjectXmlUtilities.ThrowProjectInvalidChildElement(whenChildNode);
                            break;
                        }
                }
            }
        }
        /// <summary>
        /// Evaluates a When clause's condition. Returns true if the condition is
        /// true (or absent, as on Otherwise blocks), false otherwise.
        /// </summary>
        /// <remarks>
        /// </remarks>
        /// <owner>DavidLe</owner>
        /// <param name="parentPropertyBag"></param>
        /// <param name="conditionedPropertiesTable"></param>
        /// <returns>bool</returns>
        internal bool EvaluateCondition
        (
            BuildPropertyGroup parentPropertyBag,
            Hashtable conditionedPropertiesTable
        )
        {
            // NOTE(review): this.Condition never returns null (it returns String.Empty),
            // so the null check below is always true; the real guard is EvaluateCondition.
            if (
                    (this.Condition != null)
                    &&
                    !Utilities.EvaluateCondition(this.Condition, this.ConditionAttribute,
                        new Expander(parentPropertyBag, parentProject.EvaluatedItemsByName),
                        conditionedPropertiesTable, ParserOptions.AllowProperties, this.parentProject.ParentEngine.LoggingServices, this.parentProject.ProjectBuildEventContext)
                )
            {
                return false;
            }
            return true;
        }
        /// <summary>
        /// Evaluates a When clause's contents: applies the contained property
        /// groups (pass 1), item groups (pass 2) and nested Chooses (both passes).
        /// </summary>
        /// <remarks>
        /// </remarks>
        /// <owner>DavidLe</owner>
        /// <param name="parentPropertyBag"></param>
        /// <param name="ignoreCondition"></param>
        /// <param name="honorCondition"></param>
        /// <param name="conditionedPropertiesTable"></param>
        /// <param name="pass"></param>
        internal void Evaluate
        (
            BuildPropertyGroup parentPropertyBag,
            bool ignoreCondition, bool honorCondition,
            Hashtable conditionedPropertiesTable,
            ProcessingPass pass
        )
        {
            foreach (IItemPropertyGrouping propOrItem in this.propertyAndItemLists)
            {
                // This is where we selectively evaluate PropertyGroups or Itemgroups during their respective passes.
                // Once we go to a one-pass model, we'll simple spin through all the children and evaluate.
                if (propOrItem is BuildPropertyGroup &&
                    pass == ProcessingPass.Pass1)
                {
                    ((BuildPropertyGroup) propOrItem).Evaluate(parentPropertyBag, conditionedPropertiesTable, pass);
                }
                else if (propOrItem is BuildItemGroup &&
                    pass == ProcessingPass.Pass2)
                {
                    ((BuildItemGroup) propOrItem).Evaluate(parentPropertyBag, parentProject.EvaluatedItemsByName, ignoreCondition, honorCondition, pass);
                }
                else if (propOrItem is Choose)
                {
                    ((Choose) propOrItem).Evaluate(parentPropertyBag, ignoreCondition, honorCondition, conditionedPropertiesTable, pass);
                }
            }
        }
        #endregion
    }
}
| |
/*
* Copyright 2015 Software Freedom Conservancy.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System;
using System.IO;
using System.Net;
using System.Threading;
using System.Collections;
using System.Text;
using Selenium;
namespace Selenium
{
/// <summary>
/// Sends commands and retrieves results via HTTP.
/// </summary>
public class HttpCommandProcessor : ICommandProcessor
{
    private readonly string url;
    private string sessionId;            // assigned by Start(), cleared by Stop()
    private string browserStartCommand;
    private string browserURL;
    private string extensionJs;

    /// <summary>
    /// The server URL, to whom we send command requests
    /// </summary>
    public string Url
    {
        get { return url; }
    }

    /// <summary>
    /// Specifies a server host/port, a command to launch the browser, and a starting URL for the browser.
    /// </summary>
    /// <param name="serverHost">the host name on which the Selenium Server resides</param>
    /// <param name="serverPort">the port on which the Selenium Server is listening</param>
    /// <param name="browserStartCommand">the command string used to launch the browser, e.g. "*firefox" or "c:\\program files\\internet explorer\\iexplore.exe"</param>
    /// <param name="browserURL">the starting URL including just a domain name. We'll start the browser pointing at the Selenium resources on this URL,
    /// e.g. "http://www.google.com" would send the browser to "http://www.google.com/selenium-server/RemoteRunner.html"</param>
    public HttpCommandProcessor(string serverHost, int serverPort, string browserStartCommand, string browserURL)
    {
        this.url = "http://" + serverHost +
            ":" + serverPort + "/selenium-server/driver/";
        this.browserStartCommand = browserStartCommand;
        this.browserURL = browserURL;
        this.extensionJs = "";
    }

    /// <summary>
    /// Specifies the URL to the server, a command to launch the browser, and a starting URL for the browser.
    /// </summary>
    /// <param name="serverURL">the URL of the Selenium Server Driver, e.g. "http://localhost:4444/selenium-server/driver/" (don't forget the final slash!)</param>
    /// <param name="browserStartCommand">the command string used to launch the browser, e.g. "*firefox" or "c:\\program files\\internet explorer\\iexplore.exe"</param>
    /// <param name="browserURL">the starting URL including just a domain name. We'll start the browser pointing at the Selenium resources on this URL,
    /// e.g. "http://www.google.com" would send the browser to "http://www.google.com/selenium-server/RemoteRunner.html"</param>
    public HttpCommandProcessor(string serverURL, string browserStartCommand, string browserURL)
    {
        this.url = serverURL;
        this.browserStartCommand = browserStartCommand;
        this.browserURL = browserURL;
        this.extensionJs = "";
    }

    /// <summary>
    /// Send the specified remote command to the browser to be performed
    /// </summary>
    /// <param name="command">the remote command verb</param>
    /// <param name="args">the arguments to the remote command (depends on the verb)</param>
    /// <returns>the command result, defined by the remote JavaScript. "getX" style
    /// commands may return data from the browser</returns>
    /// <exception cref="SeleniumException">if the HTTP status is not OK or the body does not start with "OK"</exception>
    public string DoCommand(string command, string[] args)
    {
        IRemoteCommand remoteCommand = new DefaultRemoteCommand(command, args);
        using (HttpWebResponse response = (HttpWebResponse) CreateWebRequest(remoteCommand).GetResponse())
        {
            if (response.StatusCode != HttpStatusCode.OK)
            {
                throw new SeleniumException(response.StatusDescription);
            }
            string resultBody = ReadResponse(response);
            if (!resultBody.StartsWith("OK"))
            {
                // The server signals failure by any body not prefixed with "OK".
                throw new SeleniumException(resultBody);
            }
            return resultBody;
        }
    }

    /// <summary>
    /// Retrieves the body of the HTTP response
    /// </summary>
    /// <param name="response">the response object to read</param>
    /// <returns>the body of the HTTP response</returns>
    public virtual string ReadResponse(HttpWebResponse response)
    {
        using (StreamReader reader = new StreamReader(response.GetResponseStream()))
        {
            return reader.ReadToEnd();
        }
    }

    /// <summary>
    /// Builds an HTTP request based on the specified remote Command
    /// </summary>
    /// <param name="command">the command we'll send to the server</param>
    /// <returns>an HTTP request, which will perform this command</returns>
    public virtual WebRequest CreateWebRequest(IRemoteCommand command)
    {
        byte[] data = BuildCommandPostData(command.CommandString);

        HttpWebRequest request = (HttpWebRequest) WebRequest.Create(url);
        request.Method = "POST";
        request.ContentType = "application/x-www-form-urlencoded; charset=utf-8";
        request.Timeout = Timeout.Infinite;
        request.ServicePoint.ConnectionLimit = 2000;

        // Dispose the request stream even if Write throws (previously the
        // stream was only Close()d on the success path).
        using (Stream rs = request.GetRequestStream())
        {
            rs.Write(data, 0, data.Length);
        }
        return request;
    }

    // Encodes the command string (plus the current session id, if any) as the
    // UTF-8 POST body expected by the Selenium Server.
    private byte[] BuildCommandPostData(string commandString)
    {
        string data = commandString;
        if (sessionId != null)
        {
            data += "&sessionId=" + sessionId;
        }
        return (new UTF8Encoding()).GetBytes(data);
    }

    /// <summary>
    /// Sets the extension Javascript to be used in the created session
    /// </summary>
    /// <param name="extensionJs">The extension JavaScript to use.</param>
    public void SetExtensionJs(string extensionJs)
    {
        this.extensionJs = extensionJs;
    }

    /// <summary>
    /// Creates a new browser session
    /// </summary>
    public void Start()
    {
        string result = GetString("getNewBrowserSession", new String[] {browserStartCommand, browserURL, extensionJs});
        sessionId = result;
    }

    /// <summary>
    /// Take any extra options that may be needed when creating a browser session
    /// </summary>
    /// <param name="optionString">Browser Options</param>
    public void Start(string optionString)
    {
        string result = GetString("getNewBrowserSession", new String[] { browserStartCommand, browserURL, extensionJs, optionString });
        sessionId = result;
    }

    /// <summary>
    /// Wraps the version of start() that takes a string parameter, sending it the result
    /// of calling ToString() on optionsObject, which will likely be a BrowserConfigurationOptions instance
    /// </summary>
    /// <param name="optionsObject">Contains BrowserConfigurationOptions</param>
    public void Start(Object optionsObject)
    {
        Start(optionsObject.ToString());
    }

    /// <summary>
    /// Stops the previous browser session, killing the browser
    /// </summary>
    public void Stop()
    {
        DoCommand("testComplete", null);
        sessionId = null;
    }

    /// <summary>
    /// Runs the specified remote accessor (getter) command and returns the retrieved result
    /// </summary>
    /// <param name="commandName">the remote Command verb</param>
    /// <param name="args">the arguments to the remote Command (depends on the verb)</param>
    /// <returns>the result of running the accessor on the browser</returns>
    public String GetString(String commandName, String[] args)
    {
        return DoCommand(commandName, args).Substring(3); // skip "OK,"
    }

    /// <summary>
    /// Runs the specified remote accessor (getter) command and returns the retrieved result
    /// </summary>
    /// <param name="commandName">the remote Command verb</param>
    /// <param name="args">the arguments to the remote Command (depends on the verb)</param>
    /// <returns>the result of running the accessor on the browser</returns>
    public String[] GetStringArray(String commandName, String[] args)
    {
        String result = GetString(commandName, args);
        return parseCSV(result);
    }

    /// <summary>
    /// Parse Selenium comma separated values. A backslash escapes the
    /// following character (so "\," yields a literal comma).
    /// </summary>
    /// <param name="input">the comma delimited string to parse</param>
    /// <returns>the parsed comma-separated entries</returns>
    public static String[] parseCSV(String input)
    {
        ArrayList output = new ArrayList();
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < input.Length; i++)
        {
            // Index the string directly; the original called
            // input.ToCharArray() on every iteration, allocating a fresh
            // array per character (accidental O(n^2)).
            char c = input[i];
            switch (c)
            {
                case ',':
                    output.Add(sb.ToString());
                    sb = new StringBuilder();
                    continue;
                case '\\':
                    // Escape: append the next character verbatim.
                    i++;
                    c = input[i];
                    sb.Append(c);
                    continue;
                default:
                    sb.Append(c);
                    break;
            }
        }
        output.Add(sb.ToString());
        return (String[]) output.ToArray(typeof(String));
    }

    /// <summary>
    /// Runs the specified remote accessor (getter) command and returns the retrieved result
    /// </summary>
    /// <param name="commandName">the remote Command verb</param>
    /// <param name="args">the arguments to the remote Command (depends on the verb)</param>
    /// <returns>the result of running the accessor on the browser</returns>
    public Decimal GetNumber(String commandName, String[] args)
    {
        String result = GetString(commandName, args);
        // NOTE(review): parses with the current culture — presumably the
        // server always sends invariant-format numbers; confirm before
        // running under cultures that use ',' as the decimal separator.
        Decimal d = Decimal.Parse(result);
        return d;
    }

    /// <summary>
    /// Runs the specified remote accessor (getter) command and returns the retrieved result
    /// </summary>
    /// <param name="commandName">the remote Command verb</param>
    /// <param name="args">the arguments to the remote Command (depends on the verb)</param>
    /// <returns>the result of running the accessor on the browser</returns>
    public Decimal[] GetNumberArray(String commandName, String[] args)
    {
        String[] result = GetStringArray(commandName, args);
        Decimal[] d = new Decimal[result.Length];
        for (int i = 0; i < result.Length; i++)
        {
            d[i] = Decimal.Parse(result[i]);
        }
        return d;
    }

    /// <summary>
    /// Runs the specified remote accessor (getter) command and returns the retrieved result
    /// </summary>
    /// <param name="commandName">the remote Command verb</param>
    /// <param name="args">the arguments to the remote Command (depends on the verb)</param>
    /// <returns>the result of running the accessor on the browser</returns>
    /// <exception cref="Exception">if the result is neither "true" nor "false"</exception>
    public bool GetBoolean(String commandName, String[] args)
    {
        String result = GetString(commandName, args);
        bool b;
        if ("true".Equals(result))
        {
            b = true;
            return b;
        }
        if ("false".Equals(result))
        {
            b = false;
            return b;
        }
        throw new Exception("result was neither 'true' nor 'false': " + result);
    }

    /// <summary>
    /// Runs the specified remote accessor (getter) command and returns the retrieved result
    /// </summary>
    /// <param name="commandName">the remote Command verb</param>
    /// <param name="args">the arguments to the remote Command (depends on the verb)</param>
    /// <returns>the result of running the accessor on the browser</returns>
    /// <exception cref="Exception">if any element is neither "true" nor "false"</exception>
    public bool[] GetBooleanArray(String commandName, String[] args)
    {
        String[] result = GetStringArray(commandName, args);
        bool[] b = new bool[result.Length];
        for (int i = 0; i < result.Length; i++)
        {
            // FIX: the original compared "true"/"false" against the whole
            // array (result) rather than the current element, so
            // String.Equals(Object) was always false and the method threw
            // on every non-empty result.
            if ("true".Equals(result[i]))
            {
                b[i] = true;
                continue;
            }
            if ("false".Equals(result[i]))
            {
                b[i] = false;
                continue;
            }
            throw new Exception("result was neither 'true' nor 'false': " + result[i]);
        }
        return b;
    }
}
}
| |
//Copyright (c) Service Stack LLC. All Rights Reserved.
//License: https://raw.github.com/ServiceStack/ServiceStack/master/license.txt
using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Linq;
namespace ServiceStack.OrmLite
{
/// <summary>
/// Intercepts OrmLite result retrieval so tests (and SQL-capture tooling)
/// can supply canned results instead of hitting a database.
/// </summary>
public interface IOrmLiteResultsFilter
{
    /// <summary>Returns the id generated by the last insert.</summary>
    long GetLastInsertId(IDbCommand dbCmd);
    /// <summary>Returns the full result set as a typed list.</summary>
    List<T> GetList<T>(IDbCommand dbCmd);
    /// <summary>Returns the full result set as an untyped list of <paramref name="refType"/> instances.</summary>
    IList GetRefList(IDbCommand dbCmd, Type refType);
    /// <summary>Returns a single typed row.</summary>
    T GetSingle<T>(IDbCommand dbCmd);
    /// <summary>Returns a single row as an instance of <paramref name="refType"/>.</summary>
    object GetRefSingle(IDbCommand dbCmd, Type refType);
    /// <summary>Returns a typed scalar value.</summary>
    T GetScalar<T>(IDbCommand dbCmd);
    /// <summary>Returns an untyped scalar value.</summary>
    object GetScalar(IDbCommand dbCmd);
    /// <summary>Returns a scalar value as a long.</summary>
    long GetLongScalar(IDbCommand dbCmd);
    /// <summary>Returns a single column as a typed list.</summary>
    List<T> GetColumn<T>(IDbCommand dbCmd);
    /// <summary>Returns a single column with duplicates removed.</summary>
    HashSet<T> GetColumnDistinct<T>(IDbCommand dbCmd);
    /// <summary>Returns two columns as a key/value dictionary.</summary>
    Dictionary<K, V> GetDictionary<K, V>(IDbCommand dbCmd);
    /// <summary>Returns two columns as a key-to-list-of-values lookup.</summary>
    Dictionary<K, List<V>> GetLookup<K, V>(IDbCommand dbCmd);
    /// <summary>Returns the affected-rows count for a non-query statement.</summary>
    int ExecuteSql(IDbCommand dbCmd);
}
/// <summary>
/// Default <see cref="IOrmLiteResultsFilter"/> implementation. On construction
/// it registers itself as the global <c>OrmLiteConfig.ResultsFilter</c>
/// (remembering the previous filter) and on <see cref="Dispose"/> it restores
/// the previous filter — so it is intended to be used in a <c>using</c> block.
/// Each result kind can be supplied either as a fixed value property
/// (e.g. <see cref="Results"/>) or as a callback (e.g. <see cref="ResultsFn"/>);
/// the callback, when non-null, always wins.
/// </summary>
public class OrmLiteResultsFilter : IOrmLiteResultsFilter, IDisposable
{
    // Fixed canned results, used when the corresponding *Fn callback is null.
    public IEnumerable Results { get; set; }
    public IEnumerable RefResults { get; set; }
    public IEnumerable ColumnResults { get; set; }
    public IEnumerable ColumnDistinctResults { get; set; }
    public IDictionary DictionaryResults { get; set; }
    public IDictionary LookupResults { get; set; }
    public object SingleResult { get; set; }
    public object RefSingleResult { get; set; }
    public object ScalarResult { get; set; }
    public long LongScalarResult { get; set; }
    public long LastInsertId { get; set; }
    public int ExecuteSqlResult { get; set; }
    // Per-command callbacks; when set they take precedence over the fixed
    // properties above. The Type arguments carry the requested row/key/value types.
    public Func<IDbCommand, int> ExecuteSqlFn { get; set; }
    public Func<IDbCommand, Type, IEnumerable> ResultsFn { get; set; }
    public Func<IDbCommand, Type, IEnumerable> RefResultsFn { get; set; }
    public Func<IDbCommand, Type, IEnumerable> ColumnResultsFn { get; set; }
    public Func<IDbCommand, Type, IEnumerable> ColumnDistinctResultsFn { get; set; }
    public Func<IDbCommand, Type, Type, IDictionary> DictionaryResultsFn { get; set; }
    public Func<IDbCommand, Type, Type, IDictionary> LookupResultsFn { get; set; }
    public Func<IDbCommand, Type, object> SingleResultFn { get; set; }
    public Func<IDbCommand, Type, object> RefSingleResultFn { get; set; }
    public Func<IDbCommand, Type, object> ScalarResultFn { get; set; }
    public Func<IDbCommand, long> LongScalarResultFn { get; set; }
    public Func<IDbCommand, long> LastInsertIdFn { get; set; }
    // Invoked with the SQL text of every intercepted command.
    public Action<string> SqlFilter { get; set; }
    // When true, every intercepted command's SQL is echoed to the console.
    public bool PrintSql { get; set; }
    // The filter that was active before this one; restored in Dispose().
    private readonly IOrmLiteResultsFilter previousFilter;

    public OrmLiteResultsFilter(IEnumerable results = null)
    {
        this.Results = results ?? new object[] { };
        // Side effect: installs this instance as the global results filter.
        previousFilter = OrmLiteConfig.ResultsFilter;
        OrmLiteConfig.ResultsFilter = this;
    }

    // Runs the SQL observation hooks (SqlFilter callback, PrintSql echo)
    // for every intercepted command.
    private void Filter(IDbCommand dbCmd)
    {
        if (SqlFilter != null)
        {
            SqlFilter(dbCmd.CommandText);
        }
        if (PrintSql)
        {
            Console.WriteLine(dbCmd.CommandText);
        }
    }

    // The Get*Results helpers below implement the "callback wins over fixed
    // property" precedence rule for each result kind.
    private IEnumerable GetResults<T>(IDbCommand dbCmd)
    {
        return ResultsFn != null ? ResultsFn(dbCmd, typeof(T)) : Results;
    }

    private IEnumerable GetRefResults(IDbCommand dbCmd, Type refType)
    {
        return RefResultsFn != null ? RefResultsFn(dbCmd, refType) : RefResults;
    }

    private IEnumerable GetColumnResults<T>(IDbCommand dbCmd)
    {
        return ColumnResultsFn != null ? ColumnResultsFn(dbCmd, typeof(T)) : ColumnResults;
    }

    private IEnumerable GetColumnDistinctResults<T>(IDbCommand dbCmd)
    {
        return ColumnDistinctResultsFn != null ? ColumnDistinctResultsFn(dbCmd, typeof(T)) : ColumnDistinctResults;
    }

    private IDictionary GetDictionaryResults<K, V>(IDbCommand dbCmd)
    {
        return DictionaryResultsFn != null ? DictionaryResultsFn(dbCmd, typeof(K), typeof(V)) : DictionaryResults;
    }

    private IDictionary GetLookupResults<K, V>(IDbCommand dbCmd)
    {
        return LookupResultsFn != null ? LookupResultsFn(dbCmd, typeof(K), typeof(V)) : LookupResults;
    }

    private object GetSingleResult<T>(IDbCommand dbCmd)
    {
        return SingleResultFn != null ? SingleResultFn(dbCmd, typeof(T)) : SingleResult;
    }

    private object GetRefSingleResult(IDbCommand dbCmd, Type refType)
    {
        return RefSingleResultFn != null ? RefSingleResultFn(dbCmd, refType) : RefSingleResult;
    }

    private object GetScalarResult<T>(IDbCommand dbCmd)
    {
        return ScalarResultFn != null ? ScalarResultFn(dbCmd, typeof(T)) : ScalarResult;
    }

    private long GetLongScalarResult(IDbCommand dbCmd)
    {
        return LongScalarResultFn != null ? LongScalarResultFn(dbCmd) : LongScalarResult;
    }

    public long GetLastInsertId(IDbCommand dbCmd)
    {
        return LastInsertIdFn != null ? LastInsertIdFn(dbCmd) : LastInsertId;
    }

    public List<T> GetList<T>(IDbCommand dbCmd)
    {
        Filter(dbCmd);
        return (from object result in GetResults<T>(dbCmd) select (T)result).ToList();
    }

    public IList GetRefList(IDbCommand dbCmd, Type refType)
    {
        Filter(dbCmd);
        // Builds a List<refType> at runtime since refType is only known dynamically.
        var list = (IList)typeof(List<>).MakeGenericType(refType).CreateInstance();
        foreach (object result in GetRefResults(dbCmd, refType))
        {
            list.Add(result);
        }
        return list;
    }

    public T GetSingle<T>(IDbCommand dbCmd)
    {
        Filter(dbCmd);
        if (SingleResult != null || SingleResultFn != null)
            return (T)GetSingleResult<T>(dbCmd);

        // No explicit single result configured: fall back to the first of Results.
        foreach (var result in GetResults<T>(dbCmd))
        {
            return (T)result;
        }
        return default(T);
    }

    public object GetRefSingle(IDbCommand dbCmd, Type refType)
    {
        Filter(dbCmd);
        if (RefSingleResult != null || RefSingleResultFn != null)
            return GetRefSingleResult(dbCmd, refType);

        // No explicit single result configured: fall back to the first of RefResults.
        foreach (var result in GetRefResults(dbCmd, refType))
        {
            return result;
        }
        return null;
    }

    public T GetScalar<T>(IDbCommand dbCmd)
    {
        Filter(dbCmd);
        return ConvertTo<T>(GetScalarResult<T>(dbCmd));
    }

    public long GetLongScalar(IDbCommand dbCmd)
    {
        Filter(dbCmd);
        return GetLongScalarResult(dbCmd);
    }

    // Coerces a canned scalar to T, going through ToString + Convert for the
    // common primitive type codes; anything else is cast directly.
    // NOTE(review): uses current-culture ToString/Convert — presumably fine
    // for test fixtures, but worth confirming for float/decimal values.
    private T ConvertTo<T>(object value)
    {
        if (value == null)
            return default(T);

        if (value is T)
            return (T)value;

        var typeCode = typeof(T).GetUnderlyingTypeCode();
        var strValue = value.ToString();
        switch (typeCode)
        {
            case TypeCode.Boolean:
                return (T)(object)Convert.ToBoolean(strValue);
            case TypeCode.Byte:
                return (T)(object)Convert.ToByte(strValue);
            case TypeCode.Int16:
                return (T)(object)Convert.ToInt16(strValue);
            case TypeCode.Int32:
                return (T)(object)Convert.ToInt32(strValue);
            case TypeCode.Int64:
                return (T)(object)Convert.ToInt64(strValue);
            case TypeCode.Single:
                return (T)(object)Convert.ToSingle(strValue);
            case TypeCode.Double:
                return (T)(object)Convert.ToDouble(strValue);
            case TypeCode.Decimal:
                return (T)(object)Convert.ToDecimal(strValue);
        }
        return (T)value;
    }

    public object GetScalar(IDbCommand dbCmd)
    {
        Filter(dbCmd);
        // Prefer the explicit scalar; otherwise the first element of Results.
        return GetScalarResult<object>(dbCmd) ?? GetResults<object>(dbCmd).Cast<object>().FirstOrDefault();
    }

    public List<T> GetColumn<T>(IDbCommand dbCmd)
    {
        Filter(dbCmd);
        return (from object result in (GetColumnResults<T>(dbCmd) ?? new T[0]) select (T)result).ToList();
    }

    public HashSet<T> GetColumnDistinct<T>(IDbCommand dbCmd)
    {
        Filter(dbCmd);
        // Falls back to the plain column results when no distinct set was configured.
        var results = GetColumnDistinctResults<T>(dbCmd) ?? GetColumnResults<T>(dbCmd);
        return (from object result in results select (T)result).ToHashSet();
    }

    public Dictionary<K, V> GetDictionary<K, V>(IDbCommand dbCmd)
    {
        Filter(dbCmd);
        var to = new Dictionary<K, V>();
        var map = GetDictionaryResults<K, V>(dbCmd);
        if (map == null)
            return to;

        foreach (DictionaryEntry entry in map)
        {
            to.Add((K)entry.Key, (V)entry.Value);
        }
        return to;
    }

    public Dictionary<K, List<V>> GetLookup<K, V>(IDbCommand dbCmd)
    {
        Filter(dbCmd);
        var to = new Dictionary<K, List<V>>();
        var map = GetLookupResults<K, V>(dbCmd);
        if (map == null)
            return to;

        foreach (DictionaryEntry entry in map)
        {
            var key = (K)entry.Key;
            List<V> list;
            if (!to.TryGetValue(key, out list))
            {
                to[key] = list = new List<V>();
            }
            // Each canned entry value is itself a sequence of V.
            list.AddRange(from object item in (IEnumerable)entry.Value select (V)item);
        }
        return to;
    }

    public int ExecuteSql(IDbCommand dbCmd)
    {
        Filter(dbCmd);
        if (ExecuteSqlFn != null)
        {
            return ExecuteSqlFn(dbCmd);
        }
        return ExecuteSqlResult;
    }

    public void Dispose()
    {
        // Re-install whichever filter was active before this one was constructed.
        OrmLiteConfig.ResultsFilter = previousFilter;
    }
}
/// <summary>
/// A results filter that records the SQL text of every command it intercepts
/// into <see cref="SqlStatements"/>, without executing anything.
/// </summary>
public class CaptureSqlFilter : OrmLiteResultsFilter
{
    /// <summary>All SQL statements captured so far, in execution order.</summary>
    public List<string> SqlStatements { get; set; }

    public CaptureSqlFilter()
    {
        SqlStatements = new List<string>();
        // Hook the base class's per-command SQL callback.
        SqlFilter = CaptureSql;
    }

    /// <summary>Appends one intercepted SQL statement to the capture list.</summary>
    private void CaptureSql(string sql)
    {
        SqlStatements.Add(sql);
    }
}
}
| |
using Abp.Domain.Entities;
using Abp.Domain.Entities.Auditing;
using Abp.Domain.Repositories;
using Abp.EntityHistory;
using Abp.Events.Bus.Entities;
using Abp.Extensions;
using Abp.Json;
using Abp.Threading;
using Abp.Timing;
using Abp.Zero.SampleApp.EntityHistory;
using Castle.MicroKernel.Registration;
using NSubstitute;
using Shouldly;
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Data.Entity.Infrastructure;
using System.Linq;
using System.Threading;
using Abp.Application.Editions;
using Abp.Application.Features;
using Abp.Zero.SampleApp.TPH;
using Xunit;
namespace Abp.Zero.SampleApp.Tests.EntityHistory
{
public class SimpleEntityHistory_Test : SampleAppTestBase
{
// Repositories under test, resolved from the IoC container in the constructor.
private readonly IRepository<Advertisement> _advertisementRepository;
private readonly IRepository<Blog> _blogRepository;
private readonly IRepository<Post, Guid> _postRepository;
private readonly IRepository<Comment> _commentRepository;
private readonly IRepository<Student> _studentRepository;
// NSubstitute mock installed in PreInitialize; tests verify Save() calls on it.
private IEntityHistoryStore _entityHistoryStore;
public SimpleEntityHistory_Test()
{
    // Resolve the repositories exercised by the tests.
    _advertisementRepository = Resolve<IRepository<Advertisement>>();
    _blogRepository = Resolve<IRepository<Blog>>();
    _postRepository = Resolve<IRepository<Post, Guid>>();
    _commentRepository = Resolve<IRepository<Comment>>();
    _studentRepository = Resolve<IRepository<Student>>();

    // Run as the default tenant's admin so saved change sets carry a
    // TenantId/UserId the tests can assert against.
    var user = GetDefaultTenantAdmin();
    AbpSession.TenantId = user.TenantId;
    AbpSession.UserId = user.Id;

    Resolve<IEntityHistoryConfiguration>().IsEnabledForAnonymousUsers = true;
}
/// <summary>
/// Replaces the real IEntityHistoryStore with an NSubstitute mock so each
/// test can inspect/verify the EntityChangeSet passed to Save.
/// </summary>
protected override void PreInitialize()
{
    base.PreInitialize();
    _entityHistoryStore = Substitute.For<IEntityHistoryStore>();
    // Registered as a singleton so the same substitute instance is injected
    // everywhere the store is resolved.
    LocalIocManager.IocContainer.Register(
        Component.For<IEntityHistoryStore>().Instance(_entityHistoryStore).LifestyleSingleton()
    );
}
#region CASES WRITE HISTORY
[Fact]
public void Should_Write_History_For_Tracked_Entities_Create()
{
    // Arrange: Advertisement has no [Audited] attribute, so it is tracked
    // only because we register it as a selector here.
    Resolve<IEntityHistoryConfiguration>().Selectors.Add("Selected", typeof(Advertisement));

    // Act: insert a new advertisement inside a unit of work.
    int? advertisementId = null;
    WithUnitOfWork(() =>
    {
        var advertisement = new Advertisement { Banner = "tracked-advertisement" };
        advertisementId = _advertisementRepository.InsertAndGetId(advertisement);
    });

    // Assert: one Created change for the Advertisement with a single
    // Banner property change (null -> value). Shouldly assertions run as a
    // side effect inside the Arg.Is matcher.
    Predicate<EntityChangeSet> predicate = s =>
    {
        s.EntityChanges.Count.ShouldBe(1);

        var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Advertisement).FullName);
        entityChange.ChangeTime.ShouldNotBeNull();
        entityChange.ChangeType.ShouldBe(EntityChangeType.Created);
        entityChange.EntityId.ShouldBe(advertisementId.ToJsonString());
        entityChange.PropertyChanges.Count.ShouldBe(1);

        var propertyChange1 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Advertisement.Banner));
        propertyChange1.OriginalValue.ShouldBeNull();
        propertyChange1.NewValue.ShouldNotBeNull();

        // Check "who did this change"
        s.ImpersonatorTenantId.ShouldBe(AbpSession.ImpersonatorTenantId);
        s.ImpersonatorUserId.ShouldBe(AbpSession.ImpersonatorUserId);
        s.TenantId.ShouldBe(AbpSession.TenantId);
        s.UserId.ShouldBe(AbpSession.UserId);

        return true;
    };
    _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s)));
}
[Fact]
public void Should_Write_History_For_Tracked_Entities_Create_To_Database()
{
    // Arrange: forward calls from the substitute to the real store so the
    // change set is actually persisted and can be queried from the DbContext.
    var entityHistoryStore = Resolve<EntityHistoryStore>();
    _entityHistoryStore.When(x => x.SaveAsync(Arg.Any<EntityChangeSet>()))
        .Do(callback => AsyncHelper.RunSync(() =>
            entityHistoryStore.SaveAsync(callback.Arg<EntityChangeSet>()))
        );
    _entityHistoryStore.When(x => x.Save(Arg.Any<EntityChangeSet>()))
        .Do(callback => entityHistoryStore.Save(callback.Arg<EntityChangeSet>()));

    // Sanity check: no history rows exist for the tenant yet.
    UsingDbContext((context) =>
    {
        context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(0);
        context.EntityChangeSets.Count(e => e.TenantId == 1).ShouldBe(0);
        context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(0);
    });

    // Advertisement has no [Audited] attribute; track it via a selector.
    Resolve<IEntityHistoryConfiguration>().Selectors.Add("Selected", typeof(Advertisement));

    // Sleep 1ms so CreationTime is strictly greater than "justNow".
    var justNow = Clock.Now;
    Thread.Sleep(1);

    // Act
    WithUnitOfWork(() =>
    {
        _advertisementRepository.InsertAndGetId(new Advertisement { Banner = "tracked-advertisement" });
    });

    // Assert: exactly one change set / change / property change was persisted.
    UsingDbContext((context) =>
    {
        context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(1);
        context.EntityChangeSets.Count(e => e.TenantId == 1).ShouldBe(1);
        context.EntityChangeSets.Single().CreationTime.ShouldBeGreaterThan(justNow);
        context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(1);
    });
}
[Fact]
public void Should_Write_History_For_TPH_Tracked_Entities_Create()
{
    // Arrange: track the TPH (table-per-hierarchy) Student entity via a selector.
    Resolve<IEntityHistoryConfiguration>().Selectors.Add("Selected", typeof(Student));

    // Act
    var student = new Student()
    {
        Name = "TestName",
        IdCard = "TestIdCard",
        Address = "TestAddress",
        Grade = 1
    };
    _studentRepository.Insert(student);

    // Assert: one Created change for Student covering all four mapped properties.
    Predicate<EntityChangeSet> predicate = s =>
    {
        s.EntityChanges.Count.ShouldBe(1);

        var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Student).FullName);
        entityChange.ChangeTime.ShouldNotBeNull();
        entityChange.ChangeType.ShouldBe(EntityChangeType.Created);
        entityChange.EntityId.ShouldBe(student.Id.ToJsonString());
        entityChange.PropertyChanges.Count.ShouldBe(4); //Name,IdCard,Address,Grade

        var propertyChange1 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Student.Name));
        propertyChange1.OriginalValue.ShouldBeNull();
        propertyChange1.NewValue.ShouldNotBeNull();

        var propertyChange2 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Student.IdCard));
        propertyChange2.OriginalValue.ShouldBeNull();
        propertyChange2.NewValue.ShouldNotBeNull();

        var propertyChange3 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Student.Address));
        propertyChange3.OriginalValue.ShouldBeNull();
        propertyChange3.NewValue.ShouldNotBeNull();

        var propertyChange4 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Student.Grade));
        propertyChange4.OriginalValue.ShouldBeNull();
        propertyChange4.NewValue.ShouldNotBeNull();

        // Check "who did this change"
        s.ImpersonatorTenantId.ShouldBe(AbpSession.ImpersonatorTenantId);
        s.ImpersonatorUserId.ShouldBe(AbpSession.ImpersonatorUserId);
        s.TenantId.ShouldBe(AbpSession.TenantId);
        s.UserId.ShouldBe(AbpSession.UserId);

        return true;
    };
    _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s)));
}
[Fact]
public void Should_Write_History_For_TPH_Tracked_Entities_Create_To_Database()
{
    // Arrange: forward calls from the substitute to the real store so the
    // change set is persisted and queryable via the DbContext.
    var entityHistoryStore = Resolve<EntityHistoryStore>();
    _entityHistoryStore.When(x => x.SaveAsync(Arg.Any<EntityChangeSet>()))
        .Do(callback => AsyncHelper.RunSync(() =>
            entityHistoryStore.SaveAsync(callback.Arg<EntityChangeSet>()))
        );
    _entityHistoryStore.When(x => x.Save(Arg.Any<EntityChangeSet>()))
        .Do(callback => entityHistoryStore.Save(callback.Arg<EntityChangeSet>()));

    // Sanity check: no history rows exist for the tenant yet.
    UsingDbContext((context) =>
    {
        context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(0);
        context.EntityChangeSets.Count(e => e.TenantId == 1).ShouldBe(0);
        context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(0);
    });

    Resolve<IEntityHistoryConfiguration>().Selectors.Add("Selected", typeof(Student));

    // Sleep 1ms so CreationTime is strictly greater than "justNow".
    var justNow = Clock.Now;
    Thread.Sleep(1);

    // Act
    var student = new Student()
    {
        Name = "TestName",
        IdCard = "TestIdCard",
        Address = "TestAddress",
        Grade = 1
    };
    _studentRepository.Insert(student);

    // Assert: one change set with a property change per mapped column.
    UsingDbContext((context) =>
    {
        context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(1);
        context.EntityChangeSets.Count(e => e.TenantId == 1).ShouldBe(1);
        context.EntityChangeSets.Single().CreationTime.ShouldBeGreaterThan(justNow);
        context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(4); //Name,IdCard,Address,Grade
    });
}
[Fact]
public void Should_Write_History_For_Tracked_Entities_Update()
{
    // Arrange: Advertisement has no [Audited] attribute; track via selector.
    Resolve<IEntityHistoryConfiguration>().Selectors.Add("Selected", typeof(Advertisement));

    // Act: update the Banner of a seeded advertisement.
    WithUnitOfWork(() =>
    {
        var advertisement1 = _advertisementRepository.Single(a => a.Banner == "test-advertisement-1");
        advertisement1.Banner = "test-advertisement-1-updated";
        _advertisementRepository.Update(advertisement1);
    });

    // Assert: one Updated change recording old and new Banner values.
    Predicate<EntityChangeSet> predicate = s =>
    {
        s.EntityChanges.Count.ShouldBe(1);

        var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Advertisement).FullName);
        entityChange.ChangeType.ShouldBe(EntityChangeType.Updated);
        entityChange.EntityId.ShouldBe(entityChange.EntityEntry.As<DbEntityEntry>().Entity.As<IEntity>().Id.ToJsonString());
        entityChange.PropertyChanges.Count.ShouldBe(1);

        var propertyChange = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Advertisement.Banner));
        propertyChange.NewValue.ShouldBe("test-advertisement-1-updated".ToJsonString());
        propertyChange.OriginalValue.ShouldBe("test-advertisement-1".ToJsonString());
        propertyChange.PropertyTypeFullName.ShouldBe(typeof(Advertisement).GetProperty(nameof(Advertisement.Banner)).PropertyType.FullName);

        return true;
    };
    _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s)));
}
[Fact]
public void Should_Write_History_For_Audited_Entities_Create()
{
    // Act: Blog has the [Audited] attribute, so no selector is needed.
    var blog2Id = CreateBlogAndGetId();

    // Assert: one Created change for the Blog covering Url, More and CreationTime.
    Predicate<EntityChangeSet> predicate = s =>
    {
        s.EntityChanges.Count.ShouldBe(1);

        var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Blog).FullName);
        // ChangeTime for a Created entity equals the entity's CreationTime.
        entityChange.ChangeTime.ShouldBe(entityChange.EntityEntry.As<DbEntityEntry>().Entity.As<IHasCreationTime>().CreationTime);
        entityChange.ChangeType.ShouldBe(EntityChangeType.Created);
        entityChange.EntityId.ShouldBe(blog2Id.ToJsonString());
        entityChange.PropertyChanges.Count.ShouldBe(3);

        var propertyChange1 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Blog.Url));
        propertyChange1.OriginalValue.ShouldBeNull();
        propertyChange1.NewValue.ShouldNotBeNull();

        var propertyChange2 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Blog.More));
        propertyChange2.OriginalValue.ShouldBeNull();
        propertyChange2.NewValue.ShouldNotBeNull();

        var propertyChange3 = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Blog.CreationTime));
        propertyChange3.OriginalValue.ShouldBeNull();
        propertyChange3.NewValue.ShouldNotBeNull();

        // Check "who did this change"
        s.ImpersonatorTenantId.ShouldBe(AbpSession.ImpersonatorTenantId);
        s.ImpersonatorUserId.ShouldBe(AbpSession.ImpersonatorUserId);
        s.TenantId.ShouldBe(AbpSession.TenantId);
        s.UserId.ShouldBe(AbpSession.UserId);

        return true;
    };
    _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s)));
}
[Fact]
public void Should_Write_History_For_Audited_Entities_Create_To_Database()
{
    // Arrange: forward calls from the substitute to the real store so the
    // change set is persisted and queryable via the DbContext.
    var entityHistoryStore = Resolve<EntityHistoryStore>();
    _entityHistoryStore.When(x => x.SaveAsync(Arg.Any<EntityChangeSet>()))
        .Do(callback => AsyncHelper.RunSync(() =>
            entityHistoryStore.SaveAsync(callback.Arg<EntityChangeSet>()))
        );
    _entityHistoryStore.When(x => x.Save(Arg.Any<EntityChangeSet>()))
        .Do(callback => entityHistoryStore.Save(callback.Arg<EntityChangeSet>()));

    // Sanity check: no history rows exist for the tenant yet.
    UsingDbContext((context) =>
    {
        context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(0);
        context.EntityChangeSets.Count(e => e.TenantId == 1).ShouldBe(0);
        context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(0);
    });

    // Sleep 1ms so CreationTime is strictly greater than "justNow".
    var justNow = Clock.Now;
    Thread.Sleep(1);

    // Act: Blog has the [Audited] attribute, so it is audited automatically.
    var blog2Id = CreateBlogAndGetId();

    // Assert: one change set with three property changes (Url, More, CreationTime).
    UsingDbContext((context) =>
    {
        context.EntityChanges.Count(e => e.TenantId == 1).ShouldBe(1);
        context.EntityChangeSets.Count(e => e.TenantId == 1).ShouldBe(1);
        context.EntityChangeSets.Single().CreationTime.ShouldBeGreaterThan(justNow);
        context.EntityPropertyChanges.Count(e => e.TenantId == 1).ShouldBe(3);
    });
}
[Fact]
public void Should_Write_History_For_Audited_Entities_Update()
{
    // Act: Blog has the [Audited] attribute; update its Url.
    var newValue = "http://testblog1-changed.myblogs.com";
    var originalValue = UpdateBlogUrlAndGetOriginalValue(newValue);

    // Assert: one Updated change recording old and new Url values.
    Predicate<EntityChangeSet> predicate = s =>
    {
        s.EntityChanges.Count.ShouldBe(1);

        var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Blog).FullName);
        entityChange.ChangeType.ShouldBe(EntityChangeType.Updated);
        entityChange.EntityId.ShouldBe(entityChange.EntityEntry.As<DbEntityEntry>().Entity.As<IEntity>().Id.ToJsonString());
        entityChange.PropertyChanges.Count.ShouldBe(1);

        var propertyChange = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Blog.Url));
        propertyChange.NewValue.ShouldBe(newValue.ToJsonString());
        propertyChange.OriginalValue.ShouldBe(originalValue.ToJsonString());
        propertyChange.PropertyTypeFullName.ShouldBe(typeof(Blog).GetProperty(nameof(Blog.Url)).PropertyType.FullName);

        return true;
    };
    _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s)));
}
[Fact]
public void Should_Write_History_For_Audited_Entities_Update_Only_Modified_Properties()
{
    var originalValue = "http://testblog2.myblogs.com";
    var newValue = "http://testblog2-changed.myblogs.com";

    // Act: change only the Url property of the blog; other properties are untouched.
    WithUnitOfWork(() =>
    {
        var blog2 = _blogRepository.Single(b => b.Url == originalValue);

        // Update only the Url of the Blog
        blog2.ChangeUrl(newValue);
        _blogRepository.Update(blog2);
    });

    // Assert: exactly one property change is recorded — only the modified Url.
    Predicate<EntityChangeSet> predicate = s =>
    {
        s.EntityChanges.Count.ShouldBe(1);

        var entityChange = s.EntityChanges.Single(ec => ec.EntityTypeFullName == typeof(Blog).FullName);
        entityChange.ChangeType.ShouldBe(EntityChangeType.Updated);
        entityChange.EntityId.ShouldBe(entityChange.EntityEntry.As<DbEntityEntry>().Entity.As<IEntity>().Id.ToJsonString());
        entityChange.PropertyChanges.Count.ShouldBe(1);

        var propertyChange = entityChange.PropertyChanges.Single(pc => pc.PropertyName == nameof(Blog.Url));
        propertyChange.NewValue.ShouldBe(newValue.ToJsonString());
        propertyChange.OriginalValue.ShouldBe(originalValue.ToJsonString());
        propertyChange.PropertyTypeFullName.ShouldBe(typeof(Blog).GetProperty(nameof(Blog.Url)).PropertyType.FullName);

        return true;
    };
    _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => predicate(s)));
}
[Fact]
public void Should_Write_History_For_Audited_Entities_Update_Complex()
{
    /* Blog has Audited attribute. */
    var blogId = 0;
    var updatedMore = new BlogEx { BloggerName = "blogger-2" };
    BlogEx previousMore = null;

    WithUnitOfWork(() =>
    {
        var blog = _blogRepository.Single(b => b.More.BloggerName == "blogger-1");
        blogId = blog.Id;
        previousMore = new BlogEx { BloggerName = blog.More.BloggerName };
        blog.More.BloggerName = updatedMore.BloggerName;
        _blogRepository.Update(blog);
    });

    // A complex (owned) property change is recorded as a single change on Blog.More.
    bool IsExpectedChangeSet(EntityChangeSet changeSet)
    {
        changeSet.EntityChanges.Count.ShouldBe(1);

        var blogChange = changeSet.EntityChanges.Single(c => c.EntityTypeFullName == typeof(Blog).FullName);
        blogChange.ChangeType.ShouldBe(EntityChangeType.Updated);
        blogChange.EntityId.ShouldBe(blogId.ToJsonString());
        blogChange.PropertyChanges.Count.ShouldBe(1);

        var moreChange = blogChange.PropertyChanges.Single(p => p.PropertyName == nameof(Blog.More));
        moreChange.NewValue.ShouldBe(updatedMore.ToJsonString());
        moreChange.OriginalValue.ShouldBe(previousMore.ToJsonString());
        moreChange.PropertyTypeFullName.ShouldBe(typeof(Blog).GetProperty(nameof(Blog.More)).PropertyType.FullName);

        return true;
    }

    _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => IsExpectedChangeSet(s)));
}
[Fact]
public void Should_Write_History_For_Audited_Property_Foreign_Key()
{
    /* Post.BlogId has Audited attribute. */
    CreateBlogAndGetId();
    var postId = Guid.Empty;

    WithUnitOfWork(() =>
    {
        var targetBlog = _blogRepository.Single(b => b.Id == 2);
        var post = _postRepository.Single(p => p.Body == "test-post-1-body");
        postId = post.Id;

        // Change the foreign key indirectly, by assigning the navigation property.
        post.Blog = targetBlog;
        _postRepository.Update(post);
    });

    bool IsExpectedChangeSet(EntityChangeSet changeSet)
    {
        changeSet.EntityChanges.Count.ShouldBe(1);

        var postChange = changeSet.EntityChanges.Single(c => c.EntityTypeFullName == typeof(Post).FullName);
        postChange.ChangeType.ShouldBe(EntityChangeType.Updated);
        postChange.EntityId.ShouldBe(postId.ToJsonString());
        postChange.PropertyChanges.Count.ShouldBe(1);

        var fkChange = postChange.PropertyChanges.Single(p => p.PropertyName == nameof(Post.BlogId));
        fkChange.NewValue.ShouldBe("2");
        fkChange.OriginalValue.ShouldBe("1");
        fkChange.PropertyTypeFullName.ShouldBe(typeof(Post).GetProperty(nameof(Post.BlogId)).PropertyType.FullName);

        return true;
    }

    _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => IsExpectedChangeSet(s)));
}
[Fact]
public void Should_Write_History_For_Audited_Property_Foreign_Key_Collection()
{
    WithUnitOfWork(() =>
    {
        var blog = _blogRepository.Single(b => b.Name == "test-blog-1");

        // Change the navigation property by adding a new Post into the collection.
        blog.Posts.Add(new Post { Blog = blog, Title = "test-post-10-title", Body = "test-post-10-body" });
        _blogRepository.Update(blog);
    });

    bool IsExpectedChangeSet(EntityChangeSet changeSet)
    {
        changeSet.EntityChanges.Count.ShouldBe(2);

        /* Post is not in Configuration.Selectors */
        /* Post.Blog has Audited attribute */
        var postChange = changeSet.EntityChanges.Single(c => c.EntityTypeFullName == typeof(Post).FullName);
        postChange.ChangeType.ShouldBe(EntityChangeType.Created);
        postChange.PropertyChanges.Count.ShouldBe(1);

        var blogIdChange = postChange.PropertyChanges.Single(p => p.PropertyName == nameof(Post.BlogId));
        blogIdChange.OriginalValue.ShouldBeNull();
        blogIdChange.NewValue.ShouldNotBeNull();

        /* Blog has Audited attribute. */
        var blogChange = changeSet.EntityChanges.Single(c => c.EntityTypeFullName == typeof(Blog).FullName);
        blogChange.ChangeType.ShouldBe(EntityChangeType.Updated);
        blogChange.PropertyChanges.Count.ShouldBe(0);

        return true;
    }

    _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => IsExpectedChangeSet(s)));
}
[Fact]
public void Should_Write_History_For_Audited_Property_Foreign_Key_Shadow()
{
    /* Comment has Audited attribute. */
    var originalPostKey = new Dictionary<string, object>();
    var newPostKey = new Dictionary<string, object>();

    WithUnitOfWork(() =>
    {
        var targetPost = _postRepository.Single(p => p.Body == "test-post-2-body");
        newPostKey.Add("Id", targetPost.Id);

        var comment = _commentRepository.Single(c => c.Content == "test-comment-1-content");
        originalPostKey.Add("Id", comment.Post.Id);

        // Change the shadow foreign key by assigning the navigation property.
        comment.Post = targetPost;
        _commentRepository.Update(comment);
    });

    bool IsExpectedChangeSet(EntityChangeSet changeSet)
    {
        changeSet.EntityChanges.Count.ShouldBe(1);

        var commentChange = changeSet.EntityChanges.Single(c => c.EntityTypeFullName == typeof(Comment).FullName);
        commentChange.PropertyChanges.Count.ShouldBe(1);

        // Shadow-key changes are recorded against the navigation property,
        // with the related entity's key serialized as the value.
        var navChange = commentChange.PropertyChanges.Single(p => p.PropertyName == nameof(Comment.Post));
        navChange.NewValue.ShouldBe(newPostKey.ToJsonString());
        navChange.OriginalValue.ShouldBe(originalPostKey.ToJsonString());
        navChange.PropertyTypeFullName.ShouldBe(typeof(Comment).GetProperty(nameof(Comment.Post)).PropertyType.FullName);

        return true;
    }

    _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => IsExpectedChangeSet(s)));
}
[Fact]
public void Should_Write_History_But_Not_For_Property_If_Disabled_History_Tracking()
{
    /* Blog.Name has DisableAuditing attribute. */
    WithUnitOfWork(() =>
    {
        var blog = _blogRepository.Single(b => b.Name == "test-blog-1");
        blog.Name = null;
        _blogRepository.Update(blog);
    });

    bool IsExpectedChangeSet(EntityChangeSet changeSet)
    {
        changeSet.EntityChanges.Count.ShouldBe(1);

        // The entity change itself is recorded...
        var blogChange = changeSet.EntityChanges.Single(c => c.EntityTypeFullName == typeof(Blog).FullName);
        blogChange.ChangeType.ShouldBe(EntityChangeType.Updated);
        blogChange.EntityId.ShouldBe(blogChange.EntityEntry.As<DbEntityEntry>().Entity.As<IEntity>().Id.ToJsonString());

        // ...but the DisableAuditing property produces no property change.
        blogChange.PropertyChanges.Count.ShouldBe(0);

        return true;
    }

    _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => IsExpectedChangeSet(s)));
}
[Fact]
public void Should_Write_History_For_TPH_Tracked_Entities_With_One_To_Many_Relationship_Create()
{
    var studentId = CreateStudentAndGetId();
    Resolve<IEntityHistoryConfiguration>().Selectors.Add("Selected", typeof(Student), typeof(StudentLectureNote));
    // Ignore calls recorded while arranging the student above.
    _entityHistoryStore.ClearReceivedCalls();

    WithUnitOfWork(() =>
    {
        var student = _studentRepository.Get(studentId);
        student.LectureNotes.Add(new StudentLectureNote()
        {
            Student = student,
            CourseName = "Course1",
            Note = 100
        });
        _studentRepository.Update(student);
    });

    bool IsExpectedChangeSet(EntityChangeSet changeSet)
    {
        changeSet.EntityChanges.Count.ShouldBe(1);

        var noteChange = changeSet.EntityChanges.Single(c => c.EntityTypeFullName == typeof(StudentLectureNote).FullName);
        noteChange.ChangeTime.ShouldNotBeNull();
        noteChange.ChangeType.ShouldBe(EntityChangeType.Created);
        noteChange.PropertyChanges.Count.ShouldBe(3);
        noteChange.PropertyChanges.Single(p => p.PropertyName == nameof(StudentLectureNote.StudentId))
            .NewValue.ShouldBe(studentId.ToString());

        return true;
    }

    _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => IsExpectedChangeSet(s)));
}
[Fact]
public void Should_Write_History_For_TPH_Tracked_Entities_With_One_To_One_Relationship_Changes_Create()
{
    var studentId = CreateStudentAndGetId();
    Resolve<IEntityHistoryConfiguration>().Selectors.Add("Selected", typeof(Student), typeof(CitizenshipInformation));
    // Ignore calls recorded while arranging the student above.
    _entityHistoryStore.ClearReceivedCalls();

    WithUnitOfWork(() =>
    {
        var student = _studentRepository.Get(studentId);
        student.CitizenshipInformation = new CitizenshipInformation()
        {
            Student = student,
            CitizenShipId = "123qwe"
        };
        _studentRepository.Update(student);
    });

    bool IsExpectedChangeSet(EntityChangeSet changeSet)
    {
        changeSet.EntityChanges.Count.ShouldBe(1);

        var infoChange = changeSet.EntityChanges.Single(c => c.EntityTypeFullName == typeof(CitizenshipInformation).FullName);
        infoChange.ChangeTime.ShouldNotBeNull();
        infoChange.ChangeType.ShouldBe(EntityChangeType.Created);
        infoChange.PropertyChanges.Count.ShouldBe(1);
        // Stored values are JSON-serialized, hence the embedded quotes.
        infoChange.PropertyChanges.Single(p => p.PropertyName == nameof(CitizenshipInformation.CitizenShipId))
            .NewValue.ShouldBe("\"123qwe\"");

        return true;
    }

    _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => IsExpectedChangeSet(s)));
}
[Fact]
public void Should_Write_History_For_TPH_Tracked_Entities_With_One_To_One_Relationship_Changes_Update()
{
    var studentId = CreateStudentWithCitizenshipAndGetId();
    Resolve<IEntityHistoryConfiguration>().Selectors.Add("Selected", typeof(Student), typeof(CitizenshipInformation));
    // Ignore calls recorded while arranging the student above.
    _entityHistoryStore.ClearReceivedCalls();

    WithUnitOfWork(() =>
    {
        var student = _studentRepository.GetAll().Include(x => x.CitizenshipInformation).Single(x => x.Id == studentId);
        student.CitizenshipInformation.CitizenShipId = "qwe123";
        _studentRepository.Update(student);
    });

    bool IsExpectedChangeSet(EntityChangeSet changeSet)
    {
        changeSet.EntityChanges.Count.ShouldBe(1);

        var infoChange = changeSet.EntityChanges.Single(c => c.EntityTypeFullName == typeof(CitizenshipInformation).FullName);
        infoChange.ChangeTime.ShouldNotBeNull();
        infoChange.ChangeType.ShouldBe(EntityChangeType.Updated);
        infoChange.PropertyChanges.Count.ShouldBe(1);

        // Stored values are JSON-serialized, hence the embedded quotes.
        var idChange = infoChange.PropertyChanges.Single(p => p.PropertyName == nameof(CitizenshipInformation.CitizenShipId));
        idChange.OriginalValue.ShouldBe("\"123qwe\"");
        idChange.NewValue.ShouldBe("\"qwe123\"");

        return true;
    }

    _entityHistoryStore.Received().Save(Arg.Is<EntityChangeSet>(s => IsExpectedChangeSet(s)));
}
/// <summary>Inserts a minimal valid student and returns its generated id.</summary>
private int CreateStudentAndGetId()
{
    return _studentRepository.InsertAndGetId(new Student()
    {
        Name = "TestName",
        IdCard = "TestIdCard",
        Address = "TestAddress",
        Grade = 1,
    });
}
/// <summary>
/// Inserts a student together with a one-to-one CitizenshipInformation
/// record (CitizenShipId "123qwe") and returns the student's generated id.
/// </summary>
private int CreateStudentWithCitizenshipAndGetId()
{
    return _studentRepository.InsertAndGetId(new Student()
    {
        Name = "TestName",
        IdCard = "TestIdCard",
        Address = "TestAddress",
        Grade = 1,
        CitizenshipInformation = new CitizenshipInformation()
        {
            CitizenShipId = "123qwe"
        }
    });
}
#endregion
#region CASES DON'T WRITE HISTORY
[Fact]
public void Should_Not_Write_History_If_Disabled()
{
    // Disabling entity history globally must suppress writes even for
    // entities (like Blog) that carry the Audited attribute.
    Resolve<IEntityHistoryConfiguration>().IsEnabled = false;

    /* Blog has Audited attribute. */
    // Result intentionally discarded: only the update's side effect matters here
    // (the original bound it to an unused local).
    UpdateBlogUrlAndGetOriginalValue("http://testblog1-changed.myblogs.com");

    _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>());
}
[Fact]
public void Should_Not_Write_History_If_Not_Audited_And_Not_Selected()
{
    /* Advertisement does not have Audited attribute. */
    Resolve<IEntityHistoryConfiguration>().Selectors.Clear();

    WithUnitOfWork(() => _advertisementRepository.Insert(new Advertisement
    {
        Banner = "not-selected-advertisement"
    }));

    _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>());
}
[Fact]
public void Should_Not_Write_History_If_Ignored()
{
    // An ignored type must never be written, even though Blog is [Audited].
    Resolve<IEntityHistoryConfiguration>().IgnoredTypes.Add(typeof(Blog));

    /* Blog has Audited attribute. */
    // Result intentionally discarded: only the update's side effect matters here
    // (the original bound it to an unused local).
    UpdateBlogUrlAndGetOriginalValue("http://testblog1-changed.myblogs.com");

    _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>());
}
[Fact]
public void Should_Not_Write_History_If_Selected_But_Ignored()
{
    // IgnoredTypes must win over Selectors for the same type.
    Resolve<IEntityHistoryConfiguration>().Selectors.Add("Selected", typeof(Blog));
    Resolve<IEntityHistoryConfiguration>().IgnoredTypes.Add(typeof(Blog));

    /* Blog has Audited attribute. */
    // Result intentionally discarded: only the update's side effect matters here
    // (the original bound it to an unused local).
    UpdateBlogUrlAndGetOriginalValue("http://testblog1-changed.myblogs.com");

    _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>());
}
[Fact]
public void Should_Not_Write_History_If_Property_Has_No_Audited_Attribute()
{
    /* Advertisement.Banner does not have Audited attribute. */
    WithUnitOfWork(() =>
    {
        var ad = _advertisementRepository.Single(a => a.Banner == "test-advertisement-1");
        ad.Banner = null;
        _advertisementRepository.Update(ad);
    });

    _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>());
}
[Fact]
public void Should_Not_Write_History_If_Invalid_Entity_Has_Property_With_Audited_Attribute_Created()
{
    // Act: insert directly through the DbContext, bypassing repositories.
    UsingDbContext(context =>
    {
        context.Categories.Add(new Category { DisplayName = "My Category" });
        context.SaveChanges();
    });

    // Assert: no change set is persisted for the invalid entity.
    _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>());
}
[Fact]
public void Should_Not_Write_History_If_Invalid_Entity_Has_Property_With_Audited_Attribute_Updated()
{
    // Arrange: seed the category, then forget any calls made while seeding.
    UsingDbContext(context =>
    {
        context.Categories.Add(new Category { DisplayName = "My Category" });
        context.SaveChanges();
    });
    _entityHistoryStore.ClearReceivedCalls();

    // Act: update directly through the DbContext.
    UsingDbContext(context =>
    {
        var existing = context.Categories.Single(c => c.DisplayName == "My Category");
        existing.DisplayName = "Invalid Category";
        context.SaveChanges();
    });

    // Assert: no change set is persisted for the invalid entity.
    _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>());
}
[Fact]
public void Should_Not_Write_History_If_Invalid_Entity_Has_Property_With_Audited_Attribute_Deleted()
{
    // Arrange: seed the category, then forget any calls made while seeding.
    UsingDbContext(context =>
    {
        context.Categories.Add(new Category { DisplayName = "My Category" });
        context.SaveChanges();
    });
    _entityHistoryStore.ClearReceivedCalls();

    // Act: delete directly through the DbContext.
    UsingDbContext(context =>
    {
        var existing = context.Categories.Single(c => c.DisplayName == "My Category");
        context.Categories.Remove(existing);
        context.SaveChanges();
    });

    // Assert: no change set is persisted for the invalid entity.
    _entityHistoryStore.DidNotReceive().Save(Arg.Any<EntityChangeSet>());
}
#endregion
/// <summary>Inserts a second test blog inside a unit of work and returns its generated id.</summary>
private int CreateBlogAndGetId()
{
    var createdId = 0;

    WithUnitOfWork(() =>
    {
        createdId = _blogRepository.InsertAndGetId(
            new Blog("test-blog-2", "http://testblog2.myblogs.com", "blogger-2"));
    });

    return createdId;
}
/// <summary>
/// Changes test-blog-1's Url to <paramref name="newValue"/> inside a unit of
/// work and returns the Url the blog had before the change.
/// </summary>
private string UpdateBlogUrlAndGetOriginalValue(string newValue)
{
    string previousUrl = null;

    WithUnitOfWork(() =>
    {
        var blog = _blogRepository.Single(b => b.Name == "test-blog-1");
        previousUrl = blog.Url;
        blog.ChangeUrl(newValue);
        _blogRepository.Update(blog);
    });

    return previousUrl;
}
}
#region Helpers
internal static class IEnumerableExtensions
{
    /// <summary>
    /// Returns the first element of the sequence, or null when the sequence
    /// is empty. Preferred over the LINQ extension for
    /// <see cref="EntityPropertyChange"/> sequences in these tests.
    /// </summary>
    internal static EntityPropertyChange FirstOrDefault(this IEnumerable<EntityPropertyChange> enumerable)
    {
        // foreach (unlike the original manual enumerator walk) disposes the
        // enumerator and never reads Current after a failed MoveNext, whose
        // value is undefined by the IEnumerator contract.
        foreach (var item in enumerable)
        {
            return item;
        }

        return null;
    }
}
#endregion
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
** Purpose: Capture execution context for a thread
**
**
===========================================================*/
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Runtime.ExceptionServices;
using System.Runtime.Serialization;
using Thread = Internal.Runtime.Augments.RuntimeThread;
namespace System.Threading
{
public delegate void ContextCallback(Object state);
// Snapshot of a thread's execution and synchronization contexts, captured by
// ExecutionContext.EstablishCopyOnWriteScope and rolled back via Undo after
// ExecutionContext.Run completes (normally or exceptionally).
internal struct ExecutionContextSwitcher
{
    internal ExecutionContext m_ec;      // execution context to restore
    internal SynchronizationContext m_sc; // synchronization context to restore

    // Restores both saved contexts onto currentThread. Must be called on the
    // same thread that captured them.
    internal void Undo(Thread currentThread)
    {
        Debug.Assert(currentThread == Thread.CurrentThread);

        // The common case is that these have not changed, so avoid the cost of a write if not needed.
        if (currentThread.SynchronizationContext != m_sc)
        {
            currentThread.SynchronizationContext = m_sc;
        }

        if (currentThread.ExecutionContext != m_ec)
        {
            // Restore also raises async-local change notifications as needed.
            ExecutionContext.Restore(currentThread, m_ec);
        }
    }
}
/// <summary>
/// Captures the ambient state (async-local values and flow-suppression flag)
/// of a thread so it can be flowed to other threads/continuations. Instances
/// are immutable; mutation is expressed by creating a new instance.
/// </summary>
public sealed class ExecutionContext : IDisposable, ISerializable
{
    // Shared "empty" context used when a thread has no explicit context.
    internal static readonly ExecutionContext Default = new ExecutionContext();

    // Immutable map of async-local values captured in this context.
    private readonly IAsyncLocalValueMap m_localValues;
    // Async-locals that requested value-change notifications.
    private readonly IAsyncLocal[] m_localChangeNotifications;
    // True when flow has been suppressed via SuppressFlow.
    private readonly bool m_isFlowSuppressed;

    private ExecutionContext()
    {
        m_localValues = AsyncLocalValueMap.Empty;
        m_localChangeNotifications = Array.Empty<IAsyncLocal>();
    }

    private ExecutionContext(
        IAsyncLocalValueMap localValues,
        IAsyncLocal[] localChangeNotifications,
        bool isFlowSuppressed)
    {
        m_localValues = localValues;
        m_localChangeNotifications = localChangeNotifications;
        m_isFlowSuppressed = isFlowSuppressed;
    }

    // ISerializable is implemented for API compatibility only; serialization
    // of execution contexts is not supported on this platform.
    public void GetObjectData(SerializationInfo info, StreamingContext context)
    {
        throw new PlatformNotSupportedException();
    }

    // Returns the current thread's context for flowing to another thread:
    // Default when the thread has none, null when flow is suppressed.
    public static ExecutionContext Capture()
    {
        ExecutionContext executionContext = Thread.CurrentThread.ExecutionContext;
        return
            executionContext == null ? Default :
            executionContext.m_isFlowSuppressed ? null :
            executionContext;
    }

    // Copies this context with the given flow-suppression flag. Returns null
    // (meaning "the default context") when the copy would equal Default.
    private ExecutionContext ShallowClone(bool isFlowSuppressed)
    {
        Debug.Assert(isFlowSuppressed != m_isFlowSuppressed);

        if (!isFlowSuppressed &&
            m_localValues == Default.m_localValues &&
            m_localChangeNotifications == Default.m_localChangeNotifications)
        {
            return null; // implies the default context
        }
        return new ExecutionContext(m_localValues, m_localChangeNotifications, isFlowSuppressed);
    }

    // Suppresses context flow on the current thread; undone via the returned
    // AsyncFlowControl (or RestoreFlow). Throws if flow is already suppressed.
    public static AsyncFlowControl SuppressFlow()
    {
        Thread currentThread = Thread.CurrentThread;
        ExecutionContext executionContext = currentThread.ExecutionContext ?? Default;
        if (executionContext.m_isFlowSuppressed)
        {
            throw new InvalidOperationException(SR.InvalidOperation_CannotSupressFlowMultipleTimes);
        }
        Contract.EndContractBlock();

        executionContext = executionContext.ShallowClone(isFlowSuppressed: true);
        var asyncFlowControl = new AsyncFlowControl();
        currentThread.ExecutionContext = executionContext;
        asyncFlowControl.Initialize(currentThread);
        return asyncFlowControl;
    }

    // Re-enables context flow on the current thread. Throws unless flow is
    // currently suppressed.
    public static void RestoreFlow()
    {
        Thread currentThread = Thread.CurrentThread;
        ExecutionContext executionContext = currentThread.ExecutionContext;
        if (executionContext == null || !executionContext.m_isFlowSuppressed)
        {
            throw new InvalidOperationException(SR.InvalidOperation_CannotRestoreUnsupressedFlow);
        }
        Contract.EndContractBlock();

        currentThread.ExecutionContext = executionContext.ShallowClone(isFlowSuppressed: false);
    }

    public static bool IsFlowSuppressed()
    {
        ExecutionContext executionContext = Thread.CurrentThread.ExecutionContext;
        return executionContext != null && executionContext.m_isFlowSuppressed;
    }

    // Runs callback(state) under executionContext, then restores the caller's
    // execution and synchronization contexts.
    public static void Run(ExecutionContext executionContext, ContextCallback callback, Object state)
    {
        if (executionContext == null)
            throw new InvalidOperationException(SR.InvalidOperation_NullContext);

        Thread currentThread = Thread.CurrentThread;
        ExecutionContextSwitcher ecsw = default(ExecutionContextSwitcher);
        try
        {
            EstablishCopyOnWriteScope(currentThread, ref ecsw);
            ExecutionContext.Restore(currentThread, executionContext);
            callback(state);
        }
        catch
        {
            // Note: we have a "catch" rather than a "finally" because we want
            // to stop the first pass of EH here.  That way we can restore the previous
            // context before any of our callers' EH filters run.  That means we need to
            // end the scope separately in the non-exceptional case below.
            ecsw.Undo(currentThread);
            throw;
        }
        ecsw.Undo(currentThread);
    }

    // Installs executionContext on currentThread and fires async-local change
    // notifications when the effective context actually changes.
    internal static void Restore(Thread currentThread, ExecutionContext executionContext)
    {
        Debug.Assert(currentThread == Thread.CurrentThread);

        ExecutionContext previous = currentThread.ExecutionContext ?? Default;
        currentThread.ExecutionContext = executionContext;

        // New EC could be null if that's what ECS.Undo saved off.
        // For the purposes of dealing with context change, treat this as the default EC
        executionContext = executionContext ?? Default;

        if (previous != executionContext)
        {
            OnContextChanged(previous, executionContext);
        }
    }

    // Saves the thread's current contexts into ecsw so Run can undo later.
    internal static void EstablishCopyOnWriteScope(Thread currentThread, ref ExecutionContextSwitcher ecsw)
    {
        Debug.Assert(currentThread == Thread.CurrentThread);

        ecsw.m_ec = currentThread.ExecutionContext;
        ecsw.m_sc = currentThread.SynchronizationContext;
    }

    // Fires IAsyncLocal.OnValueChanged for every async-local whose value
    // differs between the previous and current contexts.
    private static void OnContextChanged(ExecutionContext previous, ExecutionContext current)
    {
        Debug.Assert(previous != null);
        Debug.Assert(current != null);
        Debug.Assert(previous != current);

        // First pass: locals registered for notification in the previous context.
        foreach (IAsyncLocal local in previous.m_localChangeNotifications)
        {
            object previousValue;
            object currentValue;
            previous.m_localValues.TryGetValue(local, out previousValue);
            current.m_localValues.TryGetValue(local, out currentValue);

            if (previousValue != currentValue)
                local.OnValueChanged(previousValue, currentValue, true);
        }

        if (current.m_localChangeNotifications != previous.m_localChangeNotifications)
        {
            try
            {
                // Second pass: locals registered only in the current context.
                foreach (IAsyncLocal local in current.m_localChangeNotifications)
                {
                    // If the local has a value in the previous context, we already fired the event for that local
                    // in the code above.
                    object previousValue;
                    if (!previous.m_localValues.TryGetValue(local, out previousValue))
                    {
                        object currentValue;
                        current.m_localValues.TryGetValue(local, out currentValue);

                        if (previousValue != currentValue)
                            local.OnValueChanged(previousValue, currentValue, true);
                    }
                }
            }
            catch (Exception ex)
            {
                // A throwing notification callback would leave contexts in an
                // inconsistent state, so tear the process down.
                Environment.FailFast(
                    SR.ExecutionContext_ExceptionInAsyncLocalNotification,
                    ex);
            }
        }
    }

    // Reads the value of an async-local from the current thread's context.
    internal static object GetLocalValue(IAsyncLocal local)
    {
        ExecutionContext current = Thread.CurrentThread.ExecutionContext;
        if (current == null)
            return null;

        object value;
        current.m_localValues.TryGetValue(local, out value);
        return value;
    }

    // Writes an async-local value by publishing a NEW context on the current
    // thread (contexts are immutable); optionally registers/raises change
    // notifications for the local.
    internal static void SetLocalValue(IAsyncLocal local, object newValue, bool needChangeNotifications)
    {
        ExecutionContext current = Thread.CurrentThread.ExecutionContext ?? ExecutionContext.Default;

        object previousValue;
        bool hadPreviousValue = current.m_localValues.TryGetValue(local, out previousValue);

        // Reference equality: no-op when the stored value is unchanged.
        if (previousValue == newValue)
            return;

        IAsyncLocalValueMap newValues = current.m_localValues.Set(local, newValue);

        //
        // Either copy the change notification array, or create a new one, depending on whether we need to add a new item.
        //
        IAsyncLocal[] newChangeNotifications = current.m_localChangeNotifications;
        if (needChangeNotifications)
        {
            if (hadPreviousValue)
            {
                Debug.Assert(Array.IndexOf(newChangeNotifications, local) >= 0);
            }
            else
            {
                int newNotificationIndex = newChangeNotifications.Length;
                Array.Resize(ref newChangeNotifications, newNotificationIndex + 1);
                newChangeNotifications[newNotificationIndex] = local;
            }
        }

        Thread.CurrentThread.ExecutionContext =
            new ExecutionContext(newValues, newChangeNotifications, current.m_isFlowSuppressed);

        if (needChangeNotifications)
        {
            local.OnValueChanged(previousValue, newValue, false);
        }
    }

    public ExecutionContext CreateCopy()
    {
        return this; // since CoreCLR's ExecutionContext is immutable, we don't need to create copies.
    }

    public void Dispose()
    {
        // For CLR compat only
    }
}
/// <summary>
/// Token returned by <see cref="ExecutionContext.SuppressFlow"/>; disposing
/// (or calling <see cref="Undo"/>) re-enables execution-context flow on the
/// thread that suppressed it. Single-use and thread-affine.
/// </summary>
public struct AsyncFlowControl : IDisposable
{
    // Thread on which flow was suppressed; null once Undo has run.
    private Thread _thread;

    internal void Initialize(Thread currentThread)
    {
        Debug.Assert(currentThread == Thread.CurrentThread);
        _thread = currentThread;
    }

    // Re-enables flow. Throws if called twice, on the wrong thread, or when
    // flow is no longer suppressed on the current context.
    public void Undo()
    {
        if (_thread == null)
        {
            throw new InvalidOperationException(SR.InvalidOperation_CannotUseAFCMultiple);
        }
        if (Thread.CurrentThread != _thread)
        {
            throw new InvalidOperationException(SR.InvalidOperation_CannotUseAFCOtherThread);
        }

        // An async flow control cannot be undone when a different execution context is applied. The desktop framework
        // mutates the execution context when its state changes, and only changes the instance when an execution context
        // is applied (for instance, through ExecutionContext.Run). The framework prevents a suppressed-flow execution
        // context from being applied by returning null from ExecutionContext.Capture, so the only type of execution
        // context that can be applied is one whose flow is not suppressed. After suppressing flow and changing an async
        // local's value, the desktop framework verifies that a different execution context has not been applied by
        // checking the execution context instance against the one saved from when flow was suppressed. In .NET Core,
        // since the execution context instance will change after changing the async local's value, it verifies that a
        // different execution context has not been applied, by instead ensuring that the current execution context's
        // flow is suppressed.
        if (!ExecutionContext.IsFlowSuppressed())
        {
            throw new InvalidOperationException(SR.InvalidOperation_AsyncFlowCtrlCtxMismatch);
        }
        Contract.EndContractBlock();

        // Clear _thread BEFORE restoring flow so a reentrant/duplicate Undo
        // fails the _thread == null check above.
        _thread = null;
        ExecutionContext.RestoreFlow();
    }

    public void Dispose()
    {
        Undo();
    }

    public override bool Equals(object obj)
    {
        return obj is AsyncFlowControl && Equals((AsyncFlowControl)obj);
    }

    // Two controls are equal when they refer to the same suppressing thread.
    public bool Equals(AsyncFlowControl obj)
    {
        return _thread == obj._thread;
    }

    public override int GetHashCode()
    {
        return _thread?.GetHashCode() ?? 0;
    }

    public static bool operator ==(AsyncFlowControl a, AsyncFlowControl b)
    {
        return a.Equals(b);
    }

    public static bool operator !=(AsyncFlowControl a, AsyncFlowControl b)
    {
        return !(a == b);
    }
}
}
| |
// ------------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the ""License""); you may not use this
// file except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED WARRANTIES OR
// CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, MERCHANTABLITY OR
// NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing permissions and
// limitations under the License.
// ------------------------------------------------------------------------------------
namespace Amqp.Types
{
using System;
using System.Collections;
using System.Text;
using System.Globalization;
/// <summary>
/// The delegate to create a described object.
/// </summary>
/// <returns></returns>
public delegate Described CreateDescribed();
delegate void Encode(ByteBuffer buffer, object value, bool smallEncoding);
delegate object Decode(ByteBuffer buffer, byte formatCode);
/// <summary>
/// Encodes or decodes AMQP types.
/// </summary>
public static class Encoder
{
// Pairs a CLR type with the delegates that encode/decode it as an AMQP type.
class Serializer
{
    public Type Type;       // CLR type handled; null for the AMQP null codec
    public Encode Encoder;  // writes the value into a ByteBuffer
    public Decode Decoder;  // reads a value for a given format code
}
#if NETMF
        // NETMF DateTime ticks origin is 1601/1/1
        const long epochTicks = 116444736000000000; // 1970-1-1 00:00:00 UTC
#else
        // Ticks from the CLR DateTime origin (0001-01-01) to the Unix epoch.
        const long epochTicks = 621355968000000000; // 1970-1-1 00:00:00 UTC
#endif
        const long ticksPerMillisecond = 10000;

        // Codec tables built once by Initialize():
        static Serializer[] serializers;      // one entry per AMQP type category
        static Map codecByType;               // CLR Type -> Serializer
        static byte[][] codecIndexTable;      // format-code high nibble -> serializer indices
        // Registry of described-type constructors (name kept as-is; historical typo).
        static Map knownDescrided;
        // Builds the codec tables the first time the type is touched.
        // Initialize() may also be invoked explicitly (e.g. to reset state),
        // hence the null guard rather than an unconditional call.
        static Encoder()
        {
            if (serializers == null)
            {
                Initialize();
            }
        }
internal static void Initialize()
{
knownDescrided = new Map();
serializers = new Serializer[]
{
// 0: null
new Serializer()
{
Type = null,
Encoder = delegate(ByteBuffer b, object o, bool s) { AmqpBitConverter.WriteUByte(b, FormatCode.Null); },
Decoder = delegate(ByteBuffer b, byte c) { return null; }
},
// 1: boolean
new Serializer()
{
Type = typeof(bool),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteBoolean(b, (bool)o, s); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadBoolean(b, c); }
},
// 2: ubyte
new Serializer()
{
Type = typeof(byte),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteUByte(b, (byte)o); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadUByte(b, c); }
},
// 3: ushort
new Serializer()
{
Type = typeof(ushort),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteUShort(b, (ushort)o); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadUShort(b, c); }
},
// 4: uint
new Serializer()
{
Type = typeof(uint),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteUInt(b, (uint)o, s); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadUInt(b, c); }
},
// 5: ulong
new Serializer()
{
Type = typeof(ulong),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteULong(b, (ulong)o, s); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadULong(b, c); }
},
// 6: byte
new Serializer()
{
Type = typeof(sbyte),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteByte(b, (sbyte)o); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadByte(b, c); }
},
// 7: short
new Serializer()
{
Type = typeof(short),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteShort(b, (short)o); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadShort(b, c); }
},
// 8: int
new Serializer()
{
Type = typeof(int),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteInt(b, (int)o, s); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadInt(b, c); }
},
// 9: long
new Serializer()
{
Type = typeof(long),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteLong(b, (long)o, s); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadLong(b, c); }
},
// 10: float
new Serializer()
{
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteFloat(b, (float)o); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadFloat(b, c); }
},
// 11: double
new Serializer()
{
Type = typeof(double),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteDouble(b, (double)o); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadDouble(b, c); }
},
// 12: char
new Serializer()
{
Type = typeof(char),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteChar(b, (char)o); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadChar(b, c); }
},
// 13: timestamp
new Serializer()
{
Type = typeof(DateTime),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteTimestamp(b, (DateTime)o); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadTimestamp(b, c); }
},
// 14: uuid
new Serializer()
{
Type = typeof(Guid),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteUuid(b, (Guid)o); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadUuid(b, c); }
},
// 15: binary
new Serializer()
{
Type = typeof(byte[]),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteBinary(b, (byte[])o, s); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadBinary(b, c); }
},
// 16: string
new Serializer()
{
Type = typeof(string),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteString(b, (string)o, s); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadString(b, c); }
},
// 17: symbol
new Serializer()
{
Type = typeof(Symbol),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteSymbol(b, (Symbol)o, s); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadSymbol(b, c); }
},
// 18: list
new Serializer()
{
Type = typeof(List),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteList(b, (IList)o, s); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadList(b, c); }
},
// 19: map
new Serializer()
{
Type = typeof(Map),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteMap(b, (Map)o, s); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadMap(b, c); }
},
// 20: array
new Serializer()
{
Type = typeof(Array),
Encoder = delegate(ByteBuffer b, object o, bool s) { WriteArray(b, (Array)o); },
Decoder = delegate(ByteBuffer b, byte c) { return ReadArray(b, c); }
},
// 21: invalid
null
};
codecByType = new Map()
{
{ typeof(bool), serializers[1] },
{ typeof(byte), serializers[2] },
{ typeof(ushort), serializers[3] },
{ typeof(uint), serializers[4] },
{ typeof(ulong), serializers[5] },
{ typeof(sbyte), serializers[6] },
{ typeof(short), serializers[7] },
{ typeof(int), serializers[8] },
{ typeof(long), serializers[9] },
{ typeof(float), serializers[10] },
{ typeof(double), serializers[11] },
{ typeof(char), serializers[12] },
{ typeof(DateTime), serializers[13] },
{ typeof(Guid), serializers[14] },
{ typeof(byte[]), serializers[15] },
{ typeof(string), serializers[16] },
{ typeof(Symbol), serializers[17] },
{ typeof(List), serializers[18] },
{ typeof(Map), serializers[19] },
{ typeof(Fields), serializers[19] },
};
codecIndexTable = new byte[][]
{
// 0x40:null, 0x41:boolean.true, 0x42:boolean.false, 0x43:uint0, 0x44:ulong0, 0x45:list0
new byte[] { 0, 1, 1, 4, 5, 18 },
// 0x50:ubyte, 0x51:byte, 0x52:small.uint, 0x53:small.ulong, 0x54:small.int, 0x55:small.long, 0x56:boolean
new byte[] { 2, 6, 4, 5, 8, 9, 1 },
// 0x60:ushort, 0x61:short
new byte[] { 3, 7 },
// 0x70:uint, 0x71:int, 0x72:float, 0x73:char, 0x74:decimal32
new byte[] { 4, 8, 10, 12 },
// 0x80:ulong, 0x81:long, 0x82:double, 0x83:timestamp, 0x84:decimal64
new byte[] { 5, 9, 11, 13 },
// 0x98:uuid
new byte[] { 21, 21, 21, 21, 21, 21, 21, 21, 14 },
// 0xa0:bin8, 0xa1:str8, 0xa3:sym8
new byte[] { 15, 16, 21, 17 },
// 0xb0:bin32, 0xb1:str32, 0xb3:sym32
new byte[] { 15, 16, 21, 17 },
// 0xc0:list8, 0xc1:map8
new byte[] { 18, 19 },
// 0xd0:list32, 0xd1:map32
new byte[] { 18, 19 },
// 0xe0:array8
new byte[] { 20 },
// 0xf0:array32
new byte[] { 20 }
};
}
/// <summary>
/// Looks up the encode/decode delegates registered for a .NET type.
/// Array types without a direct entry fall back to the generic AMQP array codec.
/// </summary>
/// <param name="type">The .NET type to resolve.</param>
/// <param name="encoder">Receives the encoder delegate, or null when not found.</param>
/// <param name="decoder">Receives the decoder delegate, or null when not found.</param>
/// <returns>true when a codec exists for the type; otherwise false.</returns>
internal static bool TryGetCodec(Type type, out Encode encoder, out Decode decoder)
{
    Serializer found = (Serializer)codecByType[type];
    if (found == null && type.IsArray)
    {
        // Index 20 is the generic array serializer (see the serializers table).
        found = serializers[20];
    }

    if (found == null)
    {
        encoder = null;
        decoder = null;
        return false;
    }

    encoder = found.Encoder;
    decoder = found.Decoder;
    return true;
}
/// <summary>
/// Adds a factory for a custom described type, usually for decoding.
/// </summary>
/// <param name="descriptor">The descriptor of the type.</param>
/// <param name="ctor">The delegate to invoke to create the object.</param>
public static void AddKnownDescribed(Descriptor descriptor, CreateDescribed ctor)
{
    // Register under both the symbolic name and the numeric code so the decoder
    // can resolve the factory from either form of the descriptor on the wire.
    // NOTE(review): the field is spelled "knownDescrided" where it is declared;
    // the spelling must stay consistent with that declaration.
    lock (knownDescrided)
    {
        knownDescrided.Add(descriptor.Name, ctor);
        knownDescrided.Add(descriptor.Code, ctor);
    }
}
/// <summary>
/// Converts a DateTime value to an AMQP timestamp (milliseconds since the Unix epoch).
/// </summary>
/// <param name="dateTime">The DateTime value to convert; converted to UTC first.</param>
/// <returns>Milliseconds elapsed since 1970-01-01T00:00:00Z.</returns>
public static long DateTimeToTimestamp(DateTime dateTime)
{
    long utcTicks = dateTime.ToUniversalTime().Ticks;
    return (long)((utcTicks - epochTicks) / ticksPerMillisecond);
}
/// <summary>
/// Converts an AMQP timestamp (milliseconds since the Unix epoch) to a UTC DateTime.
/// </summary>
/// <param name="timestamp">The AMQP timestamp to convert.</param>
/// <returns>The corresponding DateTime with Kind set to Utc.</returns>
public static DateTime TimestampToDateTime(long timestamp)
{
    long ticks = epochTicks + timestamp * ticksPerMillisecond;
    return new DateTime(ticks, DateTimeKind.Utc);
}
// Reads the next byte from the buffer and interprets it as an AMQP format code.
internal static byte ReadFormatCode(ByteBuffer buffer)
{
    return AmqpBitConverter.ReadUByte(buffer);
}
/// <summary>
/// Writes an AMQP object to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The AMQP value; null is encoded as the AMQP null byte.</param>
/// <param name="smallEncoding">if true, try using small encoding if possible.</param>
/// <exception cref="AmqpException">Thrown when no codec exists for the value's type.</exception>
public static void WriteObject(ByteBuffer buffer, object value, bool smallEncoding = true)
{
    // null has its own single-byte encoding.
    if (value == null)
    {
        AmqpBitConverter.WriteUByte(buffer, FormatCode.Null);
        return;
    }

    // Prefer a registered codec for the runtime type.
    Encode encode;
    Decode decode;
    if (TryGetCodec(value.GetType(), out encode, out decode))
    {
        encode(buffer, value, smallEncoding);
        return;
    }

    // Described values know how to encode themselves.
    Described describedValue = value as Described;
    if (describedValue != null)
    {
        describedValue.Encode(buffer);
        return;
    }

    throw new AmqpException(ErrorCode.NotImplemented,
        Fx.Format(SRAmqp.EncodingTypeNotSupported, value.GetType()));
}
/// <summary>
/// Writes a boolean value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The boolean value.</param>
/// <param name="smallEncoding">if true, try using small encoding if possible.</param>
public static void WriteBoolean(ByteBuffer buffer, bool value, bool smallEncoding)
{
    if (!smallEncoding)
    {
        // Fixed-width form: format code followed by a 0/1 payload byte.
        AmqpBitConverter.WriteUByte(buffer, FormatCode.Boolean);
        AmqpBitConverter.WriteUByte(buffer, value ? (byte)1 : (byte)0);
        return;
    }

    // Compact form: the truth value is folded into the format code itself.
    AmqpBitConverter.WriteUByte(buffer, value ? FormatCode.BooleanTrue : FormatCode.BooleanFalse);
}
/// <summary>
/// Writes an unsigned byte value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The unsigned byte value.</param>
public static void WriteUByte(ByteBuffer buffer, byte value)
{
    // Format code followed by the one-byte payload.
    AmqpBitConverter.WriteUByte(buffer, FormatCode.UByte);
    AmqpBitConverter.WriteUByte(buffer, value);
}
/// <summary>
/// Writes an unsigned 16-bit integer value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The unsigned 16-bit integer value.</param>
public static void WriteUShort(ByteBuffer buffer, ushort value)
{
    // Format code followed by the two-byte payload.
    AmqpBitConverter.WriteUByte(buffer, FormatCode.UShort);
    AmqpBitConverter.WriteUShort(buffer, value);
}
/// <summary>
/// Writes an unsigned 32-bit integer value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The unsigned 32-bit integer value.</param>
/// <param name="smallEncoding">if true, try using small encoding if possible.</param>
public static void WriteUInt(ByteBuffer buffer, uint value, bool smallEncoding)
{
    if (smallEncoding && value == 0)
    {
        // Zero compresses to a bare format code (uint0).
        AmqpBitConverter.WriteUByte(buffer, FormatCode.UInt0);
    }
    else if (smallEncoding && value <= byte.MaxValue)
    {
        // Values up to 255 fit in a one-byte payload (smalluint).
        AmqpBitConverter.WriteUByte(buffer, FormatCode.SmallUInt);
        AmqpBitConverter.WriteUByte(buffer, (byte)value);
    }
    else
    {
        AmqpBitConverter.WriteUByte(buffer, FormatCode.UInt);
        AmqpBitConverter.WriteUInt(buffer, value);
    }
}
/// <summary>
/// Writes an unsigned 64-bit integer value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The unsigned 64-bit integer value.</param>
/// <param name="smallEncoding">if true, try using small encoding if possible.</param>
public static void WriteULong(ByteBuffer buffer, ulong value, bool smallEncoding)
{
    if (smallEncoding && value == 0)
    {
        // Zero compresses to a bare format code (ulong0).
        AmqpBitConverter.WriteUByte(buffer, FormatCode.ULong0);
    }
    else if (smallEncoding && value <= byte.MaxValue)
    {
        // Values up to 255 fit in a one-byte payload (smallulong).
        AmqpBitConverter.WriteUByte(buffer, FormatCode.SmallULong);
        AmqpBitConverter.WriteUByte(buffer, (byte)value);
    }
    else
    {
        AmqpBitConverter.WriteUByte(buffer, FormatCode.ULong);
        AmqpBitConverter.WriteULong(buffer, value);
    }
}
/// <summary>
/// Writes a signed byte value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The signed byte value.</param>
public static void WriteByte(ByteBuffer buffer, sbyte value)
{
    // Format code followed by the one-byte payload.
    AmqpBitConverter.WriteUByte(buffer, FormatCode.Byte);
    AmqpBitConverter.WriteByte(buffer, value);
}
/// <summary>
/// Writes a signed 16-bit integer value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The signed 16-bit integer value.</param>
public static void WriteShort(ByteBuffer buffer, short value)
{
    // Format code followed by the two-byte payload.
    AmqpBitConverter.WriteUByte(buffer, FormatCode.Short);
    AmqpBitConverter.WriteShort(buffer, value);
}
/// <summary>
/// Writes a signed 32-bit integer value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The signed 32-bit integer value.</param>
/// <param name="smallEncoding">if true, try using small encoding if possible.</param>
public static void WriteInt(ByteBuffer buffer, int value, bool smallEncoding)
{
    bool fitsInSByte = value >= sbyte.MinValue && value <= sbyte.MaxValue;
    if (smallEncoding && fitsInSByte)
    {
        // smallint: one-byte signed payload.
        AmqpBitConverter.WriteUByte(buffer, FormatCode.SmallInt);
        AmqpBitConverter.WriteByte(buffer, (sbyte)value);
    }
    else
    {
        AmqpBitConverter.WriteUByte(buffer, FormatCode.Int);
        AmqpBitConverter.WriteInt(buffer, value);
    }
}
/// <summary>
/// Writes a signed 64-bit integer value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The signed 64-bit integer value.</param>
/// <param name="smallEncoding">if true, try using small encoding if possible.</param>
public static void WriteLong(ByteBuffer buffer, long value, bool smallEncoding)
{
    bool fitsInSByte = value >= sbyte.MinValue && value <= sbyte.MaxValue;
    if (smallEncoding && fitsInSByte)
    {
        // smalllong: one-byte signed payload.
        AmqpBitConverter.WriteUByte(buffer, FormatCode.SmallLong);
        AmqpBitConverter.WriteByte(buffer, (sbyte)value);
    }
    else
    {
        AmqpBitConverter.WriteUByte(buffer, FormatCode.Long);
        AmqpBitConverter.WriteLong(buffer, value);
    }
}
/// <summary>
/// Writes a char value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The char value.</param>
public static void WriteChar(ByteBuffer buffer, char value)
{
    // The char is widened to a 4-byte integer payload.
    AmqpBitConverter.WriteUByte(buffer, FormatCode.Char);
    AmqpBitConverter.WriteInt(buffer, value); // TODO: utf32
}
/// <summary>
/// Writes a 32-bit floating-point value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The 32-bit floating-point value.</param>
public static void WriteFloat(ByteBuffer buffer, float value)
{
    // Format code followed by the four-byte payload.
    AmqpBitConverter.WriteUByte(buffer, FormatCode.Float);
    AmqpBitConverter.WriteFloat(buffer, value);
}
/// <summary>
/// Writes a 64-bit floating-point value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The 64-bit floating-point value.</param>
public static void WriteDouble(ByteBuffer buffer, double value)
{
    // Format code followed by the eight-byte payload.
    AmqpBitConverter.WriteUByte(buffer, FormatCode.Double);
    AmqpBitConverter.WriteDouble(buffer, value);
}
/// <summary>
/// Writes a timestamp value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The timestamp value which is the milliseconds since UNIX epoch.</param>
public static void WriteTimestamp(ByteBuffer buffer, DateTime value)
{
    // The DateTime is converted to milliseconds-since-epoch before writing.
    AmqpBitConverter.WriteUByte(buffer, FormatCode.TimeStamp);
    AmqpBitConverter.WriteLong(buffer, DateTimeToTimestamp(value));
}
/// <summary>
/// Writes a uuid value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The uuid value.</param>
public static void WriteUuid(ByteBuffer buffer, Guid value)
{
    // Format code followed by the 16-byte payload.
    AmqpBitConverter.WriteUByte(buffer, FormatCode.Uuid);
    AmqpBitConverter.WriteUuid(buffer, value);
}
/// <summary>
/// Writes a binary value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The binary value; null is encoded as the AMQP null byte.</param>
/// <param name="smallEncoding">if true, try using small encoding if possible.</param>
public static void WriteBinary(ByteBuffer buffer, byte[] value, bool smallEncoding)
{
    if (value == null)
    {
        AmqpBitConverter.WriteUByte(buffer, FormatCode.Null);
        return;
    }

    int length = value.Length;
    if (smallEncoding && length <= byte.MaxValue)
    {
        // vbin8: one-byte length prefix.
        AmqpBitConverter.WriteUByte(buffer, FormatCode.Binary8);
        AmqpBitConverter.WriteUByte(buffer, (byte)length);
    }
    else
    {
        // vbin32: four-byte length prefix.
        AmqpBitConverter.WriteUByte(buffer, FormatCode.Binary32);
        AmqpBitConverter.WriteUInt(buffer, (uint)length);
    }

    AmqpBitConverter.WriteBytes(buffer, value, 0, length);
}
/// <summary>
/// Writes a string value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The string value; null is encoded as the AMQP null byte.</param>
/// <param name="smallEncoding">if true, try using small encoding if possible.</param>
public static void WriteString(ByteBuffer buffer, string value, bool smallEncoding)
{
    if (value == null)
    {
        AmqpBitConverter.WriteUByte(buffer, FormatCode.Null);
        return;
    }

    // The length prefix counts UTF-8 bytes, not characters.
    byte[] utf8 = Encoding.UTF8.GetBytes(value);
    if (smallEncoding && utf8.Length <= byte.MaxValue)
    {
        // str8-utf8: one-byte length prefix.
        AmqpBitConverter.WriteUByte(buffer, FormatCode.String8Utf8);
        AmqpBitConverter.WriteUByte(buffer, (byte)utf8.Length);
    }
    else
    {
        // str32-utf8: four-byte length prefix.
        AmqpBitConverter.WriteUByte(buffer, FormatCode.String32Utf8);
        AmqpBitConverter.WriteUInt(buffer, (uint)utf8.Length);
    }

    AmqpBitConverter.WriteBytes(buffer, utf8, 0, utf8.Length);
}
/// <summary>
/// Writes a symbol value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The symbol value; null is encoded as the AMQP null byte.</param>
/// <param name="smallEncoding">if true, try using small encoding if possible.</param>
public static void WriteSymbol(ByteBuffer buffer, Symbol value, bool smallEncoding)
{
    if (value == null)
    {
        AmqpBitConverter.WriteUByte(buffer, FormatCode.Null);
        return;
    }

    // The length prefix counts UTF-8 bytes, not characters.
    byte[] utf8 = Encoding.UTF8.GetBytes(value);
    if (smallEncoding && utf8.Length <= byte.MaxValue)
    {
        // sym8: one-byte length prefix.
        AmqpBitConverter.WriteUByte(buffer, FormatCode.Symbol8);
        AmqpBitConverter.WriteUByte(buffer, (byte)utf8.Length);
    }
    else
    {
        // sym32: four-byte length prefix.
        AmqpBitConverter.WriteUByte(buffer, FormatCode.Symbol32);
        AmqpBitConverter.WriteUInt(buffer, (uint)utf8.Length);
    }

    AmqpBitConverter.WriteBytes(buffer, utf8, 0, utf8.Length);
}
/// <summary>
/// Writes a list value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The list value; null is encoded as the AMQP null byte.</param>
/// <param name="smallEncoding">if true, try using small encoding if possible.</param>
public static void WriteList(ByteBuffer buffer, IList value, bool smallEncoding)
{
    if (value == null)
    {
        AmqpBitConverter.WriteUByte(buffer, FormatCode.Null);
    }
    else
    {
        // Trim trailing nulls: they carry no information and shrink the encoding.
        int last = value.Count - 1;
        while (last >= 0 && value[last] == null)
        {
            --last;
        }
        if (last < 0 && smallEncoding)
        {
            // Effectively empty list: encodes as a single list0 byte.
            AmqpBitConverter.WriteUByte(buffer, FormatCode.List0);
        }
        else
        {
            // Reserve the worst-case 9-byte header (format code + 4-byte size
            // + 4-byte count), encode the items, then patch the header once
            // the encoded size is known.
            int pos = buffer.WritePos;
            AmqpBitConverter.WriteUByte(buffer, 0);
            AmqpBitConverter.WriteUInt(buffer, 0);
            AmqpBitConverter.WriteUInt(buffer, 0);
            for (int i = 0; i <= last; ++i)
            {
                Encoder.WriteObject(buffer, value[i], smallEncoding);
            }
            // size excludes the 9 reserved header bytes.
            int size = buffer.WritePos - pos - 9;
            int count = last + 1;
            if (smallEncoding && size < byte.MaxValue && count <= byte.MaxValue)
            {
                // list8: rewrite a 3-byte header, shift payload down 6 bytes.
                buffer.Buffer[pos] = FormatCode.List8;
                buffer.Buffer[pos + 1] = (byte)(size + 1); // size field includes the count byte
                buffer.Buffer[pos + 2] = (byte)count;
                Array.Copy(buffer.Buffer, pos + 9, buffer.Buffer, pos + 3, size);
                buffer.Shrink(6);
            }
            else
            {
                // list32: header occupies exactly the reserved 9 bytes.
                buffer.Buffer[pos] = FormatCode.List32;
                AmqpBitConverter.WriteInt(buffer.Buffer, pos + 1, size + 4); // size field includes the 4-byte count
                AmqpBitConverter.WriteInt(buffer.Buffer, pos + 5, count);
            }
        }
    }
}
/// <summary>
/// Writes an array value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The array value; null is encoded as the AMQP null byte.
/// Must contain at least one element (debug-asserted).</param>
public static void WriteArray(ByteBuffer buffer, Array value)
{
    if (value == null)
    {
        AmqpBitConverter.WriteUByte(buffer, FormatCode.Null);
    }
    else
    {
        int count = value.Length;
        Fx.Assert(count > 0, "must have at least 1 element in array");
        // Reserve the worst-case 9-byte header (format code + 4-byte size
        // + 4-byte count), then patch it after encoding the elements.
        int pos = buffer.WritePos;
        AmqpBitConverter.WriteUByte(buffer, 0);
        AmqpBitConverter.WriteUInt(buffer, 0);
        AmqpBitConverter.WriteUInt(buffer, 0);
        for (int i = 0; i < count; ++i)
        {
            object item = value.GetValue(i);
            if (i == 0)
            {
                // The first element keeps its format code; in an AMQP array the
                // single leading format code applies to all elements.
                Encoder.WriteObject(buffer, item, false);
            }
            else
            {
                // Later elements must not repeat the format code: rewind one
                // byte, let the item encode over it (its format code lands on
                // lastPos), then restore the overwritten payload byte — the net
                // effect is the item's payload without its format code.
                // NOTE(review): assumes every codec writes "format code then
                // payload" with a 1-byte format code — TODO confirm.
                int lastPos = buffer.WritePos - 1;
                byte lastByte = buffer.Buffer[lastPos];
                buffer.Shrink(1);
                Encoder.WriteObject(buffer, item, false);
                buffer.Buffer[lastPos] = lastByte;
            }
        }
        // size excludes the 9 reserved header bytes.
        int size = buffer.WritePos - pos - 9;
        if (size < byte.MaxValue && count <= byte.MaxValue)
        {
            // array8: rewrite a 3-byte header, shift payload down 6 bytes.
            buffer.Buffer[pos] = FormatCode.Array8;
            buffer.Buffer[pos + 1] = (byte)(size + 1); // size field includes the count byte
            buffer.Buffer[pos + 2] = (byte)count;
            Array.Copy(buffer.Buffer, pos + 9, buffer.Buffer, pos + 3, size);
            buffer.Shrink(6);
        }
        else
        {
            // array32: header occupies exactly the reserved 9 bytes.
            buffer.Buffer[pos] = FormatCode.Array32;
            AmqpBitConverter.WriteInt(buffer.Buffer, pos + 1, size + 4); // size field includes the 4-byte count
            AmqpBitConverter.WriteInt(buffer.Buffer, pos + 5, count);
        }
    }
}
/// <summary>
/// Writes a map value to a buffer.
/// </summary>
/// <param name="buffer">The buffer to write.</param>
/// <param name="value">The map value; null is encoded as the AMQP null byte.</param>
/// <param name="smallEncoding">if true, try using small encoding if possible.</param>
public static void WriteMap(ByteBuffer buffer, Map value, bool smallEncoding)
{
    if (value == null)
    {
        AmqpBitConverter.WriteUByte(buffer, FormatCode.Null);
    }
    else
    {
        // Reserve the worst-case 9-byte header (format code + 4-byte size
        // + 4-byte count), encode all pairs, then patch the header.
        int pos = buffer.WritePos;
        AmqpBitConverter.WriteUByte(buffer, 0);
        AmqpBitConverter.WriteUInt(buffer, 0);
        AmqpBitConverter.WriteUInt(buffer, 0);
        foreach (var key in value.Keys)
        {
            Encoder.WriteObject(buffer, key);
            Encoder.WriteObject(buffer, value[key]);
        }
        // size excludes the 9 reserved header bytes.
        int size = buffer.WritePos - pos - 9;
        // AMQP counts keys and values as separate items.
        int count = value.Count * 2;
        if (smallEncoding && size < byte.MaxValue && count <= byte.MaxValue)
        {
            // map8: rewrite a 3-byte header, shift payload down 6 bytes.
            buffer.Buffer[pos] = FormatCode.Map8;
            buffer.Buffer[pos + 1] = (byte)(size + 1); // size field includes the count byte
            buffer.Buffer[pos + 2] = (byte)count;
            Array.Copy(buffer.Buffer, pos + 9, buffer.Buffer, pos + 3, size);
            buffer.Shrink(6);
        }
        else
        {
            // map32: header occupies exactly the reserved 9 bytes.
            buffer.Buffer[pos] = FormatCode.Map32;
            AmqpBitConverter.WriteInt(buffer.Buffer, pos + 1, size + 4); // size field includes the 4-byte count
            AmqpBitConverter.WriteInt(buffer.Buffer, pos + 5, count);
        }
    }
}
/// <summary>
/// Reads an object from a buffer, consuming the leading format code.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <returns>The decoded object.</returns>
public static object ReadObject(ByteBuffer buffer)
{
    byte code = Encoder.ReadFormatCode(buffer);
    return ReadObject(buffer, code);
}
/// <summary>
/// Reads an object from a buffer whose format code was already consumed.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value.</param>
/// <returns>The decoded object.</returns>
/// <exception cref="AmqpException">Thrown when the format code is not supported.</exception>
public static object ReadObject(ByteBuffer buffer, byte formatCode)
{
    Serializer codec = GetSerializer(formatCode);
    if (codec != null)
    {
        return codec.Decoder(buffer, formatCode);
    }

    // Only the described constructor (0x00) falls outside the codec table.
    if (formatCode != FormatCode.Described)
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    return ReadDescribed(buffer, formatCode);
}
/// <summary>
/// Reads a described value from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value; must be the described constructor.</param>
/// <returns>A registered described type when the descriptor is known, otherwise a <see cref="DescribedValue"/>.</returns>
public static object ReadDescribed(ByteBuffer buffer, byte formatCode)
{
    Fx.Assert(formatCode == FormatCode.Described, "Format code must be described (0)");

    object descriptor = Encoder.ReadObject(buffer);
    CreateDescribed factory = (CreateDescribed)knownDescrided[descriptor];
    if (factory != null)
    {
        // Known descriptor: let the registered type decode its own value part.
        Described known = factory();
        known.DecodeValue(buffer);
        return known;
    }

    // Unknown descriptor: wrap the raw descriptor/value pair.
    return new DescribedValue(descriptor, Encoder.ReadObject(buffer));
}
/// <summary>
/// Reads a boolean value from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value.</param>
/// <returns>The decoded boolean.</returns>
/// <exception cref="AmqpException">Thrown for a non-boolean format code.</exception>
public static bool ReadBoolean(ByteBuffer buffer, byte formatCode)
{
    // The compact forms encode the value in the format code itself.
    if (formatCode == FormatCode.BooleanTrue)
    {
        return true;
    }

    if (formatCode == FormatCode.BooleanFalse)
    {
        return false;
    }

    if (formatCode != FormatCode.Boolean)
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    // Fixed-width form: any non-zero payload byte means true.
    return AmqpBitConverter.ReadUByte(buffer) != 0;
}
/// <summary>
/// Reads an unsigned byte value from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value.</param>
/// <returns>The decoded unsigned byte.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code.</exception>
public static byte ReadUByte(ByteBuffer buffer, byte formatCode)
{
    if (formatCode != FormatCode.UByte)
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    return AmqpBitConverter.ReadUByte(buffer);
}
/// <summary>
/// Reads an unsigned 16-bit integer from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value.</param>
/// <returns>The decoded unsigned 16-bit integer.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code.</exception>
public static ushort ReadUShort(ByteBuffer buffer, byte formatCode)
{
    if (formatCode != FormatCode.UShort)
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    return AmqpBitConverter.ReadUShort(buffer);
}
/// <summary>
/// Reads an unsigned 32-bit integer from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value (uint0, smalluint or uint).</param>
/// <returns>The decoded unsigned 32-bit integer.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code.</exception>
public static uint ReadUInt(ByteBuffer buffer, byte formatCode)
{
    if (formatCode == FormatCode.UInt0)
    {
        // uint0 has no payload.
        return 0;
    }

    if (formatCode == FormatCode.SmallUInt)
    {
        return AmqpBitConverter.ReadUByte(buffer);
    }

    if (formatCode != FormatCode.UInt)
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    return AmqpBitConverter.ReadUInt(buffer);
}
/// <summary>
/// Reads an unsigned 64-bit integer from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value (ulong0, smallulong or ulong).</param>
/// <returns>The decoded unsigned 64-bit integer.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code.</exception>
public static ulong ReadULong(ByteBuffer buffer, byte formatCode)
{
    if (formatCode == FormatCode.ULong0)
    {
        // ulong0 has no payload.
        return 0;
    }

    if (formatCode == FormatCode.SmallULong)
    {
        return AmqpBitConverter.ReadUByte(buffer);
    }

    if (formatCode != FormatCode.ULong)
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    return AmqpBitConverter.ReadULong(buffer);
}
/// <summary>
/// Reads a signed byte from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value.</param>
/// <returns>The decoded signed byte.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code.</exception>
public static sbyte ReadByte(ByteBuffer buffer, byte formatCode)
{
    if (formatCode != FormatCode.Byte)
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    return AmqpBitConverter.ReadByte(buffer);
}
/// <summary>
/// Reads a signed 16-bit integer from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value.</param>
/// <returns>The decoded signed 16-bit integer.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code.</exception>
public static short ReadShort(ByteBuffer buffer, byte formatCode)
{
    if (formatCode != FormatCode.Short)
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    return AmqpBitConverter.ReadShort(buffer);
}
/// <summary>
/// Reads a signed 32-bit integer from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value (smallint or int).</param>
/// <returns>The decoded signed 32-bit integer.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code.</exception>
public static int ReadInt(ByteBuffer buffer, byte formatCode)
{
    if (formatCode == FormatCode.SmallInt)
    {
        // One-byte signed payload, sign-extended to int.
        return AmqpBitConverter.ReadByte(buffer);
    }

    if (formatCode != FormatCode.Int)
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    return AmqpBitConverter.ReadInt(buffer);
}
/// <summary>
/// Reads a signed 64-bit integer from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value (smalllong or long).</param>
/// <returns>The decoded signed 64-bit integer.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code.</exception>
public static long ReadLong(ByteBuffer buffer, byte formatCode)
{
    if (formatCode == FormatCode.SmallLong)
    {
        // One-byte signed payload, sign-extended to long.
        return AmqpBitConverter.ReadByte(buffer);
    }

    if (formatCode != FormatCode.Long)
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    return AmqpBitConverter.ReadLong(buffer);
}
/// <summary>
/// Reads a char value from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value.</param>
/// <returns>The decoded char (narrowed from the 4-byte payload).</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code.</exception>
public static char ReadChar(ByteBuffer buffer, byte formatCode)
{
    if (formatCode != FormatCode.Char)
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    return (char)AmqpBitConverter.ReadInt(buffer);
}
/// <summary>
/// Reads a 32-bit floating-point value from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value.</param>
/// <returns>The decoded 32-bit floating-point value.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code.</exception>
public static float ReadFloat(ByteBuffer buffer, byte formatCode)
{
    if (formatCode != FormatCode.Float)
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    return AmqpBitConverter.ReadFloat(buffer);
}
/// <summary>
/// Reads a 64-bit floating-point value from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value.</param>
/// <returns>The decoded 64-bit floating-point value.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code.</exception>
public static double ReadDouble(ByteBuffer buffer, byte formatCode)
{
    if (formatCode != FormatCode.Double)
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    return AmqpBitConverter.ReadDouble(buffer);
}
/// <summary>
/// Reads a timestamp value from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value.</param>
/// <returns>The decoded UTC DateTime.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code.</exception>
public static DateTime ReadTimestamp(ByteBuffer buffer, byte formatCode)
{
    if (formatCode != FormatCode.TimeStamp)
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    // The wire value is milliseconds since the Unix epoch.
    return TimestampToDateTime(AmqpBitConverter.ReadLong(buffer));
}
/// <summary>
/// Reads a uuid value from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value.</param>
/// <returns>The decoded Guid.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code.</exception>
public static Guid ReadUuid(ByteBuffer buffer, byte formatCode)
{
    if (formatCode != FormatCode.Uuid)
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    return AmqpBitConverter.ReadUuid(buffer);
}
/// <summary>
/// Reads a binary value from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value (null, vbin8 or vbin32).</param>
/// <returns>The decoded byte array, or null for the AMQP null format code.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code.</exception>
public static byte[] ReadBinary(ByteBuffer buffer, byte formatCode)
{
    if (formatCode == FormatCode.Null)
    {
        return null;
    }

    int length;
    if (formatCode == FormatCode.Binary8)
    {
        length = AmqpBitConverter.ReadUByte(buffer);
    }
    else if (formatCode == FormatCode.Binary32)
    {
        length = (int)AmqpBitConverter.ReadUInt(buffer);
    }
    else
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    // Ensure the whole payload is available before copying it out.
    buffer.Validate(false, length);
    byte[] result = new byte[length];
    Array.Copy(buffer.Buffer, buffer.Offset, result, 0, length);
    buffer.Complete(length);
    return result;
}
/// <summary>
/// Reads a string value from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value.</param>
public static string ReadString(ByteBuffer buffer, byte formatCode)
{
    // Delegates to the shared reader with the str8/str32 UTF-8 format codes.
    return ReadString(buffer, formatCode, FormatCode.String8Utf8, FormatCode.String32Utf8, "string");
}
/// <summary>
/// Reads a symbol value from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value.</param>
public static Symbol ReadSymbol(ByteBuffer buffer, byte formatCode)
{
    // Delegates to the shared reader with the sym8/sym32 format codes;
    // the string result converts implicitly to Symbol.
    return ReadString(buffer, formatCode, FormatCode.Symbol8, FormatCode.Symbol32, "symbol");
}
/// <summary>
/// Reads a list value from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value (null, list0, list8 or list32).</param>
/// <returns>The decoded list, or null for the AMQP null format code.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code.</exception>
public static List ReadList(ByteBuffer buffer, byte formatCode)
{
    if (formatCode == FormatCode.Null)
    {
        return null;
    }

    // The size header is consumed to advance the buffer; items are decoded
    // one by one so only the count drives the loop.
    int encodedSize;
    int itemCount;
    if (formatCode == FormatCode.List0)
    {
        encodedSize = 0;
        itemCount = 0;
    }
    else if (formatCode == FormatCode.List8)
    {
        encodedSize = AmqpBitConverter.ReadUByte(buffer);
        itemCount = AmqpBitConverter.ReadUByte(buffer);
    }
    else if (formatCode == FormatCode.List32)
    {
        encodedSize = (int)AmqpBitConverter.ReadUInt(buffer);
        itemCount = (int)AmqpBitConverter.ReadUInt(buffer);
    }
    else
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    List result = new List();
    for (int i = 0; i < itemCount; ++i)
    {
        result.Add(ReadObject(buffer));
    }
    return result;
}
/// <summary>
/// Reads an array value from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value (null, array8 or array32).</param>
/// <returns>The decoded array, or null for the AMQP null format code.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format or element format code.</exception>
public static Array ReadArray(ByteBuffer buffer, byte formatCode)
{
    if (formatCode == FormatCode.Null)
    {
        return null;
    }

    // The size header is consumed to advance the buffer; decoding is driven
    // by the element count.
    int encodedSize;
    int itemCount;
    if (formatCode == FormatCode.Array8)
    {
        encodedSize = AmqpBitConverter.ReadUByte(buffer);
        itemCount = AmqpBitConverter.ReadUByte(buffer);
    }
    else if (formatCode == FormatCode.Array32)
    {
        encodedSize = (int)AmqpBitConverter.ReadUInt(buffer);
        itemCount = (int)AmqpBitConverter.ReadUInt(buffer);
    }
    else
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    // A single element format code applies to every item in the array.
    byte elementCode = Encoder.ReadFormatCode(buffer);
    Serializer elementCodec = GetSerializer(elementCode);
    if (elementCodec == null)
    {
        throw DecodeException(elementCode, buffer.Offset);
    }

    Array result = Array.CreateInstance(elementCodec.Type, itemCount);
    IList writable = result;
    for (int i = 0; i < itemCount; ++i)
    {
        writable[i] = elementCodec.Decoder(buffer, elementCode);
    }
    return result;
}
/// <summary>
/// Reads a map value from a buffer.
/// </summary>
/// <param name="buffer">The buffer to read.</param>
/// <param name="formatCode">The format code of the value (null, map8 or map32).</param>
/// <returns>The decoded map, or null for the AMQP null format code.</returns>
/// <exception cref="AmqpException">Thrown for an unexpected format code or an odd item count.</exception>
public static Map ReadMap(ByteBuffer buffer, byte formatCode)
{
    if (formatCode == FormatCode.Null)
    {
        return null;
    }

    // The size header is consumed to advance the buffer; items are decoded
    // pairwise so only the count drives the loop.
    int encodedSize;
    int itemCount;
    if (formatCode == FormatCode.Map8)
    {
        encodedSize = AmqpBitConverter.ReadUByte(buffer);
        itemCount = AmqpBitConverter.ReadUByte(buffer);
    }
    else if (formatCode == FormatCode.Map32)
    {
        encodedSize = (int)AmqpBitConverter.ReadUInt(buffer);
        itemCount = (int)AmqpBitConverter.ReadUInt(buffer);
    }
    else
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    // Keys and values are counted separately, so the total must be even.
    if (itemCount % 2 > 0)
    {
        throw new AmqpException(ErrorCode.DecodeError,
            Fx.Format(SRAmqp.InvalidMapCount, itemCount));
    }

    Map result = new Map();
    for (int i = 0; i < itemCount; i += 2)
    {
        result.Add(ReadObject(buffer), ReadObject(buffer));
    }
    return result;
}
// Maps a format code to its serializer via the two-level index table:
// the high nibble selects a row (rows start at format code 0x40),
// the low nibble selects the entry within that row. Returns null when
// the code falls outside the table (unsupported type).
static Serializer GetSerializer(byte formatCode)
{
    int row = ((formatCode & 0xF0) >> 4) - 4;
    if (row < 0 || row >= codecIndexTable.Length)
    {
        return null;
    }

    int col = formatCode & 0x0F;
    if (col >= codecIndexTable[row].Length)
    {
        return null;
    }

    return serializers[codecIndexTable[row][col]];
}
// Shared reader for the length-prefixed UTF-8 encodings (string and symbol).
// code8/code32 are the accepted one-byte and four-byte length-prefix format
// codes; "type" names the logical type (currently unused in the body).
static string ReadString(ByteBuffer buffer, byte formatCode, byte code8, byte code32, string type)
{
    if (formatCode == FormatCode.Null)
    {
        return null;
    }

    int length;
    if (formatCode == code8)
    {
        length = AmqpBitConverter.ReadUByte(buffer);
    }
    else if (formatCode == code32)
    {
        length = (int)AmqpBitConverter.ReadUInt(buffer);
    }
    else
    {
        throw DecodeException(formatCode, buffer.Offset);
    }

    // Ensure the whole payload is available before decoding it.
    buffer.Validate(false, length);
    string result = new string(Encoding.UTF8.GetChars(buffer.Buffer, buffer.Offset, length));
    buffer.Complete(length);
    return result;
}
// Builds the standard decode-error exception for an unexpected format code
// at the given buffer offset.
static AmqpException DecodeException(byte formatCode, int offset)
{
    return new AmqpException(ErrorCode.DecodeError,
        Fx.Format(SRAmqp.AmqpInvalidFormatCode, formatCode, offset));
}
}
}
| |
using CakeMail.RestClient.Models;
using CakeMail.RestClient.Utilities;
using Pathoschild.Http.Client;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace CakeMail.RestClient.Resources
{
/// <summary>
/// Allows you to manage Campaigns.
/// </summary>
/// <seealso cref="CakeMail.RestClient.Resources.ICampaigns" />
public class Campaigns : ICampaigns
{
#region Fields
private readonly IClient _client;
#endregion
#region Constructor
/// <summary>
/// Initializes a new instance of the <see cref="Campaigns" /> class.
/// </summary>
/// <param name="client">The HTTP client.</param>
internal Campaigns(IClient client)
{
    // The client is injected so this resource shares the caller's HTTP pipeline.
    _client = client;
}
#endregion
#region Public Methods
/// <summary>
/// Create a new campaign.
/// </summary>
/// <param name="userKey">User Key of the user who initiates the call.</param>
/// <param name="name">Name of the campaign.</param>
/// <param name="clientId">Client ID of the client in which the campaign is created.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>ID of the new campaign.</returns>
public Task<long> CreateAsync(string userKey, string name, long? clientId = null, CancellationToken cancellationToken = default)
{
    var formData = new List<KeyValuePair<string, object>>
    {
        new KeyValuePair<string, object>("user_key", userKey),
        new KeyValuePair<string, object>("name", name)
    };

    // client_id is sent only when the caller scopes the request to a client.
    if (clientId.HasValue)
    {
        formData.Add(new KeyValuePair<string, object>("client_id", clientId.Value));
    }

    return _client
        .PostAsync("Campaign/Create")
        .WithFormUrlEncodedBody(formData)
        .WithCancellationToken(cancellationToken)
        .AsCakeMailObject<long>();
}
/// <summary>
/// Delete a campaign.
/// </summary>
/// <param name="userKey">User Key of the user who initiates the call.</param>
/// <param name="campaignId">ID of the campaign to delete.</param>
/// <param name="clientId">Client ID of the client in which the campaign is located.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>True if the campaign is deleted.</returns>
public Task<bool> DeleteAsync(string userKey, long campaignId, long? clientId = null, CancellationToken cancellationToken = default)
{
    var formData = new List<KeyValuePair<string, object>>
    {
        new KeyValuePair<string, object>("user_key", userKey),
        new KeyValuePair<string, object>("campaign_id", campaignId)
    };

    // client_id is sent only when the caller scopes the request to a client.
    if (clientId.HasValue)
    {
        formData.Add(new KeyValuePair<string, object>("client_id", clientId.Value));
    }

    return _client
        .PostAsync("Campaign/Delete")
        .WithFormUrlEncodedBody(formData)
        .WithCancellationToken(cancellationToken)
        .AsCakeMailObject<bool>();
}
/// <summary>
/// Retrieve a campaign.
/// </summary>
/// <param name="userKey">User Key of the user who initiates the call.</param>
/// <param name="campaignId">ID of the campaign.</param>
/// <param name="clientId">Client ID of the client in which the campaign is located.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The <see cref="Campaign">campaign</see>.</returns>
public Task<Campaign> GetAsync(string userKey, long campaignId, long? clientId = null, CancellationToken cancellationToken = default)
{
var parameters = new List<KeyValuePair<string, object>>
{
new KeyValuePair<string, object>("user_key", userKey),
new KeyValuePair<string, object>("campaign_id", campaignId)
};
if (clientId.HasValue) parameters.Add(new KeyValuePair<string, object>("client_id", clientId.Value));
return _client
.PostAsync("Campaign/GetInfo")
.WithFormUrlEncodedBody(parameters)
.WithCancellationToken(cancellationToken)
.AsCakeMailObject<Campaign>();
}
/// <summary>
/// Retrieve the campaigns matching the filtering criteria.
/// </summary>
/// <param name="userKey">User Key of the user who initiates the call.</param>
/// <param name="status">Filter using the campaign status. Possible value 'ongoing', 'closed'.</param>
/// <param name="name">Filter using the campaign name.</param>
/// <param name="sortBy">Sort resulting campaigns. Possible value 'created_on', 'name'.</param>
/// <param name="sortDirection">Direction of the sorting. Possible value 'asc', 'desc'.</param>
/// <param name="limit">Limit the number of resulting campaigns.</param>
/// <param name="offset">Offset the beginning of resulting campaigns.</param>
/// <param name="clientId">Client ID of the client in which the campaign is located.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>Array of <see cref="Campaign">campaigns</see> matching the filtering criteria.</returns>
public Task<Campaign[]> GetListAsync(string userKey, CampaignStatus? status = null, string name = null, CampaignsSortBy? sortBy = null, SortDirection? sortDirection = null, int? limit = 0, int? offset = 0, long? clientId = null, CancellationToken cancellationToken = default)
{
var parameters = new List<KeyValuePair<string, object>>
{
new KeyValuePair<string, object>("user_key", userKey),
new KeyValuePair<string, object>("count", "false")
};
if (status.HasValue) parameters.Add(new KeyValuePair<string, object>("status", status.Value.GetEnumMemberValue()));
if (name != null) parameters.Add(new KeyValuePair<string, object>("name", name));
if (sortBy.HasValue) parameters.Add(new KeyValuePair<string, object>("sort_by", sortBy.Value.GetEnumMemberValue()));
if (sortDirection.HasValue) parameters.Add(new KeyValuePair<string, object>("direction", sortDirection.Value.GetEnumMemberValue()));
if (limit > 0) parameters.Add(new KeyValuePair<string, object>("limit", limit));
if (offset > 0) parameters.Add(new KeyValuePair<string, object>("offset", offset));
if (clientId.HasValue) parameters.Add(new KeyValuePair<string, object>("client_id", clientId.Value));
return _client
.PostAsync("Campaign/GetList")
.WithFormUrlEncodedBody(parameters)
.WithCancellationToken(cancellationToken)
.AsCakeMailObject<Campaign[]>("campaigns");
}
/// <summary>
/// Get a count of campaigns matching the filtering criteria.
/// </summary>
/// <param name="userKey">User Key of the user who initiates the call.</param>
/// <param name="status">Filter using the campaign status. Possible value 'ongoing', 'closed'.</param>
/// <param name="name">Filter using the campaign name.</param>
/// <param name="clientId">Client ID of the client in which the campaign is located.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>The count of campaigns matching the filtering criteria.</returns>
public Task<long> GetCountAsync(string userKey, CampaignStatus? status = null, string name = null, long? clientId = null, CancellationToken cancellationToken = default)
{
var parameters = new List<KeyValuePair<string, object>>
{
new KeyValuePair<string, object>("user_key", userKey),
new KeyValuePair<string, object>("count", "true")
};
if (status.HasValue) parameters.Add(new KeyValuePair<string, object>("status", status.Value.GetEnumMemberValue()));
if (name != null) parameters.Add(new KeyValuePair<string, object>("name", name));
if (clientId.HasValue) parameters.Add(new KeyValuePair<string, object>("client_id", clientId.Value));
return _client
.PostAsync("Campaign/GetList")
.WithFormUrlEncodedBody(parameters)
.WithCancellationToken(cancellationToken)
.AsCakeMailObject<long>("count");
}
/// <summary>
/// Update a campaign.
/// </summary>
/// <param name="userKey">User Key of the user who initiates the call.</param>
/// <param name="campaignId">ID of the campaign.</param>
/// <param name="status">The status of the campaign. Possible value 'ongoing', 'closed'.</param>
/// <param name="name">The name of the campaign.</param>
/// <param name="clientId">Client ID of the client in which the campaign is located.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>True if the record was updated.</returns>
public Task<bool> UpdateAsync(string userKey, long campaignId, CampaignStatus? status = null, string name = null, long? clientId = null, CancellationToken cancellationToken = default)
{
var parameters = new List<KeyValuePair<string, object>>
{
new KeyValuePair<string, object>("user_key", userKey),
new KeyValuePair<string, object>("campaign_id", campaignId),
};
if (status.HasValue) parameters.Add(new KeyValuePair<string, object>("status", status.Value.GetEnumMemberValue()));
if (!string.IsNullOrEmpty(name)) parameters.Add(new KeyValuePair<string, object>("name", name));
if (clientId.HasValue) parameters.Add(new KeyValuePair<string, object>("client_id", clientId.Value));
return _client
.PostAsync("Campaign/SetInfo")
.WithFormUrlEncodedBody(parameters)
.WithCancellationToken(cancellationToken)
.AsCakeMailObject<bool>();
}
#endregion
}
}
| |
// Type: System.Drawing.Color
// Assembly: System.Drawing, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a
// Assembly location: C:\Windows\Microsoft.NET\Framework\v2.0.50727\System.Drawing.dll
using System;
using System.Text;
namespace TheArtOfDev.HtmlRenderer.Adapters.Entities
{
/// <summary>
/// Represents an ARGB (alpha, red, green, blue) color.
/// </summary>
public struct RColor
{
    #region Fields and Consts

    /// <summary>
    /// Represents a color that is null.
    /// </summary>
    /// <filterpriority>1</filterpriority>
    public static readonly RColor Empty = new RColor();

    // Packed ARGB value: bits 24-31 = alpha, 16-23 = red, 8-15 = green, 0-7 = blue.
    private readonly long _value;

    #endregion

    private RColor(long value)
    {
        _value = value;
    }

    /// <summary>
    /// Gets the fully transparent color (all components zero).
    /// </summary>
    public static RColor Transparent
    {
        get { return new RColor(0); }
    }

    /// <summary>
    /// Gets a system-defined color that has an ARGB value of #FF000000.
    /// </summary>
    public static RColor Black
    {
        get { return FromArgb(0, 0, 0); }
    }

    /// <summary>
    /// Gets a system-defined color that has an ARGB value of #FFFFFFFF.
    /// </summary>
    public static RColor White
    {
        get { return FromArgb(255, 255, 255); }
    }

    /// <summary>
    /// Gets a system-defined color that has an ARGB value of #FFF5F5F5.
    /// </summary>
    public static RColor WhiteSmoke
    {
        get { return FromArgb(245, 245, 245); }
    }

    /// <summary>
    /// Gets a system-defined color that has an ARGB value of #FFD3D3D3.
    /// </summary>
    public static RColor LightGray
    {
        get { return FromArgb(211, 211, 211); }
    }

    /// <summary>
    /// Gets the red component value of this <see cref="RColor" /> structure.
    /// </summary>
    public byte R
    {
        get { return (byte)((_value >> 16) & 0xFF); }
    }

    /// <summary>
    /// Gets the green component value of this <see cref="RColor" /> structure.
    /// </summary>
    public byte G
    {
        get { return (byte)((_value >> 8) & 0xFF); }
    }

    /// <summary>
    /// Gets the blue component value of this <see cref="RColor" /> structure.
    /// </summary>
    public byte B
    {
        get { return (byte)(_value & 0xFF); }
    }

    /// <summary>
    /// Gets the alpha component value of this <see cref="RColor" /> structure.
    /// </summary>
    public byte A
    {
        get { return (byte)((_value >> 24) & 0xFF); }
    }

    /// <summary>
    /// Specifies whether this <see cref="RColor" /> structure is uninitialized.
    /// </summary>
    /// <returns>
    /// This property returns true if this color is uninitialized; otherwise, false.
    /// </returns>
    /// <filterpriority>1</filterpriority>
    public bool IsEmpty
    {
        get { return _value == 0; }
    }

    /// <summary>
    /// Tests whether two specified <see cref="RColor" /> structures are equivalent.
    /// </summary>
    /// <returns>
    /// true if the two <see cref="RColor" /> structures are equal; otherwise, false.
    /// </returns>
    /// <param name="left">The <see cref="RColor" /> on the left of the equality operator.</param>
    /// <param name="right">The <see cref="RColor" /> on the right of the equality operator.</param>
    /// <filterpriority>3</filterpriority>
    public static bool operator ==(RColor left, RColor right)
    {
        return left._value == right._value;
    }

    /// <summary>
    /// Tests whether two specified <see cref="RColor" /> structures are different.
    /// </summary>
    /// <returns>
    /// true if the two <see cref="RColor" /> structures are different; otherwise, false.
    /// </returns>
    /// <param name="left">The <see cref="RColor" /> on the left of the inequality operator.</param>
    /// <param name="right">The <see cref="RColor" /> on the right of the inequality operator.</param>
    /// <filterpriority>3</filterpriority>
    public static bool operator !=(RColor left, RColor right)
    {
        return !(left == right);
    }

    /// <summary>
    /// Creates a <see cref="RColor" /> structure from the four ARGB component (alpha, red, green, and blue) values.
    /// Each component must fit in 8 bits even though it is passed as a 32-bit value.
    /// </summary>
    /// <returns>The <see cref="RColor" /> that this method creates.</returns>
    /// <param name="alpha">The alpha component. Valid values are 0 through 255.</param>
    /// <param name="red">The red component. Valid values are 0 through 255.</param>
    /// <param name="green">The green component. Valid values are 0 through 255.</param>
    /// <param name="blue">The blue component. Valid values are 0 through 255.</param>
    /// <exception cref="T:System.ArgumentException">
    /// <paramref name="alpha" />, <paramref name="red" />, <paramref name="green" />, or <paramref name="blue" /> is less than 0 or greater than 255.
    /// </exception>
    /// <filterpriority>1</filterpriority>
    public static RColor FromArgb(int alpha, int red, int green, int blue)
    {
        CheckByte(alpha);
        CheckByte(red);
        CheckByte(green);
        CheckByte(blue);
        long packed = (long)alpha << 24 | (long)red << 16 | (long)green << 8 | (long)blue;
        return new RColor(packed & 0xFFFFFFFFL);
    }

    /// <summary>
    /// Creates a <see cref="RColor" /> structure from the specified 8-bit color values (red, green, and blue).
    /// The alpha value is implicitly 255 (fully opaque).
    /// </summary>
    /// <returns>The <see cref="RColor" /> that this method creates.</returns>
    /// <param name="red">The red component value. Valid values are 0 through 255.</param>
    /// <param name="green">The green component value. Valid values are 0 through 255.</param>
    /// <param name="blue">The blue component value. Valid values are 0 through 255.</param>
    /// <exception cref="T:System.ArgumentException">
    /// <paramref name="red" />, <paramref name="green" />, or <paramref name="blue" /> is less than 0 or greater than 255.
    /// </exception>
    /// <filterpriority>1</filterpriority>
    public static RColor FromArgb(int red, int green, int blue)
    {
        return FromArgb(byte.MaxValue, red, green, blue);
    }

    /// <summary>
    /// Tests whether the specified object is a <see cref="RColor" /> structure equivalent to this structure.
    /// </summary>
    /// <returns>
    /// true if <paramref name="obj" /> is a <see cref="RColor" /> structure equivalent to this structure; otherwise, false.
    /// </returns>
    /// <param name="obj">The object to test.</param>
    /// <filterpriority>1</filterpriority>
    public override bool Equals(object obj)
    {
        return obj is RColor && ((RColor)obj)._value == _value;
    }

    /// <summary>
    /// Returns a hash code for this <see cref="RColor" /> structure.
    /// </summary>
    /// <returns>
    /// An integer value that specifies the hash code for this <see cref="RColor" />.
    /// </returns>
    /// <filterpriority>1</filterpriority>
    public override int GetHashCode()
    {
        return _value.GetHashCode();
    }

    /// <summary>
    /// Converts this <see cref="RColor" /> structure to a human-readable string.
    /// </summary>
    public override string ToString()
    {
        // An all-zero value prints as "Empty" to mirror the IsEmpty property.
        if (_value == 0)
            return GetType().Name + " [Empty]";
        return GetType().Name + " [A=" + A + ", R=" + R + ", G=" + G + ", B=" + B + "]";
    }

    #region Private methods

    // Throws when the component is outside the 0-255 range accepted for a color channel.
    private static void CheckByte(int value)
    {
        if (value < 0 || value > byte.MaxValue)
            throw new ArgumentException("InvalidEx2BoundArgument");
    }

    #endregion
}
}
| |
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using System;
using System.Reactive.Disposables;
using Avalonia.Platform;
namespace Avalonia.Threading
{
/// <summary>
/// A timer that uses a <see cref="Dispatcher"/> to fire at a specified interval.
/// </summary>
public class DispatcherTimer
{
    // Handle returned by the platform threading service; non-null exactly while running.
    private IDisposable _timer;
    private readonly DispatcherPriority _priority;
    private TimeSpan _interval;

    /// <summary>
    /// Initializes a new instance of the <see cref="DispatcherTimer"/> class.
    /// </summary>
    public DispatcherTimer() : this(DispatcherPriority.Background)
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="DispatcherTimer"/> class.
    /// </summary>
    /// <param name="priority">The priority to use.</param>
    public DispatcherTimer(DispatcherPriority priority)
    {
        _priority = priority;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="DispatcherTimer"/> class.
    /// </summary>
    /// <param name="interval">The interval at which to tick.</param>
    /// <param name="priority">The priority to use.</param>
    /// <param name="callback">The event to call when the timer ticks.</param>
    public DispatcherTimer(TimeSpan interval, DispatcherPriority priority, EventHandler callback) : this(priority)
    {
        // FIX: removed the redundant "_priority = priority;" — the chained
        // ": this(priority)" constructor already assigns the field.
        Interval = interval;
        Tick += callback;
    }

    /// <summary>
    /// Finalizes an instance of the <see cref="DispatcherTimer"/> class.
    /// </summary>
    ~DispatcherTimer()
    {
        // NOTE(review): Stop() disposes the platform timer handle from the
        // finalizer thread; this assumes the handle's Dispose is safe to call
        // there — confirm against the IPlatformThreadingInterface implementation.
        if (_timer != null)
        {
            Stop();
        }
    }

    /// <summary>
    /// Raised when the timer ticks.
    /// </summary>
    public event EventHandler Tick;

    /// <summary>
    /// Gets or sets the interval at which the timer ticks.
    /// </summary>
    public TimeSpan Interval
    {
        get
        {
            return _interval;
        }

        set
        {
            // Restart the underlying timer so a new interval takes effect immediately.
            bool enabled = IsEnabled;
            Stop();
            _interval = value;
            IsEnabled = enabled;
        }
    }

    /// <summary>
    /// Gets or sets a value indicating whether the timer is running.
    /// </summary>
    public bool IsEnabled
    {
        get
        {
            return _timer != null;
        }

        set
        {
            if (IsEnabled != value)
            {
                if (value)
                {
                    Start();
                }
                else
                {
                    Stop();
                }
            }
        }
    }

    /// <summary>
    /// Gets or sets user-defined data associated with the timer.
    /// </summary>
    public object Tag
    {
        get;
        set;
    }

    /// <summary>
    /// Starts a new timer.
    /// </summary>
    /// <param name="action">
    /// The method to call on timer tick. If the method returns false, the timer will stop.
    /// </param>
    /// <param name="interval">The interval at which to tick.</param>
    /// <param name="priority">The priority to use.</param>
    /// <returns>An <see cref="IDisposable"/> used to cancel the timer.</returns>
    public static IDisposable Run(Func<bool> action, TimeSpan interval, DispatcherPriority priority = DispatcherPriority.Normal)
    {
        var timer = new DispatcherTimer(priority) { Interval = interval };

        timer.Tick += (s, e) =>
        {
            if (!action())
            {
                timer.Stop();
            }
        };

        timer.Start();

        return Disposable.Create(() => timer.Stop());
    }

    /// <summary>
    /// Runs a method once, after the specified interval.
    /// </summary>
    /// <param name="action">
    /// The method to call after the interval has elapsed.
    /// </param>
    /// <param name="interval">The interval after which to call the method.</param>
    /// <param name="priority">The priority to use.</param>
    /// <returns>An <see cref="IDisposable"/> used to cancel the timer.</returns>
    public static IDisposable RunOnce(
        Action action,
        TimeSpan interval,
        DispatcherPriority priority = DispatcherPriority.Normal)
    {
        var timer = new DispatcherTimer(priority) { Interval = interval };

        timer.Tick += (s, e) =>
        {
            action();
            timer.Stop();
        };

        timer.Start();

        return Disposable.Create(() => timer.Stop());
    }

    /// <summary>
    /// Starts the timer.
    /// </summary>
    /// <exception cref="Exception">
    /// Thrown when no <see cref="IPlatformThreadingInterface"/> is registered.
    /// </exception>
    public void Start()
    {
        if (!IsEnabled)
        {
            IPlatformThreadingInterface threading = AvaloniaLocator.Current.GetService<IPlatformThreadingInterface>();

            if (threading == null)
            {
                throw new Exception("Could not start timer: IPlatformThreadingInterface is not registered.");
            }

            _timer = threading.StartTimer(_priority, Interval, InternalTick);
        }
    }

    /// <summary>
    /// Stops the timer.
    /// </summary>
    public void Stop()
    {
        if (IsEnabled)
        {
            _timer.Dispose();
            _timer = null;
        }
    }

    /// <summary>
    /// Raises the <see cref="Tick"/> event on the dispatcher thread.
    /// </summary>
    private void InternalTick()
    {
        Dispatcher.UIThread.EnsurePriority(_priority);
        Tick?.Invoke(this, EventArgs.Empty);
    }
}
}
| |
/*
* Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the lambda-2014-11-11.normal.json service model.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
namespace Amazon.Lambda.Model
{
/// <summary>
/// A complex type that describes function metadata.
/// </summary>
/// <remarks>
/// Generated from the lambda-2014-11-11 service model (see the file header:
/// "Do not modify this file") — avoid hand-editing the logic here.
/// Value-typed properties are backed by nullable fields so the SDK can tell
/// "not returned by the service" (IsSetXxx() == false) apart from a default value.
/// </remarks>
public partial class GetFunctionConfigurationResult : AmazonWebServiceResponse
{
    private long? _codeSize;
    private string _configurationId;
    private string _description;
    private string _functionARN;
    private string _functionName;
    private string _handler;
    private string _lastModified;
    private int? _memorySize;
    private Mode _mode;
    private string _role;
    private Runtime _runtime;
    private int? _timeout;

    /// <summary>
    /// Gets and sets the property CodeSize.
    /// <para>
    /// The size, in bytes, of the function .zip file you uploaded.
    /// </para>
    /// </summary>
    public long CodeSize
    {
        // Returns 0 when the service did not supply a value; use IsSetCodeSize() to distinguish.
        get { return this._codeSize.GetValueOrDefault(); }
        set { this._codeSize = value; }
    }

    // Check to see if CodeSize property is set
    internal bool IsSetCodeSize()
    {
        return this._codeSize.HasValue;
    }

    /// <summary>
    /// Gets and sets the property ConfigurationId.
    /// <para>
    /// A Lambda-assigned unique identifier for the current function code and related configuration.
    /// </para>
    /// </summary>
    public string ConfigurationId
    {
        get { return this._configurationId; }
        set { this._configurationId = value; }
    }

    // Check to see if ConfigurationId property is set
    internal bool IsSetConfigurationId()
    {
        return this._configurationId != null;
    }

    /// <summary>
    /// Gets and sets the property Description.
    /// <para>
    /// The user-provided description.
    /// </para>
    /// </summary>
    public string Description
    {
        get { return this._description; }
        set { this._description = value; }
    }

    // Check to see if Description property is set
    internal bool IsSetDescription()
    {
        return this._description != null;
    }

    /// <summary>
    /// Gets and sets the property FunctionARN.
    /// <para>
    /// The Amazon Resource Name (ARN) assigned to the function.
    /// </para>
    /// </summary>
    public string FunctionARN
    {
        get { return this._functionARN; }
        set { this._functionARN = value; }
    }

    // Check to see if FunctionARN property is set
    internal bool IsSetFunctionARN()
    {
        return this._functionARN != null;
    }

    /// <summary>
    /// Gets and sets the property FunctionName.
    /// <para>
    /// The name of the function.
    /// </para>
    /// </summary>
    public string FunctionName
    {
        get { return this._functionName; }
        set { this._functionName = value; }
    }

    // Check to see if FunctionName property is set
    internal bool IsSetFunctionName()
    {
        return this._functionName != null;
    }

    /// <summary>
    /// Gets and sets the property Handler.
    /// <para>
    /// The function Lambda calls to begin executing your function.
    /// </para>
    /// </summary>
    public string Handler
    {
        get { return this._handler; }
        set { this._handler = value; }
    }

    // Check to see if Handler property is set
    internal bool IsSetHandler()
    {
        return this._handler != null;
    }

    /// <summary>
    /// Gets and sets the property LastModified.
    /// <para>
    /// The timestamp of the last time you updated the function.
    /// </para>
    /// </summary>
    public string LastModified
    {
        get { return this._lastModified; }
        set { this._lastModified = value; }
    }

    // Check to see if LastModified property is set
    internal bool IsSetLastModified()
    {
        return this._lastModified != null;
    }

    /// <summary>
    /// Gets and sets the property MemorySize.
    /// <para>
    /// The memory size, in MB, you configured for the function. Must be a multiple of 64
    /// MB.
    /// </para>
    /// </summary>
    public int MemorySize
    {
        // Returns 0 when the service did not supply a value; use IsSetMemorySize() to distinguish.
        get { return this._memorySize.GetValueOrDefault(); }
        set { this._memorySize = value; }
    }

    // Check to see if MemorySize property is set
    internal bool IsSetMemorySize()
    {
        return this._memorySize.HasValue;
    }

    /// <summary>
    /// Gets and sets the property Mode.
    /// <para>
    /// The type of the Lambda function you uploaded.
    /// </para>
    /// </summary>
    public Mode Mode
    {
        get { return this._mode; }
        set { this._mode = value; }
    }

    // Check to see if Mode property is set
    internal bool IsSetMode()
    {
        return this._mode != null;
    }

    /// <summary>
    /// Gets and sets the property Role.
    /// <para>
    /// The Amazon Resource Name (ARN) of the IAM role that Lambda assumes when it executes
    /// your function to access any other Amazon Web Services (AWS) resources.
    /// </para>
    /// </summary>
    public string Role
    {
        get { return this._role; }
        set { this._role = value; }
    }

    // Check to see if Role property is set
    internal bool IsSetRole()
    {
        return this._role != null;
    }

    /// <summary>
    /// Gets and sets the property Runtime.
    /// <para>
    /// The runtime environment for the Lambda function.
    /// </para>
    /// </summary>
    public Runtime Runtime
    {
        get { return this._runtime; }
        set { this._runtime = value; }
    }

    // Check to see if Runtime property is set
    internal bool IsSetRuntime()
    {
        return this._runtime != null;
    }

    /// <summary>
    /// Gets and sets the property Timeout.
    /// <para>
    /// The function execution time at which Lambda should terminate the function. Because
    /// the execution time has cost implications, we recommend you set this value based on
    /// your expected execution time. The default is 3 seconds.
    /// </para>
    /// </summary>
    public int Timeout
    {
        // Returns 0 when the service did not supply a value; use IsSetTimeout() to distinguish.
        get { return this._timeout.GetValueOrDefault(); }
        set { this._timeout = value; }
    }

    // Check to see if Timeout property is set
    internal bool IsSetTimeout()
    {
        return this._timeout.HasValue;
    }
}
}
| |
using System;
using System.Collections.Generic;
using Xunit;
using System.IO;
namespace HandlebarsDotNet.Test
{
public class PartialTests
{
[Fact]
public void BasicPartial()
{
    // Compile a template that references a named partial, then register the partial.
    var render = Handlebars.Compile("Hello, {{>person}}!");
    using (var sr = new StringReader("{{name}}"))
    {
        Handlebars.RegisterTemplate("person", Handlebars.Compile(sr));
    }

    var output = render(new { name = "Marc" });

    Assert.Equal("Hello, Marc!", output);
}
[Fact]
public void BasicPartialWithWhiteSpace()
{
    // Whitespace around the partial name inside {{> ... }} should be ignored.
    var render = Handlebars.Compile("Hello, {{> person }}!");
    using (var sr = new StringReader("{{name}}"))
    {
        Handlebars.RegisterTemplate("person", Handlebars.Compile(sr));
    }

    var output = render(new { name = "Marc" });

    Assert.Equal("Hello, Marc!", output);
}
[Fact]
public void BasicStringOnlyPartial()
{
    // Partials can be registered directly from a string, without pre-compiling.
    var render = Handlebars.Compile("Hello, {{>person}}!");
    Handlebars.RegisterTemplate("person", "{{name}}");

    var output = render(new { name = "Marc" });

    Assert.Equal("Hello, Marc!", output);
}
[Fact]
public void BasicPartialWithContext()
{
    // The argument after the partial name becomes the partial's context object.
    var render = Handlebars.Compile("Hello, {{>person leadDev}}!");
    using (var sr = new StringReader("{{name}}"))
    {
        Handlebars.RegisterTemplate("person", Handlebars.Compile(sr));
    }

    var model = new { leadDev = new { name = "Marc" } };

    Assert.Equal("Hello, Marc!", render(model));
}
[Fact]
public void BasicPartialWithStringParameter()
{
    // Inline hash parameters on a partial become variables inside it.
    var render = Handlebars.Compile("Hello, {{>person first='Pete'}}!");
    using (var sr = new StringReader("{{first}}"))
    {
        Handlebars.RegisterTemplate("person", Handlebars.Compile(sr));
    }

    Assert.Equal("Hello, Pete!", render(null));
}
[Fact]
public void BasicPartialWithMultipleStringParameters()
{
    // Both single- and double-quoted hash parameters are supported together.
    var render = Handlebars.Compile("Hello, {{>person first='Pete' last=\"Sampras\"}}!");
    using (var sr = new StringReader("{{first}} {{last}}"))
    {
        Handlebars.RegisterTemplate("person", Handlebars.Compile(sr));
    }

    Assert.Equal("Hello, Pete Sampras!", render(null));
}
[Fact]
public void BasicPartialWithContextParameter()
{
    // A hash parameter may be bound to a path into the caller's context.
    var render = Handlebars.Compile("Hello, {{>person first=leadDev.marc}}!");
    using (var sr = new StringReader("{{first.name}}"))
    {
        Handlebars.RegisterTemplate("person", Handlebars.Compile(sr));
    }

    var model = new
    {
        leadDev = new
        {
            marc = new { name = "Marc" }
        }
    };

    Assert.Equal("Hello, Marc!", render(model));
}
[Fact]
public void BasicPartialWithContextAndStringParameters()
{
    // Context-path and literal hash parameters can be mixed on one partial.
    var render = Handlebars.Compile("Hello, {{>person first=leadDev.marc last='Smith'}}!");
    using (var sr = new StringReader("{{first.name}} {{last}}"))
    {
        Handlebars.RegisterTemplate("person", Handlebars.Compile(sr));
    }

    var model = new
    {
        leadDev = new
        {
            marc = new { name = "Marc" }
        }
    };

    Assert.Equal("Hello, Marc Smith!", render(model));
}
[Fact]
public void BasicPartialWithTypedParameters()
{
    // Unquoted hash values keep their types (int, bool) when rendered.
    var render = Handlebars.Compile("Hello, {{>person first=1 last=true}}!");
    using (var sr = new StringReader("{{first}} {{last}}"))
    {
        Handlebars.RegisterTemplate("person", Handlebars.Compile(sr));
    }

    Assert.Equal("Hello, 1 True!", render(null));
}
[Fact]
public void BasicPartialWithSubExpressionParameters()
{
    // Hash parameter values may be sub-expressions, including nested ones with their own hash args.
    Handlebars.RegisterHelper("_", (output, context, arguments) =>
    {
        output.Write(arguments[0].ToString());
        if (arguments.Length > 1)
        {
            var hash = arguments[1] as Dictionary<string, object>;
            output.Write(hash["arg1"]);
        }
    });

    var render = Handlebars.Compile("Hello, {{>person first=(_ first arg1=(_ \"value\")) last=(_ last)}}!");
    using (var sr = new StringReader("{{first}} {{last}}"))
    {
        Handlebars.RegisterTemplate("person", Handlebars.Compile(sr));
    }

    Assert.Equal("Hello, 1value True!", render(new { first = 1, last = true }));
}
[Fact]
public void BasicPartialWithStringParameterIncludingExpressionChars()
{
    // Mustache-like characters inside a quoted parameter must pass through verbatim.
    var render = Handlebars.Compile("Hello, {{>person first='Pe ({~te~}) '}}!");
    using (var sr = new StringReader("{{first}}"))
    {
        Handlebars.RegisterTemplate("person", Handlebars.Compile(sr));
    }

    Assert.Equal("Hello, Pe ({~te~}) !", render(null));
}
[Fact]
public void DynamicPartial()
{
    // The partial name can come from a helper evaluated at render time.
    Handlebars.RegisterHelper("partialNameHelper", (writer, context, args) =>
    {
        writer.WriteSafeString("partialName");
    });

    using (var sr = new StringReader("world"))
    {
        Handlebars.RegisterTemplate("partialName", Handlebars.Compile(sr));
    }

    var render = Handlebars.Compile("Hello, {{> (partialNameHelper)}}!");

    Assert.Equal("Hello, world!", render(new { }));
}
[Fact]
public void DynamicPartialWithHelperArguments()
{
    // The name-producing helper may itself take positional and hash arguments.
    Handlebars.RegisterHelper("concat", (writer, context, args) =>
    {
        var hash = args[2] as Dictionary<string, object>;
        writer.WriteSafeString(string.Concat(args[0], args[1], hash["item1"], hash["item2"]));
    });

    using (var sr = new StringReader("world"))
    {
        Handlebars.RegisterTemplate("partialName", Handlebars.Compile(sr));
    }

    var render = Handlebars.Compile("Hello, {{> (concat 'par' 'tial' item1='Na' item2='me')}}!");

    Assert.Equal("Hello, world!", render(new { }));
}
[Fact]
public void DynamicPartialWithContext()
{
    // A dynamically-resolved partial can still receive an explicit context argument.
    Handlebars.RegisterHelper("lookup1", (output, context, arguments) =>
    {
        output.WriteSafeString(arguments[0]);
    });

    var render = Handlebars.Compile("Hello, {{> (lookup1 name) context }}!");
    using (var sr = new StringReader("{{first}} {{last}}"))
    {
        Handlebars.RegisterTemplate("test", Handlebars.Compile(sr));
    }

    var model = new
    {
        name = "test",
        context = new
        {
            first = "Marc",
            last = "Smith"
        }
    };

    Assert.Equal("Hello, Marc Smith!", render(model));
}
[Fact]
public void DynamicPartialWithParameters()
{
    // A dynamically-resolved partial can still receive inline hash parameters.
    Handlebars.RegisterHelper("lookup1", (output, context, arguments) =>
    {
        output.WriteSafeString(arguments[0]);
    });

    var render = Handlebars.Compile("Hello, {{> (lookup1 name) first='Marc' last='Smith' }}!");
    using (var sr = new StringReader("{{first}} {{last}}"))
    {
        Handlebars.RegisterTemplate("test", Handlebars.Compile(sr));
    }

    var model = new { name = "test" };

    Assert.Equal("Hello, Marc Smith!", render(model));
}
[Fact]
public void SuperfluousWhitespace()
{
    // Extra whitespace around the partial call ("{{ > person }}") must be
    // tolerated by the tokenizer.
    var render = Handlebars.Compile("Hello, {{ > person }}!");

    using (var partialReader = new StringReader("{{name}}"))
    {
        Handlebars.RegisterTemplate("person", Handlebars.Compile(partialReader));
    }

    var rendered = render(new
    {
        name = "Marc"
    });

    Assert.Equal("Hello, Marc!", rendered);
}
[Fact]
public void BasicPartialWithStringParametersAndImplicitContext()
{
    // The inline hash argument (lastName='Smith') overrides the same-named
    // member of the implicit context, while firstName falls through.
    var render = Handlebars.Compile("Hello, {{>person lastName='Smith'}}!");

    var model = new
    {
        firstName = "Marc",
        lastName = "Jones"
    };

    using (var partialReader = new StringReader("{{firstName}} {{lastName}}"))
    {
        Handlebars.RegisterTemplate("person", Handlebars.Compile(partialReader));
    }

    Assert.Equal("Hello, Marc Smith!", render(model));
}
[Fact]
public void BasicPartialWithEmptyParameterDoesNotFallback()
{
    // "lastName=test" references a member that does not exist on the model;
    // the partial must render it empty rather than fall back to the
    // context's own lastName.
    var render = Handlebars.Compile("Hello, {{>person lastName=test}}!");

    var model = new
    {
        firstName = "Marc",
        lastName = "Jones"
    };

    using (var partialReader = new StringReader("{{firstName}} {{lastName}}"))
    {
        Handlebars.RegisterTemplate("person", Handlebars.Compile(partialReader));
    }

    Assert.Equal("Hello, Marc !", render(model));
}
[Fact]
public void BasicPartialWithIncompleteChildContextDoesNotFallback()
{
    // The explicit child context (leadDev) lacks lastName; the partial must
    // not fall back to the parent context's lastName.
    var render = Handlebars.Compile("Hello, {{>person leadDev}}!");

    var model = new
    {
        firstName = "Pete",
        lastName = "Jones",
        leadDev = new
        {
            firstName = "Marc"
        }
    };

    using (var partialReader = new StringReader("{{firstName}} {{lastName}}"))
    {
        Handlebars.RegisterTemplate("person", Handlebars.Compile(partialReader));
    }

    Assert.Equal("Hello, Marc !", render(model));
}
[Fact]
public void BasicPartialWithCustomBlockHelper()
{
    // A partial whose body is wrapped in a custom block helper still sees
    // both the caller's context and the inline hash argument (title).
    var render = Handlebars.Compile("Hello, {{>person title='Mr.'}}!");

    var model = new
    {
        firstName = "Pete",
        lastName = "Jones",
    };

    Handlebars.RegisterHelper("block", (writer, options, context, parameters) =>
        options.Template(writer, context));

    using (var partialReader = new StringReader("{{#block}}{{title}} {{firstName}} {{lastName}}{{/block}}"))
    {
        Handlebars.RegisterTemplate("person", Handlebars.Compile(partialReader));
    }

    Assert.Equal("Hello, Mr. Pete Jones!", render(model));
}
[Fact]
public void BasicBlockPartial()
{
    // Block partial syntax: {{#>name}}fallback{{/name}}.
    string source = "Hello, {{#>person1}}friend{{/person1}}!";
    // Isolated environment so the "person1" registration below cannot leak
    // into other tests.
    var handlebars = Handlebars.Create();
    var template = handlebars.Compile(source);
    var data = new {
        firstName = "Pete",
        lastName = "Jones"
    };
    // While "person1" is unregistered, the inline block content is used as
    // the fallback.
    var result1 = template(data);
    Assert.Equal ("Hello, friend!", result1);
    var partialSource = "{{firstName}} {{lastName}}";
    using (var reader = new StringReader(partialSource)) {
        var partialTemplate = handlebars.Compile(reader);
        handlebars.RegisterTemplate("person1", partialTemplate);
    }
    // Re-rendering the same compiled template now picks up the registered
    // partial instead of the fallback.
    var result2 = template(data);
    Assert.Equal("Hello, Pete Jones!", result2);
}
[Fact]
public void BasicBlockPartialWithWhitespace()
{
    // Same as BasicBlockPartial, but with whitespace inside the opening tag
    // ("{{#> person1 }}"), which the parser must tolerate.
    string source = "Hello, {{#> person1 }}friend{{/person1}}!";
    var handlebars = Handlebars.Create();
    var template = handlebars.Compile(source);
    var data = new {
        firstName = "Pete",
        lastName = "Jones"
    };
    // Unregistered partial: inline block content is the fallback.
    var result1 = template(data);
    Assert.Equal ("Hello, friend!", result1);
    var partialSource = "{{firstName}} {{lastName}}";
    using (var reader = new StringReader(partialSource)) {
        var partialTemplate = handlebars.Compile(reader);
        handlebars.RegisterTemplate("person1", partialTemplate);
    }
    // After registration the same compiled template uses the partial.
    var result2 = template(data);
    Assert.Equal("Hello, Pete Jones!", result2);
}
[Fact]
public void BasicBlockPartialWithArgument()
{
    // Block partial with an inline hash argument (arg='Todd') that the
    // partial body reads via {{arg}}.
    string source = "Hello, {{#>person2 arg='Todd'}}friend{{/person2}}!";
    var handlebars = Handlebars.Create();
    var template = handlebars.Compile (source);
    var data = new {
        firstName = "Pete",
        lastName = "Jones"
    };
    // Unregistered partial: fallback block content is rendered.
    var result1 = template (data);
    Assert.Equal ("Hello, friend!", result1);
    var partialSource = "{{arg}}";
    using (var reader = new StringReader (partialSource)) {
        var partialTemplate = handlebars.Compile (reader);
        handlebars.RegisterTemplate ("person2", partialTemplate);
    }
    // After registration, the hash argument reaches the partial.
    var result2 = template (data);
    Assert.Equal ("Hello, Todd!", result2);
}
[Fact]
public void BasicBlockPartialWithArgumentAndWhitespace()
{
    // Same as BasicBlockPartialWithArgument, but with whitespace after the
    // "#>" marker ("{{#> person2 ...}}").
    string source = "Hello, {{#> person2 arg='Todd'}}friend{{/person2}}!";
    var handlebars = Handlebars.Create();
    var template = handlebars.Compile (source);
    var data = new {
        firstName = "Pete",
        lastName = "Jones"
    };
    // Unregistered partial: fallback block content is rendered.
    var result1 = template (data);
    Assert.Equal ("Hello, friend!", result1);
    var partialSource = "{{arg}}";
    using (var reader = new StringReader (partialSource)) {
        var partialTemplate = handlebars.Compile (reader);
        handlebars.RegisterTemplate ("person2", partialTemplate);
    }
    // After registration, the hash argument reaches the partial.
    var result2 = template (data);
    Assert.Equal ("Hello, Todd!", result2);
}
[Fact]
public void BlockPartialWithSpecialNamedPartial()
{
    // Inside a partial, {{> @partial-block }} renders the block content the
    // caller supplied between {{#>myPartial}} and {{/myPartial}}.
    var render = Handlebars.Compile("Well, {{#>myPartial}}some test{{/myPartial}} !");

    using (var partialReader = new StringReader("this is {{> @partial-block }} content"))
    {
        Handlebars.RegisterTemplate("myPartial", Handlebars.Compile(partialReader));
    }

    var rendered = render(new { });

    Assert.Equal("Well, this is some test content !", rendered);
}
[Fact]
public void BlockPartialWithNestedSpecialNamedPartial()
{
    // A block partial whose body itself invokes another block partial; each
    // level's {{> @partial-block}} must resolve to its own caller's content.
    string source = "Well, {{#>partial1}}some test{{/partial1}} !";
    var template = Handlebars.Compile(source);
    var partialSource1 = "this is {{> @partial-block }} content {{#>partial2}}works{{/partial2}} {{lastName}}";
    using (var reader = new StringReader(partialSource1))
    {
        var partialTemplate = Handlebars.Compile(reader);
        Handlebars.RegisterTemplate("partial1", partialTemplate);
    }
    var partialSource2 = "that {{> @partial-block}} great {{firstName}}";
    using (var reader = new StringReader(partialSource2))
    {
        var partialTemplate = Handlebars.Compile(reader);
        Handlebars.RegisterTemplate("partial2", partialTemplate);
    }
    // Both partials also read from the root context (firstName/lastName).
    var data = new {
        firstName = "Pete",
        lastName = "Jones"
    };
    var result = template(data);
    Assert.Equal("Well, this is some test content that works great Pete Jones !", result);
}
[Fact]
public void BlockPartialWithNestedSpecialNamedPartial2()
{
    // Nested block partials where the nesting happens in the top-level
    // template itself; expected expansion is A 1 B 3 C 4 D 2 E, i.e. each
    // partial wraps its caller's block content.
    string source = "A {{#>partial1}} B {{#>partial2}} {{VarC}} {{/partial2}} D {{/partial1}} E";
    var template = Handlebars.Compile(source);
    var partialSource1 = "1 {{> @partial-block }} 2";
    using (var reader = new StringReader(partialSource1))
    {
        var partialTemplate = Handlebars.Compile(reader);
        Handlebars.RegisterTemplate("partial1", partialTemplate);
    }
    var partialSource2 = "3 {{> @partial-block }} 4";
    using (var reader = new StringReader(partialSource2))
    {
        var partialTemplate = Handlebars.Compile(reader);
        Handlebars.RegisterTemplate("partial2", partialTemplate);
    }
    var data = new { VarC = "C" };
    var result = template(data);
    Assert.Equal("A 1 B 3 C 4 D 2 E", result);
}
[Fact]
public void TemplateWithSpecialNamedPartial()
{
    // @partial-block is only meaningful inside a partial; referencing it in a
    // top-level template must fail at render time, not compile time.
    var render = Handlebars.Compile("Single template referencing {{> @partial-block }} should throw runtime exception");

    var ex = Assert.Throws<HandlebarsRuntimeException>(() => render(new { }));

    Assert.Equal("Referenced partial name @partial-block could not be resolved", ex.Message);
}
/// <summary>
/// Test double for <see cref="IMissingPartialTemplateHandler"/>: instead of
/// throwing, it writes a recognizable marker string for any partial name
/// that could not be resolved.
/// </summary>
public class TestMissingPartialTemplateHandler : IMissingPartialTemplateHandler
{
    public void Handle(ICompiledHandlebarsConfiguration configuration, string partialName, in EncodedTextWriter textWriter)
    {
        textWriter.Write($"Partial Not Found: {partialName}");
    }
}
[Fact]
public void MissingPartialTemplateHandler()
{
    // A configured IMissingPartialTemplateHandler replaces the usual
    // "unresolved partial" runtime exception with custom output.
    var handlebars = Handlebars.Create(new HandlebarsConfiguration
    {
        MissingPartialTemplateHandler = new TestMissingPartialTemplateHandler()
    });

    var render = handlebars.Compile("Missing template should not throw exception: {{> missing }}");

    var rendered = render(new { });

    Assert.Equal("Missing template should not throw exception: Partial Not Found: missing", rendered);
}
[Fact]
public void SubExpressionPartial()
{
    // Uses the return-value helper API: the helper's return value (rather
    // than writer output) names the partial to render.
    var handlebars = Handlebars.Create();
    handlebars.RegisterHelper("partialNameHelper", (context, args) => "partialName");

    using (var partialReader = new StringReader("world"))
    {
        handlebars.RegisterTemplate("partialName", handlebars.Compile(partialReader));
    }

    var render = handlebars.Compile("Hello, {{> (partialNameHelper)}}!");

    Assert.Equal("Hello, world!", render(new { }));
}
}
}
| |
using UnityEngine;
using UnityEditor;
using System.Collections;
using System.Collections.Generic;
[CustomEditor(typeof(tk2dFont))]
public class tk2dFontEditor : Editor
{
    /// <summary>
    /// Shader used for committed font materials: the two-texture gradient
    /// shader when <paramref name="gradient"/> is true, otherwise the plain
    /// vertex-color shader.
    /// </summary>
    public Shader GetShader(bool gradient)
    {
        if (gradient) return Shader.Find("tk2d/Blend2TexVertexColor");
        else return Shader.Find("tk2d/BlendVertexColor");
    }

    /// <summary>
    /// Custom inspector for tk2dFont: draws settings, warns about compressed
    /// textures, and on "Commit..." (re)builds the font data asset, material,
    /// and all tk2dTextMesh instances in the scene.
    /// </summary>
    public override void OnInspectorGUI()
    {
        tk2dFont gen = (tk2dFont)target;
        if (gen.proxyFont)
        {
            // Proxy fonts are owned by a sprite collection; nothing editable here.
            GUILayout.Label("This font is managed by a Sprite Collection");
            return;
        }
        EditorGUILayout.BeginVertical();
        DrawDefaultInspector();
        gen.useTk2dCamera = EditorGUILayout.Toggle("Use tk2d Camera", gen.useTk2dCamera);
        if (gen.useTk2dCamera)
        {
            // Fixed values for the tk2d camera; scale factor becomes 1 (see ParseBMFont).
            gen.targetHeight = 1;
            gen.targetOrthoSize = 0.5f;
        }
        else
        {
            EditorGUI.indentLevel = EditorGUI.indentLevel + 1;
            gen.targetHeight = EditorGUILayout.IntField("Target Height", gen.targetHeight);
            gen.targetOrthoSize = EditorGUILayout.FloatField("Target Ortho Size", gen.targetOrthoSize);
            EditorGUI.indentLevel = EditorGUI.indentLevel - 1;
        }

        // Warning when texture is compressed
        if (gen.texture != null)
        {
            Texture2D tex = (Texture2D)gen.texture;
            if (tex && IsTextureCompressed(tex))
            {
                int buttonPressed;
                if ((buttonPressed = tk2dGuiUtility.InfoBoxWithButtons(
                    "Font texture appears to be compressed. " +
                    "Quality will be lost and the texture may appear blocky in game.\n" +
                    "Do you wish to change the format?",
                    tk2dGuiUtility.WarningLevel.Warning,
                    new string[] { "16bit", "Truecolor" }
                    )) != -1)
                {
                    if (buttonPressed == 0)
                    {
                        ConvertTextureToFormat(tex, TextureImporterFormat.Automatic16bit);
                    }
                    else
                    {
                        ConvertTextureToFormat(tex, TextureImporterFormat.AutomaticTruecolor);
                    }
                }
            }
        }

        // Warning when gradient texture is compressed
        if (gen.gradientTexture != null &&
            (gen.gradientTexture.format != TextureFormat.ARGB32 && gen.gradientTexture.format != TextureFormat.RGB24 && gen.gradientTexture.format != TextureFormat.RGBA32))
        {
            if (tk2dGuiUtility.InfoBoxWithButtons(
                "The gradient texture should be truecolor for best quality. " +
                "Current format is " + gen.gradientTexture.format.ToString() + ".",
                tk2dGuiUtility.WarningLevel.Warning,
                new string[] { "Fix" }
                ) != -1)
            {
                ConvertTextureToFormat(gen.gradientTexture, TextureImporterFormat.AutomaticTruecolor);
            }
        }

        if (GUILayout.Button("Commit..."))
        {
            if (gen.bmFont == null || gen.texture == null)
            {
                // Fixed grammar of the user-facing message ("Need an bmFont" -> "Need a bmFont").
                EditorUtility.DisplayDialog("BMFont", "Need a bmFont and texture bound to work", "Ok");
                return;
            }

            if (gen.material == null)
            {
                // First commit: create the material asset next to the font prefab.
                gen.material = new Material(GetShader(gen.gradientTexture != null));
                string materialPath = AssetDatabase.GetAssetPath(gen).Replace(".prefab", "material.mat");
                AssetDatabase.CreateAsset(gen.material, materialPath);
            }

            if (gen.data == null)
            {
                // First commit: create the tk2dFontData prefab that holds the
                // parsed glyph data.
                string bmFontPath = AssetDatabase.GetAssetPath(gen).Replace(".prefab", "data.prefab");

                GameObject go = new GameObject();
                go.AddComponent<tk2dFontData>();
                tk2dEditorUtility.SetGameObjectActive(go, false);

#if (UNITY_3_0 || UNITY_3_1 || UNITY_3_2 || UNITY_3_3 || UNITY_3_4)
                Object p = EditorUtility.CreateEmptyPrefab(bmFontPath);
                EditorUtility.ReplacePrefab(go, p);
#else
                Object p = PrefabUtility.CreateEmptyPrefab(bmFontPath);
                PrefabUtility.ReplacePrefab(go, p);
#endif
                GameObject.DestroyImmediate(go);
                AssetDatabase.SaveAssets();

                gen.data = AssetDatabase.LoadAssetAtPath(bmFontPath, typeof(tk2dFontData)) as tk2dFontData;
            }

            ParseBMFont(AssetDatabase.GetAssetPath(gen.bmFont), gen.data, gen);

            if (gen.manageMaterial)
            {
                // Keep the managed material in sync with the current shader
                // and texture assignments; only mark dirty on real changes.
                Shader s = GetShader(gen.gradientTexture != null);
                if (gen.material.shader != s)
                {
                    gen.material.shader = s;
                    EditorUtility.SetDirty(gen.material);
                }
                if (gen.material.mainTexture != gen.texture)
                {
                    gen.material.mainTexture = gen.texture;
                    EditorUtility.SetDirty(gen.material);
                }
                if (gen.gradientTexture != null && gen.gradientTexture != gen.material.GetTexture("_GradientTex"))
                {
                    gen.material.SetTexture("_GradientTex", gen.gradientTexture);
                    EditorUtility.SetDirty(gen.material);
                }
            }

            gen.data.version = tk2dFontData.CURRENT_VERSION;

            gen.data.material = gen.material;
            gen.data.textureGradients = gen.gradientTexture != null;
            gen.data.gradientCount = gen.gradientCount;
            gen.data.gradientTexture = gen.gradientTexture;

            gen.data.invOrthoSize = 1.0f / gen.targetOrthoSize;
            gen.data.halfTargetHeight = gen.targetHeight * 0.5f;

            // Rebuild assets already present in the scene
            tk2dTextMesh[] sprs = Resources.FindObjectsOfTypeAll(typeof(tk2dTextMesh)) as tk2dTextMesh[];
            foreach (tk2dTextMesh spr in sprs)
            {
                spr.Init(true);
            }
            EditorUtility.SetDirty(gen);
            EditorUtility.SetDirty(gen.data);

            // update index
            tk2dEditorUtility.GetOrCreateIndex().AddOrUpdateFont(gen);
            tk2dEditorUtility.CommitIndex();
        }

        EditorGUILayout.EndVertical();

        GUILayout.Space(64);
    }

    /// <summary>
    /// Treats every format outside the known uncompressed set as compressed.
    /// </summary>
    bool IsTextureCompressed(Texture2D texture)
    {
        if (texture.format == TextureFormat.ARGB32
            || texture.format == TextureFormat.ARGB4444
            || texture.format == TextureFormat.Alpha8
            || texture.format == TextureFormat.RGB24
            || texture.format == TextureFormat.RGB565
            || texture.format == TextureFormat.RGBA32)
        {
            return false;
        }
        else
        {
            return true;
        }
    }

    /// <summary>
    /// Changes the import format of the texture's source asset and reimports it.
    /// No-op for textures that have no asset path (e.g. runtime-created).
    /// </summary>
    void ConvertTextureToFormat(Texture2D texture, TextureImporterFormat format)
    {
        string assetPath = AssetDatabase.GetAssetPath(texture);
        if (assetPath != "")
        {
            // make sure the source texture is npot and readable, and uncompressed
            TextureImporter importer = (TextureImporter)TextureImporter.GetAtPath(assetPath);
            if (importer.textureFormat != format)
                importer.textureFormat = format;

            AssetDatabase.ImportAsset(assetPath);
        }
    }

    /// <summary>
    /// Parses the bound BMFont file and fills <paramref name="fontData"/>,
    /// scaling glyph metrics to the target camera setup.
    /// </summary>
    /// <returns>true when the font was parsed and built successfully.</returns>
    bool ParseBMFont(string path, tk2dFontData fontData, tk2dFont source)
    {
        float scale = 2.0f * source.targetOrthoSize / source.targetHeight;
        if (source.useTk2dCamera) scale = 1.0f;
        tk2dEditor.Font.Info fontInfo = tk2dEditor.Font.Builder.ParseBMFont(path);
        if (fontInfo != null)
            return tk2dEditor.Font.Builder.BuildFont(fontInfo, fontData, scale, source.charPadX, source.dupeCaps, source.flipTextureY, source.gradientTexture, source.gradientCount);
        else
            return false;
    }

    /// <summary>
    /// Menu entry that creates a new font prefab asset and selects it.
    /// </summary>
    [MenuItem("Assets/Create/tk2d/Font", false, 11000)]
    static void DoBMFontCreate()
    {
        string path = tk2dEditorUtility.CreateNewPrefab("Font");
        if (path.Length != 0)
        {
            GameObject go = new GameObject();
            tk2dFont font = go.AddComponent<tk2dFont>();
            font.manageMaterial = true;
            tk2dEditorUtility.SetGameObjectActive(go, false);
#if (UNITY_3_0 || UNITY_3_1 || UNITY_3_2 || UNITY_3_3 || UNITY_3_4)
            Object p = EditorUtility.CreateEmptyPrefab(path);
            EditorUtility.ReplacePrefab(go, p, ReplacePrefabOptions.ConnectToPrefab);
#else
            Object p = PrefabUtility.CreateEmptyPrefab(path);
            PrefabUtility.ReplacePrefab(go, p, ReplacePrefabOptions.ConnectToPrefab);
#endif
            GameObject.DestroyImmediate(go);

            // Select object
            Selection.activeObject = AssetDatabase.LoadAssetAtPath(path, typeof(UnityEngine.Object));
        }
    }
}
| |
//-----------------------------------------------------------------------------
//
// <copyright file="ResourceContainer.cs" company="Microsoft">
// Copyright (C) Microsoft Corporation. All rights reserved.
// </copyright>
//
// Description:
// ResourceContainer is an implementation of the abstract Package class.
// It contains nontrivial overrides for GetPartCore and Exists.
// Many of the methods on Package are not applicable to loading application
// resources, so the ResourceContainer implementations of these methods throw
// the NotSupportedException.
//
// History:
// 10/04/2004: [....]: Initial creation.
// 03/15/2005: BruceMac: Remove DisposeCore() override since there are no resources
// to release.
// 06/16/2005: WeibZ, Remove DefaultResourceManager, and change the role of
// ResourceManagerWrapper.
//-----------------------------------------------------------------------------
using System;
using System.IO.Packaging;
using System.IO;
using System.Collections.Generic;
using System.Windows.Resources;
using System.Resources;
using System.Reflection;
using System.Globalization;
using MS.Internal.PresentationFramework; // SafeSecurityHelper
using System.Windows;
using System.Windows.Navigation;
using MS.Internal.Resources;
using System.Windows.Interop;
using System.Security;
namespace MS.Internal.AppModel
{
// <summary>
// ResourceContainer is an implementation of the abstract Package class.
// It contains nontrivial overrides for GetPartCore and Exists.
// Many of the methods on Package are not applicable to loading application
// resources, so the ResourceContainer implementations of these methods throw
// the NotSupportedException.
// </summary>
internal class ResourceContainer : System.IO.Packaging.Package
{
    //------------------------------------------------------
    //
    //  Static Methods
    //
    //------------------------------------------------------

    #region Static Methods

    /// <summary>
    /// Lazily-created wrapper around the resource manager of the application's
    /// entry (resource) assembly. Remains null until Application.ResourceAssembly
    /// is available.
    /// </summary>
    internal static ResourceManagerWrapper ApplicationResourceManagerWrapper
    {
        get
        {
            if (_applicationResourceManagerWrapper == null)
            {
                // load main executable assembly
                Assembly asmApplication = Application.ResourceAssembly;
                if (asmApplication != null)
                {
                    _applicationResourceManagerWrapper = new ResourceManagerWrapper(asmApplication);
                }
            }
            return _applicationResourceManagerWrapper;
        }
    }

    /// <summary>
    /// The FileShare mode to use for opening loose files. Currently this defaults to FileShare.Read.
    /// Today it is not changed. If we decide that this should change in the future we can easily add
    /// a setter here.
    /// </summary>
    internal static FileShare FileShare
    {
        get
        {
            return _fileShare;
        }
    }

    #endregion

    //------------------------------------------------------
    //
    //  Public Constructors
    //
    //------------------------------------------------------

    #region Public Constructors

    /// <summary>
    /// Default constructor. The package is read-only: resources can never
    /// be written through this container.
    /// </summary>
    internal ResourceContainer() : base(FileAccess.Read)
    {
    }

    #endregion

    //------------------------------------------------------
    //
    //  Public Properties
    //
    //------------------------------------------------------
    // None
    //------------------------------------------------------
    //
    //  Public Methods
    //
    //------------------------------------------------------

    #region Public Methods

    /// <summary>
    /// This method always returns true. This is because ResourceManager does not have a
    /// simple way to check if a resource exists without loading the resource stream (or failing to)
    /// so checking if a resource exists would be a very expensive task.
    /// A part will later be constructed and returned by GetPart(). This part class contains
    /// a ResourceManager which may or may not contain the requested resource. When someone
    /// calls GetStream() on PackagePart then we will attempt to get the stream for the named resource
    /// and potentially fail.
    /// </summary>
    /// <param name="uri"></param>
    /// <returns>Always true; existence is only discovered at stream-open time.</returns>
    public override bool PartExists(Uri uri)
    {
        return true;
    }

    #endregion

    //------------------------------------------------------
    //
    //  Public Events
    //
    //------------------------------------------------------
    // None
    //------------------------------------------------------
    //
    //  Internal Constructors
    //
    //------------------------------------------------------
    // None
    //------------------------------------------------------
    //
    //  Internal Properties
    //
    //------------------------------------------------------

    #region Internal Members

    // File extensions recognized for markup parts.
    internal const string XamlExt = ".xaml";
    internal const string BamlExt = ".baml";

    #endregion

    //------------------------------------------------------
    //
    //  Internal Methods
    //
    //------------------------------------------------------
    // None
    //------------------------------------------------------
    //
    //  Internal Events
    //
    //------------------------------------------------------
    // None
    //------------------------------------------------------
    //
    //  Protected Constructors
    //
    //------------------------------------------------------
    // None
    //------------------------------------------------------
    //
    //  Protected Methods
    //
    //------------------------------------------------------

    #region Protected Methods

    /// <summary>
    /// This method creates a part containing the name of the resource and
    /// the resource manager that should contain it. If the resource manager
    /// does not contain the requested part then when GetStream() is called on
    /// the part it will return null.
    /// </summary>
    /// <param name="uri"></param>
    /// <returns>A ContentFilePart for known loose content files, otherwise a ResourcePart.</returns>
    /// <SecurityNote>
    /// 1. Critical - because calling add_AssemblyLoad
    ///    Safe - because only hooks up specific internal event handler only applicable to non-browser hosted scenario
    /// 2. Critical - because creating a new ResourcePart is critical, as the ResourceManagerWrapper
    ///    that it uses has an Assembly instance that is SecurityCritical data for accessing internal types.
    ///    Safe - because a ResourceManagerWrapper is being created based on a Uri name that gets mapped
    ///    to an assembly from which a stream for the Uri was created and GetResourceManagerWrapper
    ///    guarantees that given a Uri, the stream is always created from that mapped Assembly
    /// </SecurityNote>
    [SecurityCritical, SecurityTreatAsSafe]
    protected override PackagePart GetPartCore(Uri uri)
    {
        string partName;
        bool isContentFile;

        // AppDomain.AssemblyLoad event handler for standalone apps. This is added specifically for designer (Sparkle) scenario.
        // We use the assembly name to fetch the cached resource manager. With this mechanism we will still get resource from the
        // old version dll when a newer one is loaded. So whenever the AssemblyLoad event is fired, we will need to update the cache
        // with the newly loaded assembly. This is currently only for designer so not needed for browser hosted apps.
        // Attach the event handler before the first time we get the ResourceManagerWrapper.
        if ((! assemblyLoadhandlerAttached) && (! BrowserInteropHelper.IsBrowserHosted))
        {
            AppDomain.CurrentDomain.AssemblyLoad += new AssemblyLoadEventHandler(OnAssemblyLoadEventHandler);
            assemblyLoadhandlerAttached = true;
        }

        ResourceManagerWrapper rmWrapper = GetResourceManagerWrapper(uri, out partName, out isContentFile);

        // If the part name was specified as Content at compile time then we will try to load
        // the file directly. Otherwise we assume the user is looking for a resource.
        if (isContentFile)
        {
            return new ContentFilePart(this, uri);
        }
        else
        {
            // Uri maps to a resource stream.
            // Make sure the resource id is exactly same as the one we used to create Resource
            // at compile time.
            partName = ResourceIDHelper.GetResourceIDFromRelativePath(partName);

            return new ResourcePart(this, uri, partName, rmWrapper);
        }
    }

    #endregion

    //------------------------------------------------------
    //
    //  Private Methods
    //
    //------------------------------------------------------

    #region Private Methods

    // AppDomain.AssemblyLoad event handler. Check whether the assembly's resource manager has
    // been added to the cache. If it has, we need to update the cache with the newly loaded dll.
    private void OnAssemblyLoadEventHandler(object sender, AssemblyLoadEventArgs args)
    {
        Assembly assembly = args.LoadedAssembly;

        // This is specific for designer (Sparkle) scenario: rebuild and reload dll using Load(Byte[]).
        // We do not care about assemblies loaded into the reflection-only context or the GACed assemblies.
        // For example, in Sparkle whenever a project is built all dependent assemblies will be loaded reflection only.
        // We do not care about those. Only when an assembly is loaded into the execution context, we will need to update the cache.
        if ((! assembly.ReflectionOnly) && (! assembly.GlobalAssemblyCache))
        {
            AssemblyName assemblyInfo = new AssemblyName(assembly.FullName);

            // Cache keys are lower-cased; see GetResourceManagerWrapper.
            string assemblyName = assemblyInfo.Name.ToLowerInvariant();

            string assemblyKey = string.Empty;

            string key = assemblyName;

            // Check if this newly loaded assembly is in the cache. If so, update the cache.
            // If it is not in cache, do not do anything. It will be added on demand.
            // The key could be Name, Name + Version, Name + PublicKeyToken, or Name + Version + PublicKeyToken.
            // Otherwise, update the cache with the newly loaded dll.

            // First check Name.
            UpdateCachedRMW(key, args.LoadedAssembly);

            string assemblyVersion = assemblyInfo.Version.ToString();

            if (!String.IsNullOrEmpty(assemblyVersion))
            {
                key = key + assemblyVersion;

                // Check Name + Version
                UpdateCachedRMW(key, args.LoadedAssembly);
            }

            // Hex-encode the public key token the same way the cache key is built.
            byte[] reqKeyToken = assemblyInfo.GetPublicKeyToken();
            for (int i = 0; i < reqKeyToken.Length; i++)
            {
                assemblyKey += reqKeyToken[i].ToString("x", NumberFormatInfo.InvariantInfo);
            }

            if (!String.IsNullOrEmpty(assemblyKey))
            {
                key = key + assemblyKey;

                // Check Name + Version + KeyToken
                UpdateCachedRMW(key, args.LoadedAssembly);

                key = assemblyName + assemblyKey;

                // Check Name + KeyToken
                UpdateCachedRMW(key, args.LoadedAssembly);
            }
        }
    }

    // Swaps the assembly held by an already-cached ResourceManagerWrapper for the
    // newly loaded one; no-op when the key is not cached.
    private void UpdateCachedRMW(string key, Assembly assembly)
    {
        if (_registeredResourceManagers.ContainsKey(key))
        {
            // Update the ResourceManagerWrapper with the new assembly.
            // Note Package caches Part and Part holds on to ResourceManagerWrapper. Package does not provide a way for
            // us to update their cache, so we update the assembly that the ResourceManagerWrapper holds on to. This way the
            // Part cached in the Package class can reference the new dll too.
            _registeredResourceManagers[key].Assembly = assembly;
        }
    }

    /// <summary>
    /// Searches the available ResourceManagerWrapper list for one that matches the given Uri.
    /// It could be either a ResourceManagerWrapper for a specific library assembly or the Application
    /// main assembly. Package enforces that all Uri will be correctly formatted.
    /// </summary>
    /// <param name="uri">Assumed to be relative</param>
    /// <param name="partName">The name of the file in the resource manager</param>
    /// <param name="isContentFile">A flag to indicate that this path is a known loose file at compile time</param>
    /// <returns>The wrapper for the assembly the Uri maps to; null when the part is a content file.</returns>
    private ResourceManagerWrapper GetResourceManagerWrapper(Uri uri, out string partName, out bool isContentFile)
    {
        string assemblyName;
        string assemblyVersion;
        string assemblyKey;
        ResourceManagerWrapper rmwResult = ApplicationResourceManagerWrapper;

        isContentFile = false;

        BaseUriHelper.GetAssemblyNameAndPart(uri, out partName, out assemblyName, out assemblyVersion, out assemblyKey);

        if (!String.IsNullOrEmpty(assemblyName))
        {
            string key = assemblyName + assemblyVersion + assemblyKey;

            _registeredResourceManagers.TryGetValue(key.ToLowerInvariant(), out rmwResult);

            // first time. Add this to the hash table
            if (rmwResult == null)
            {
                Assembly assembly;

                assembly = BaseUriHelper.GetLoadedAssembly(assemblyName, assemblyVersion, assemblyKey);

                if (assembly.Equals(Application.ResourceAssembly))
                {
                    // This Uri maps to Application Entry assembly even though it has ";component".
                    rmwResult = ApplicationResourceManagerWrapper;
                }
                else
                {
                    rmwResult = new ResourceManagerWrapper(assembly);
                }

                _registeredResourceManagers[key.ToLowerInvariant()] = rmwResult;
            }
        }

        if ((rmwResult == ApplicationResourceManagerWrapper))
        {
            if (rmwResult != null)
            {
                // If this is not a resource from a component then it might be
                // a content file and not an application resource.
                if (ContentFileHelper.IsContentFile(partName))
                {
                    isContentFile = true;
                    rmwResult = null;
                }
            }
            else
            {
                // Throw when Application.ResourceAssembly is null.
                throw new IOException(SR.Get(SRID.EntryAssemblyIsNull));
            }
        }

        return rmwResult;
    }

    #endregion

    //------------------------------------------------------
    //
    //  Private Fields
    //
    //------------------------------------------------------

    #region Private Members

    // Cache of per-assembly resource manager wrappers, keyed by lower-cased
    // "name[version][publickeytoken]".
    private static Dictionary<string, ResourceManagerWrapper> _registeredResourceManagers = new Dictionary<string, ResourceManagerWrapper>();
    private static ResourceManagerWrapper _applicationResourceManagerWrapper = null;
    private static FileShare _fileShare = FileShare.Read;

    // Guards against attaching the AssemblyLoad handler more than once.
    private static bool assemblyLoadhandlerAttached = false;

    #endregion Private Members

    //------------------------------------------------------
    //
    //  Uninteresting (but required) overrides
    //
    //------------------------------------------------------

    #region Uninteresting (but required) overrides

    // The container is read-only; creation, deletion, enumeration and flushing
    // of parts are not supported.
    protected override PackagePart CreatePartCore(Uri uri, string contentType, CompressionOption compressionOption)
    {
        return null;
    }

    protected override void DeletePartCore(Uri uri)
    {
        throw new NotSupportedException();
    }

    protected override PackagePart[] GetPartsCore()
    {
        throw new NotSupportedException();
    }

    protected override void FlushCore()
    {
        throw new NotSupportedException();
    }

    #endregion
}
}
| |
//! \file ImageWCG.cs
//! \date Sat Jul 19 23:07:32 2014
//! \brief Liar-soft WCG image format implementation.
//
// Copyright (C) 2014-2016 by morkt
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
using System;
using System.IO;
using System.ComponentModel.Composition;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Collections.Generic;
using System.Text;
using GameRes.Utility;
namespace GameRes.Formats.Liar
{
[Export(typeof(ImageFormat))]
public class WcgFormat : ImageFormat
{
public override string Tag { get { return "WCG"; } }
public override string Description { get { return "Liar-soft proprietary image format"; } }
// Little-endian bytes 'W','G',0x71,0x02.
public override uint Signature { get { return 0x02714757; } }
public override bool CanWrite { get { return true; } }

public WcgFormat ()
{
    // Two accepted signature variants (the byte at offset 3 is 0x02 or 0xF2).
    Signatures = new uint[] { 0x02714757, 0xF2714757 };
}
/// <summary>
/// Validates the "WG" magic bytes and header flags, then reads image
/// dimensions from offset 8.  Returns null when the header does not match.
/// </summary>
public override ImageMetaData ReadMetaData (IBinaryStream file)
{
    // 'W', 'G'
    if (file.ReadByte() != 0x57 || file.ReadByte() != 0x47)
        return null;
    uint flags = file.ReadUInt16();
    // Low nibble of the flags word must be 1, followed by bytes 0x20, 0x00.
    if ((flags & 0x0f) != 1 || file.ReadByte() != 0x20 || file.ReadByte() != 0)
        return null;
    file.Position = 8;
    return new ImageMetaData
    {
        Width  = file.ReadUInt32(),
        Height = file.ReadUInt32(),
        BPP    = 32,
    };
}
/// <summary>
/// Decodes the WCG stream into a 32-bit BGRA bitmap.
/// </summary>
public override ImageData Read (IBinaryStream file, ImageMetaData info)
{
    uint pixel_count = info.Width * info.Height;
    using (var reader = new Reader (file, pixel_count))
    {
        reader.Unpack();
        return ImageData.Create (info, PixelFormats.Bgra32, null, reader.Data);
    }
}
/// <summary>
/// Encode <paramref name="image"/> as WCG into <paramref name="file"/>.
/// The encoder seeks backwards to patch section headers, so a non-seekable
/// destination is staged through an in-memory buffer first.
/// </summary>
public override void Write (Stream file, ImageData image)
{
    if (file.CanSeek)
    {
        // Seekable destination: encode directly into it.
        using (var writer = new Writer (file, image.Bitmap))
            writer.Pack();
        return;
    }
    // Non-seekable destination: encode into a temporary buffer, then copy.
    using (var buffer = new MemoryStream())
    {
        using (var writer = new Writer (buffer, image.Bitmap))
            writer.Pack();
        buffer.Position = 0;
        buffer.CopyTo (file);
    }
}
/// <summary>
/// Decompresses WCG pixel data.  The image is stored as two independently
/// compressed 16-bit planes: the first pass writes words at byte offset 2 of
/// each 32-bit pixel (R/A bytes), the second at offset 0 (B/G bytes).
/// </summary>
internal sealed class Reader : IDisposable
{
    private byte[] m_data;           // decoded BGRA output, 4 bytes per pixel
    private IBinaryStream m_input;
    private MsbBitStream m_bits;     // MSB-first bit reader over m_input
    private uint m_input_size;
    private ushort[] m_index;        // palette of 16-bit words for the current plane
    private uint m_next_ptr;         // offset of the next plane's section header
    private uint m_next_size;        // bytes remaining after the current section
    private uint m_src;
    private uint m_src_size;
    private int m_dst_size;          // words (pixels) left to emit in the current plane
    private int edi;                 // write offset into m_data, advances by 4
    private int m_index_length_limit;

    public byte[] Data { get { return m_data; } }

    public Reader (IBinaryStream file, uint pixel_size)
    {
        m_data = new byte[pixel_size*4];
        m_input_size = (uint)file.Length;
        m_input = file;
        // 'true' leaves the underlying stream open when m_bits is disposed.
        m_bits = new MsbBitStream (file.AsStream, true);
    }

    /// <summary>
    /// Decode both planes, then un-invert the stored alpha channel.
    /// </summary>
    public void Unpack ()
    {
        // Plane sections start right after the 16-byte file header.
        m_next_ptr = 16;
        m_next_size = m_input_size-16;
        if (Unpack (2))      // R/A plane first ...
            Unpack (0);      // ... then B/G plane
        // Alpha is stored inverted (see Writer.Pack's 0xff00 mask); flip it back.
        for (uint i = 3; i < m_data.Length; i += 4)
            m_data[i] = (byte)~m_data[i];
    }

    /// <summary>
    /// Decode one plane into m_data starting at byte <paramref name="offset"/>.
    /// Returns false when the compressed bit stream is malformed.
    /// </summary>
    private bool Unpack (int offset)
    {
        m_src = m_next_ptr;
        m_src_size = m_next_size;
        m_dst_size = (int)(m_data.Length / 4);   // one 16-bit word per pixel
        if (m_src_size < 12)
            throw new InvalidFormatException ("Invalid file size");
        m_src_size -= 12;
        m_input.Position = m_next_ptr;
        // Section header: unpacked byte count, compressed byte count, palette size.
        int unpacked_size = m_input.ReadInt32();
        uint data_size = m_input.ReadUInt32();
        uint index_size = m_input.ReadUInt16(); // at offset 8 within the section header
        if (unpacked_size != m_dst_size*2)
            throw new InvalidFormatException ("Invalid image size");
        if (0 == index_size || index_size*2 > m_src_size)
            throw new InvalidFormatException ("Invalid palette size");
        m_src_size -= index_size*2;
        if (data_size > m_src_size)
            throw new InvalidFormatException ("Invalid compressed data size");
        uint data_pos = m_src + index_size*2 + 12;
        edi = offset;
        // Remember where the next plane's section begins before decoding this one.
        m_next_size = m_src_size - data_size;
        m_next_ptr = data_pos + data_size;
        m_src_size = data_size;
        return DecodeStream (data_pos, index_size);
    }

    // Read the palette of 16-bit words that follows the section header.
    void ReadIndex (uint index_size)
    {
        m_input.Position = m_src+12;
        m_index = new ushort[index_size];
        for (int i = 0; i < index_size; ++i)
            m_index[i] = m_input.ReadUInt16();
    }

    // Decode the bit stream: emits runs of palette entries into m_data.
    bool DecodeStream (uint data_pos, uint index_size)
    {
        ReadIndex (index_size);
        m_input.Position = data_pos;
        m_bits.Reset();
        // Small palettes use 3-bit length codes, large ones 4-bit
        // (mirrors Writer.Pack's small_index logic).
        bool small_index = index_size < 0x1002;
        m_index_length_limit = small_index ? 6 : 14;
        int index_bit_length = small_index ? 3 : 4;
        while (m_dst_size > 0)
        {
            int dst_count = 1;
            int index_length = m_bits.GetBits (index_bit_length);
            if (0 == index_length)
            {
                // A zero length code introduces a run of 2..17 repetitions.
                dst_count = m_bits.GetBits (4) + 2;
                index_length = m_bits.GetBits (index_bit_length);
            }
            if (0 == index_length)
                return false;
            int index = GetIndex (index_length);
            if (index >= index_size)
                return false;
            if (dst_count > m_dst_size)
                return false;
            m_dst_size -= dst_count;
            ushort word = m_index[index];
            do {
                // Words are written 4 bytes apart so the two planes interleave.
                LittleEndian.Pack (word, m_data, edi);
                edi += 4;
            } while (0 != --dst_count);
        }
        return true;
    }

    // Decode a variable-length palette index; 'count' is the bit-length code.
    int GetIndex (int count)
    {
        if (0 == --count)
            return m_bits.GetNextBit();
        if (count < m_index_length_limit)
            return 1 << count | m_bits.GetBits (count);
        // Escape path: the length is extended in unary beyond the limit.
        while (0 != m_bits.GetNextBit())
        {
            if (count >= 0x10)
                throw new InvalidFormatException ("Invalid index count");
            ++count;
        }
        return 1 << count | m_bits.GetBits (count);
    }

    #region IDisposable Members
    bool disposed = false;
    public void Dispose ()
    {
        if (!disposed)
        {
            // Only the bit reader is owned here; m_input belongs to the caller.
            m_bits.Dispose();
            disposed = true;
        }
        GC.SuppressFinalize (this);
    }
    #endregion
}
/// <summary>
/// WCG encoder.  Splits each 32-bit BGRA pixel into two 16-bit planes and
/// compresses each plane as a frequency-sorted palette followed by a bit
/// stream of run-length/palette-index codes (inverse of Reader above).
/// </summary>
private class Writer : IDisposable
{
    private BinaryWriter m_out;
    private uint m_width;
    private uint m_height;
    private uint m_pixels;
    private byte[] m_data;               // source pixels, BGRA, 4 bytes each
    Dictionary<ushort, ushort> m_index = new Dictionary<ushort, ushort>();
    private uint m_base_length;          // bit width of an index-length code
    private uint m_base_index_length;    // escape value for long index codes
    private int m_bits;                  // pending output bits; 1 is the empty sentinel

    public Writer (Stream stream, BitmapSource bitmap)
    {
        m_width = (uint)bitmap.PixelWidth;
        m_height = (uint)bitmap.PixelHeight;
        m_pixels = m_width*m_height;
        // Normalize the source to Bgra32 so pixel extraction below is uniform.
        if (bitmap.Format != PixelFormats.Bgra32)
        {
            var converted_bitmap = new FormatConvertedBitmap();
            converted_bitmap.BeginInit();
            converted_bitmap.Source = bitmap;
            converted_bitmap.DestinationFormat = PixelFormats.Bgra32;
            converted_bitmap.EndInit();
            bitmap = converted_bitmap;
        }
        m_data = new byte[m_pixels*4];
        bitmap.CopyPixels (m_data, bitmap.PixelWidth*4, 0);
        // leaveOpen: the destination stream is owned by the caller.
        m_out = new BinaryWriter (stream, Encoding.ASCII, true);
    }

    /// <summary>Write the 16-byte WCG header, then both compressed planes.</summary>
    public void Pack ()
    {
        byte[] header = { (byte)'W', (byte)'G', 0x71, 2, 0x20, 0, 0, 0x40 };
        m_out.Write (header, 0, header.Length);
        m_out.Write (m_width);
        m_out.Write (m_height);
        // R/A plane first (alpha byte inverted via the 0xff00 mask), then B/G.
        Pack (1, 0xff00);
        Pack (0, 0);
    }

    // Fetch the 16-bit word at word offset 'offset' from the pixel buffer.
    ushort GetWord (int offset)
    {
        return (ushort)(m_data[offset*2] | m_data[offset*2+1] << 8);
    }

    /// <summary>
    /// Compress one plane.  'data' is the starting word offset into m_data
    /// (0 or 1); 'mask' is XOR'ed into every word before palette lookup.
    /// </summary>
    private void Pack (int data, ushort mask)
    {
        // Reserve 12 bytes for the section header; it is patched at the end.
        var header_pos = m_out.Seek (0, SeekOrigin.Current);
        m_out.Seek (12, SeekOrigin.Current);
        BuildIndex (data, mask);
        // Code widths must match what Reader.DecodeStream expects.
        bool small_index = m_index.Count < 0x1002;
        m_base_length = small_index ? 3u : 4u;
        m_base_index_length = small_index ? 7u : 15u;
        m_bits = 1;
        // encode: run-length + palette-index codes
        for (uint i = 0; i < m_pixels;)
        {
            ushort word = GetWord (data);
            data += 2;
            ++i;
            ushort color = m_index[(ushort)(word^mask)];
            uint count = 1;
            // Gather a run of identical words, capped at 17 (0x11).
            while (i < m_pixels)
            {
                if (word != GetWord (data))
                    break;
                ++count;
                data += 2;
                ++i;
                if (0x11 == count)
                    break;
            }
            if (count > 1)
            {
                // A zero length code followed by (count-2) announces a run.
                PutBits (m_base_length, 0);
                PutBits (4, count-2);
            }
            PutIndex (color);
        }
        Flush();
        // Go back and fill in the section header with the now-known sizes.
        var end_pos = m_out.Seek (0, SeekOrigin.Current);
        uint data_size = (uint)(end_pos - header_pos - 12 - m_index.Count*2);
        m_out.Seek ((int)header_pos, SeekOrigin.Begin);
        m_out.Write (m_pixels*2u);
        m_out.Write (data_size);
        m_out.Write ((ushort)m_index.Count);
        m_out.Write ((ushort)(small_index ? 7 : 14)); // 0x0e
        m_out.Seek ((int)end_pos, SeekOrigin.Begin);
    }

    // Build the palette (most frequent words first) and write it to the output.
    void BuildIndex (int data, ushort mask)
    {
        m_index.Clear();
        uint[] freq_table = new uint[65536];
        for (var data_end = data + m_pixels*2; data < data_end; data += 2)
            freq_table[GetWord (data)^mask]++;
        var index = new List<ushort>();
        for (int i = 0; i < freq_table.Length; ++i)
        {
            if (0 != freq_table[i])
                index.Add ((ushort)i);
        }
        // Descending frequency, so the most common words get the shortest codes.
        index.Sort ((a, b) => freq_table[a] < freq_table[b] ? 1 : freq_table[a] == freq_table[b] ? 0 : -1);
        ushort j = 0;
        foreach (var color in index)
        {
            m_out.Write (color);
            m_index.Add (color, j++);
        }
    }

    // Pad the pending bit buffer to a byte boundary and emit it.
    void Flush ()
    {
        if (1 != m_bits)
        {
            do
                m_bits <<= 1;
            while (0 == (m_bits & 0x100));
            m_out.Write ((byte)(m_bits & 0xff));
            m_bits = 1;
        }
    }

    // Append one bit; a full byte (sentinel reaches bit 8) is written out.
    void PutBit (bool bit)
    {
        m_bits <<= 1;
        m_bits |= bit ? 1 : 0;
        if (0 != (m_bits & 0x100))
        {
            m_out.Write ((byte)(m_bits & 0xff));
            m_bits = 1;
        }
    }

    // Append the 'length' least significant bits of 'x', most significant first.
    void PutBits (uint length, uint x)
    {
        x <<= (int)(32-length);
        while (0 != length--)
        {
            PutBit (0 != (x & 0x80000000));
            x <<= 1;
        }
    }

    // Number of significant bits in 'val'; at least 1 (even for zero).
    static uint GetBitsLength (ushort val)
    {
        uint length = 0;
        do
        {
            ++length;
            val >>= 1;
        }
        while (0 != val);
        return length;
    }

    // Emit a palette index as a bit-length code followed by its low bits.
    void PutIndex (ushort index)
    {
        uint length = GetBitsLength (index);
        if (length < m_base_index_length)
        {
            PutBits (m_base_length, length);
            if (1 == length)
                PutBit (index != 0);
            else
                PutBits (length-1, index);
        }
        else
        {
            // Escape: emit the limit value, then extend the length in unary.
            PutBits (m_base_length, m_base_index_length);
            for (uint i = m_base_index_length; i < length; ++i)
                PutBit (true);
            PutBit (false);
            PutBits (length-1, index);
        }
    }

    #region IDisposable Members
    bool disposed = false;
    public void Dispose ()
    {
        Dispose (true);
        GC.SuppressFinalize (this);
    }
    protected virtual void Dispose (bool disposing)
    {
        if (!disposed)
        {
            if (disposing)
                m_out.Dispose();
            m_out = null;
            m_data = null;
            m_index = null;
            disposed = true;
        }
    }
    #endregion
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using Xunit;
namespace System.Linq.Expressions.Tests
{
public static class BinaryNullableModuloTests
{
#region Test methods
/// <summary>Expression.Modulo must reject byte? operands for every operand pair.</summary>
[Fact]
public static void CheckNullableByteModuloTest()
{
    byte?[] values = { 0, 1, byte.MaxValue };
    foreach (byte? a in values)
    {
        foreach (byte? b in values)
        {
            VerifyNullableByteModulo(a, b);
        }
    }
}

/// <summary>Expression.Modulo must reject sbyte? operands for every operand pair.</summary>
[Fact]
public static void CheckNullableSByteModuloTest()
{
    sbyte?[] values = { 0, 1, -1, sbyte.MinValue, sbyte.MaxValue };
    foreach (sbyte? a in values)
    {
        foreach (sbyte? b in values)
        {
            VerifyNullableSByteModulo(a, b);
        }
    }
}

/// <summary>Exercises ushort? modulo over all operand pairs.</summary>
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckNullableUShortModuloTest(bool useInterpreter)
{
    ushort?[] values = { 0, 1, ushort.MaxValue };
    foreach (ushort? a in values)
    {
        foreach (ushort? b in values)
        {
            VerifyNullableUShortModulo(a, b, useInterpreter);
        }
    }
}

/// <summary>Exercises short? modulo over all operand pairs.</summary>
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckNullableShortModuloTest(bool useInterpreter)
{
    short?[] values = { 0, 1, -1, short.MinValue, short.MaxValue };
    foreach (short? a in values)
    {
        foreach (short? b in values)
        {
            VerifyNullableShortModulo(a, b, useInterpreter);
        }
    }
}

/// <summary>Exercises uint? modulo over all operand pairs.</summary>
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckNullableUIntModuloTest(bool useInterpreter)
{
    uint?[] values = { 0, 1, uint.MaxValue };
    foreach (uint? a in values)
    {
        foreach (uint? b in values)
        {
            VerifyNullableUIntModulo(a, b, useInterpreter);
        }
    }
}

/// <summary>Exercises int? modulo over all operand pairs, including int.MinValue % -1.</summary>
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckNullableIntModuloTest(bool useInterpreter)
{
    int?[] values = { 0, 1, -1, int.MinValue, int.MaxValue };
    foreach (int? a in values)
    {
        foreach (int? b in values)
        {
            VerifyNullableIntModulo(a, b, useInterpreter);
        }
    }
}

/// <summary>Exercises ulong? modulo over all operand pairs.</summary>
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckNullableULongModuloTest(bool useInterpreter)
{
    ulong?[] values = { 0, 1, ulong.MaxValue };
    foreach (ulong? a in values)
    {
        foreach (ulong? b in values)
        {
            VerifyNullableULongModulo(a, b, useInterpreter);
        }
    }
}

/// <summary>Exercises long? modulo over all operand pairs, including long.MinValue % -1.</summary>
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckNullableLongModuloTest(bool useInterpreter)
{
    long?[] values = { 0, 1, -1, long.MinValue, long.MaxValue };
    foreach (long? a in values)
    {
        foreach (long? b in values)
        {
            VerifyNullableLongModulo(a, b, useInterpreter);
        }
    }
}

/// <summary>Exercises float? modulo over all operand pairs, including NaN and infinities.</summary>
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckNullableFloatModuloTest(bool useInterpreter)
{
    float?[] values = { 0, 1, -1, float.MinValue, float.MaxValue, float.Epsilon, float.NegativeInfinity, float.PositiveInfinity, float.NaN };
    foreach (float? a in values)
    {
        foreach (float? b in values)
        {
            VerifyNullableFloatModulo(a, b, useInterpreter);
        }
    }
}

/// <summary>Exercises double? modulo over all operand pairs, including NaN and infinities.</summary>
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckNullableDoubleModuloTest(bool useInterpreter)
{
    double?[] values = { 0, 1, -1, double.MinValue, double.MaxValue, double.Epsilon, double.NegativeInfinity, double.PositiveInfinity, double.NaN };
    foreach (double? a in values)
    {
        foreach (double? b in values)
        {
            VerifyNullableDoubleModulo(a, b, useInterpreter);
        }
    }
}

/// <summary>Exercises decimal? modulo over all operand pairs.</summary>
[Theory, ClassData(typeof(CompilationTypes))]
public static void CheckNullableDecimalModuloTest(bool useInterpreter)
{
    decimal?[] values = { decimal.Zero, decimal.One, decimal.MinusOne, decimal.MinValue, decimal.MaxValue };
    foreach (decimal? a in values)
    {
        foreach (decimal? b in values)
        {
            VerifyNullableDecimalModulo(a, b, useInterpreter);
        }
    }
}

/// <summary>Expression.Modulo must reject char? operands for every operand pair.</summary>
[Fact]
public static void CheckNullableCharModuloTest()
{
    char?[] values = { '\0', '\b', 'A', '\uffff' };
    foreach (char? a in values)
    {
        foreach (char? b in values)
        {
            VerifyNullableCharModulo(a, b);
        }
    }
}
#endregion
#region Test verifiers
// Expression.Modulo must reject byte? operands with InvalidOperationException
// at expression-construction time (nothing is compiled or executed).
private static void VerifyNullableByteModulo(byte? a, byte? b)
{
    Expression aExp = Expression.Constant(a, typeof(byte?));
    Expression bExp = Expression.Constant(b, typeof(byte?));
    Assert.Throws<InvalidOperationException>(() => Expression.Modulo(aExp, bExp));
}

// Expression.Modulo must reject sbyte? operands with InvalidOperationException.
private static void VerifyNullableSByteModulo(sbyte? a, sbyte? b)
{
    Expression aExp = Expression.Constant(a, typeof(sbyte?));
    Expression bExp = Expression.Constant(b, typeof(sbyte?));
    Assert.Throws<InvalidOperationException>(() => Expression.Modulo(aExp, bExp));
}

// Builds and compiles (a % b); b == 0 must raise DivideByZeroException,
// otherwise the result must equal the language's % operator.
private static void VerifyNullableUShortModulo(ushort? a, ushort? b, bool useInterpreter)
{
    Expression<Func<ushort?>> e =
        Expression.Lambda<Func<ushort?>>(
            Expression.Modulo(
                Expression.Constant(a, typeof(ushort?)),
                Expression.Constant(b, typeof(ushort?))),
            Enumerable.Empty<ParameterExpression>());
    Func<ushort?> f = e.Compile(useInterpreter);

    if (b == 0)
        Assert.Throws<DivideByZeroException>(() => f());
    else
        Assert.Equal(a % b, f());
}

// Same contract as the ushort verifier, for short? operands.
private static void VerifyNullableShortModulo(short? a, short? b, bool useInterpreter)
{
    Expression<Func<short?>> e =
        Expression.Lambda<Func<short?>>(
            Expression.Modulo(
                Expression.Constant(a, typeof(short?)),
                Expression.Constant(b, typeof(short?))),
            Enumerable.Empty<ParameterExpression>());
    Func<short?> f = e.Compile(useInterpreter);

    if (b == 0)
        Assert.Throws<DivideByZeroException>(() => f());
    else
        Assert.Equal(a % b, f());
}

// Same contract as the ushort verifier, for uint? operands.
private static void VerifyNullableUIntModulo(uint? a, uint? b, bool useInterpreter)
{
    Expression<Func<uint?>> e =
        Expression.Lambda<Func<uint?>>(
            Expression.Modulo(
                Expression.Constant(a, typeof(uint?)),
                Expression.Constant(b, typeof(uint?))),
            Enumerable.Empty<ParameterExpression>());
    Func<uint?> f = e.Compile(useInterpreter);

    if (b == 0)
        Assert.Throws<DivideByZeroException>(() => f());
    else
        Assert.Equal(a % b, f());
}

// int additionally has the int.MinValue % -1 case, which must overflow.
private static void VerifyNullableIntModulo(int? a, int? b, bool useInterpreter)
{
    Expression<Func<int?>> e =
        Expression.Lambda<Func<int?>>(
            Expression.Modulo(
                Expression.Constant(a, typeof(int?)),
                Expression.Constant(b, typeof(int?))),
            Enumerable.Empty<ParameterExpression>());
    Func<int?> f = e.Compile(useInterpreter);

    if (b == 0)
        Assert.Throws<DivideByZeroException>(() => f());
    else if (b == -1 && a == int.MinValue)
        Assert.Throws<OverflowException>(() => f());
    else
        Assert.Equal(a % b, f());
}

// Same contract as the ushort verifier, for ulong? operands.
private static void VerifyNullableULongModulo(ulong? a, ulong? b, bool useInterpreter)
{
    Expression<Func<ulong?>> e =
        Expression.Lambda<Func<ulong?>>(
            Expression.Modulo(
                Expression.Constant(a, typeof(ulong?)),
                Expression.Constant(b, typeof(ulong?))),
            Enumerable.Empty<ParameterExpression>());
    Func<ulong?> f = e.Compile(useInterpreter);

    if (b == 0)
        Assert.Throws<DivideByZeroException>(() => f());
    else
        Assert.Equal(a % b, f());
}

// long additionally has the long.MinValue % -1 case, which must overflow.
private static void VerifyNullableLongModulo(long? a, long? b, bool useInterpreter)
{
    Expression<Func<long?>> e =
        Expression.Lambda<Func<long?>>(
            Expression.Modulo(
                Expression.Constant(a, typeof(long?)),
                Expression.Constant(b, typeof(long?))),
            Enumerable.Empty<ParameterExpression>());
    Func<long?> f = e.Compile(useInterpreter);

    if (b == 0)
        Assert.Throws<DivideByZeroException>(() => f());
    else if (b == -1 && a == long.MinValue)
        Assert.Throws<OverflowException>(() => f());
    else
        Assert.Equal(a % b, f());
}

// Floating-point % never throws (NaN/infinity results instead), so the
// compiled result is compared unconditionally.
private static void VerifyNullableFloatModulo(float? a, float? b, bool useInterpreter)
{
    Expression<Func<float?>> e =
        Expression.Lambda<Func<float?>>(
            Expression.Modulo(
                Expression.Constant(a, typeof(float?)),
                Expression.Constant(b, typeof(float?))),
            Enumerable.Empty<ParameterExpression>());
    Func<float?> f = e.Compile(useInterpreter);

    Assert.Equal(a % b, f());
}

// Same contract as the float verifier, for double? operands.
private static void VerifyNullableDoubleModulo(double? a, double? b, bool useInterpreter)
{
    Expression<Func<double?>> e =
        Expression.Lambda<Func<double?>>(
            Expression.Modulo(
                Expression.Constant(a, typeof(double?)),
                Expression.Constant(b, typeof(double?))),
            Enumerable.Empty<ParameterExpression>());
    Func<double?> f = e.Compile(useInterpreter);

    Assert.Equal(a % b, f());
}

// decimal % throws DivideByZeroException for a zero divisor, like the integers.
private static void VerifyNullableDecimalModulo(decimal? a, decimal? b, bool useInterpreter)
{
    Expression<Func<decimal?>> e =
        Expression.Lambda<Func<decimal?>>(
            Expression.Modulo(
                Expression.Constant(a, typeof(decimal?)),
                Expression.Constant(b, typeof(decimal?))),
            Enumerable.Empty<ParameterExpression>());
    Func<decimal?> f = e.Compile(useInterpreter);

    if (b == 0)
        Assert.Throws<DivideByZeroException>(() => f());
    else
        Assert.Equal(a % b, f());
}

// Expression.Modulo must reject char? operands with InvalidOperationException.
private static void VerifyNullableCharModulo(char? a, char? b)
{
    Expression aExp = Expression.Constant(a, typeof(char?));
    Expression bExp = Expression.Constant(b, typeof(char?));
    Assert.Throws<InvalidOperationException>(() => Expression.Modulo(aExp, bExp));
}
#endregion
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.AcceptanceTestsBodyDateTime
{
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Models;
/// <summary>
/// Datetime operations.
/// </summary>
public partial interface IDatetime
{
    /// <summary>
    /// Get null datetime value
    /// </summary>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse<DateTime?>> GetNullWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Get invalid datetime value
    /// </summary>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse<DateTime?>> GetInvalidWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Get overflow datetime value
    /// </summary>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse<DateTime?>> GetOverflowWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Get underflow datetime value
    /// </summary>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse<DateTime?>> GetUnderflowWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Put max datetime value 9999-12-31T23:59:59.9999999Z
    /// </summary>
    /// <param name='datetimeBody'>
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse> PutUtcMaxDateTimeWithHttpMessagesAsync(DateTime datetimeBody, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Get max datetime value 9999-12-31t23:59:59.9999999z
    /// </summary>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse<DateTime?>> GetUtcLowercaseMaxDateTimeWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Get max datetime value 9999-12-31T23:59:59.9999999Z
    /// </summary>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse<DateTime?>> GetUtcUppercaseMaxDateTimeWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Put max datetime value with positive numoffset
    /// 9999-12-31t23:59:59.9999999+14:00
    /// </summary>
    /// <param name='datetimeBody'>
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse> PutLocalPositiveOffsetMaxDateTimeWithHttpMessagesAsync(DateTime datetimeBody, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Get max datetime value with positive num offset
    /// 9999-12-31t23:59:59.9999999+14:00
    /// </summary>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse<DateTime?>> GetLocalPositiveOffsetLowercaseMaxDateTimeWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Get max datetime value with positive num offset
    /// 9999-12-31T23:59:59.9999999+14:00
    /// </summary>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse<DateTime?>> GetLocalPositiveOffsetUppercaseMaxDateTimeWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Put max datetime value with negative numoffset
    /// 9999-12-31t23:59:59.9999999-14:00
    /// </summary>
    /// <param name='datetimeBody'>
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse> PutLocalNegativeOffsetMaxDateTimeWithHttpMessagesAsync(DateTime datetimeBody, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Get max datetime value with negative num offset
    /// 9999-12-31T23:59:59.9999999-14:00
    /// </summary>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse<DateTime?>> GetLocalNegativeOffsetUppercaseMaxDateTimeWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Get max datetime value with negative num offset
    /// 9999-12-31t23:59:59.9999999-14:00
    /// </summary>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse<DateTime?>> GetLocalNegativeOffsetLowercaseMaxDateTimeWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Put min datetime value 0001-01-01T00:00:00Z
    /// </summary>
    /// <param name='datetimeBody'>
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse> PutUtcMinDateTimeWithHttpMessagesAsync(DateTime datetimeBody, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Get min datetime value 0001-01-01T00:00:00Z
    /// </summary>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse<DateTime?>> GetUtcMinDateTimeWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Put min datetime value 0001-01-01T00:00:00+14:00
    /// </summary>
    /// <param name='datetimeBody'>
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse> PutLocalPositiveOffsetMinDateTimeWithHttpMessagesAsync(DateTime datetimeBody, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Get min datetime value 0001-01-01T00:00:00+14:00
    /// </summary>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse<DateTime?>> GetLocalPositiveOffsetMinDateTimeWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Put min datetime value 0001-01-01T00:00:00-14:00
    /// </summary>
    /// <param name='datetimeBody'>
    /// </param>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse> PutLocalNegativeOffsetMinDateTimeWithHttpMessagesAsync(DateTime datetimeBody, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
    /// <summary>
    /// Get min datetime value 0001-01-01T00:00:00-14:00
    /// </summary>
    /// <param name='customHeaders'>
    /// The headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    Task<HttpOperationResponse<DateTime?>> GetLocalNegativeOffsetMinDateTimeWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using Microsoft.AspNetCore.Components.Reflection;
using Microsoft.AspNetCore.Components.Rendering;
using Microsoft.AspNetCore.Components.RenderTree;
namespace Microsoft.AspNetCore.Components
{
/// <summary>
/// Represents a collection of parameters supplied to an <see cref="IComponent"/>
/// by its parent in the render tree.
/// </summary>
public readonly struct ParameterView
{
// Backing frames for Empty: a single frame whose component subtree length is 1,
// i.e. just the owner frame with no attribute (parameter) frames after it.
private static readonly RenderTreeFrame[] _emptyFrames = new RenderTreeFrame[]
{
    RenderTreeFrame.Element(0, string.Empty).WithComponentSubtreeLength(1)
};

private static readonly ParameterView _empty = new ParameterView(ParameterViewLifetime.Unbound, _emptyFrames, 0, Array.Empty<CascadingParameterState>());

private readonly ParameterViewLifetime _lifetime;       // validity window for reads of _frames
private readonly RenderTreeFrame[] _frames;             // render-tree buffer holding the parameters
private readonly int _ownerIndex;                       // index of the owning component frame within _frames
private readonly IReadOnlyList<CascadingParameterState> _cascadingParameters;

// Convenience overload for the common case of no cascading parameters.
internal ParameterView(in ParameterViewLifetime lifetime, RenderTreeFrame[] frames, int ownerIndex)
    : this(lifetime, frames, ownerIndex, Array.Empty<CascadingParameterState>())
{
}

private ParameterView(in ParameterViewLifetime lifetime, RenderTreeFrame[] frames, int ownerIndex, IReadOnlyList<CascadingParameterState> cascadingParameters)
{
    _lifetime = lifetime;
    _frames = frames;
    _ownerIndex = ownerIndex;
    _cascadingParameters = cascadingParameters;
}

/// <summary>
/// Gets an empty <see cref="ParameterView"/>.
/// </summary>
public static ParameterView Empty => _empty;

// Exposed so callers can validate or propagate the view's validity window.
internal ParameterViewLifetime Lifetime => _lifetime;
/// <summary>
/// Returns an enumerator that iterates through the <see cref="ParameterView"/>.
/// </summary>
/// <returns>The enumerator.</returns>
public Enumerator GetEnumerator()
{
    // Fail fast if this view's lifetime has ended — the backing frame
    // buffer may have been reused by then.
    _lifetime.AssertNotExpired();
    return new Enumerator(_frames, _ownerIndex, _cascadingParameters);
}
/// <summary>
/// Gets the value of the parameter with the specified name.
/// </summary>
/// <typeparam name="TValue">The type of the value.</typeparam>
/// <param name="parameterName">The name of the parameter.</param>
/// <param name="result">Receives the result, if any.</param>
/// <returns>True if a matching parameter was found; false otherwise.</returns>
public bool TryGetValue<TValue>(string parameterName, [MaybeNullWhen(false)] out TValue result)
{
    foreach (var entry in this)
    {
        // Parameter names are compared ordinally. The explicit StringComparison
        // makes the (already-ordinal) two-arg string.Equals intent visible and
        // consistent with the comparison in DefinitelyEquals (CA1307).
        if (string.Equals(entry.Name, parameterName, StringComparison.Ordinal))
        {
            // Note: throws InvalidCastException if the stored value is not a TValue,
            // matching the original behavior of the unconditional cast.
            result = (TValue)entry.Value;
            return true;
        }
    }

    result = default;
    return false;
}
/// <summary>
/// Gets the value of the parameter with the specified name, or a default value
/// if no such parameter exists in the collection.
/// </summary>
/// <typeparam name="TValue">The type of the value.</typeparam>
/// <param name="parameterName">The name of the parameter.</param>
/// <returns>The parameter value if found; otherwise the default value for the specified type.</returns>
public TValue? GetValueOrDefault<TValue>(string parameterName)
    => GetValueOrDefault<TValue?>(parameterName, default);

/// <summary>
/// Gets the value of the parameter with the specified name, or a specified default value
/// if no such parameter exists in the collection.
/// </summary>
/// <typeparam name="TValue">The type of the value.</typeparam>
/// <param name="parameterName">The name of the parameter.</param>
/// <param name="defaultValue">The default value to return if no such parameter exists in the collection.</param>
/// <returns>The parameter value if found; otherwise <paramref name="defaultValue"/>.</returns>
public TValue GetValueOrDefault<TValue>(string parameterName, TValue defaultValue)
    => TryGetValue<TValue>(parameterName, out TValue? result) ? result : defaultValue;
/// <summary>
/// Returns a dictionary populated with the contents of the <see cref="ParameterView"/>.
/// </summary>
/// <returns>A dictionary populated with the contents of the <see cref="ParameterView"/>.</returns>
public IReadOnlyDictionary<string, object> ToDictionary()
{
    var dictionary = new Dictionary<string, object>();
    foreach (var parameter in this)
    {
        // Indexer assignment means a later duplicate name overwrites an earlier one.
        dictionary[parameter.Name] = parameter.Value;
    }
    return dictionary;
}
// Copies the parameter frames into a freshly allocated buffer so the clone
// survives reuse of the original render-tree buffer.
internal ParameterView Clone()
{
    if (ReferenceEquals(_frames, _emptyFrames))
    {
        return Empty;
    }

    var numEntries = GetEntryCount();
    // Slot 0 holds an owner frame describing the subtree length; the
    // parameter frames follow it.
    var cloneBuffer = new RenderTreeFrame[1 + numEntries];
    cloneBuffer[0] = RenderTreeFrame.PlaceholderChildComponentWithSubtreeLength(1 + numEntries);
    _frames.AsSpan(1, numEntries).CopyTo(cloneBuffer.AsSpan(1));
    return new ParameterView(Lifetime, cloneBuffer, _ownerIndex);
}

// Returns a view over the same frames with the given cascading parameters attached.
internal ParameterView WithCascadingParameters(IReadOnlyList<CascadingParameterState> cascadingParameters)
    => new ParameterView(_lifetime, _frames, _ownerIndex, cascadingParameters);
// It's internal because there isn't a known use case for user code comparing
// ParameterView instances, and even if there was, it's unlikely it should
// use these equality rules which are designed for their effect on rendering.
internal bool DefinitelyEquals(ParameterView oldParameters)
{
    // In general we can't detect mutations on arbitrary objects. We can't trust
    // things like .Equals or .GetHashCode because they usually only tell us about
    // shallow changes, not deep mutations. So we return false if both:
    //  [1] All the parameters are known to be immutable (i.e., Type.IsPrimitive
    //      or is in a known set of common immutable types)
    //  [2] And all the parameter values are equal to their previous values
    // Otherwise be conservative and return false.
    // To make this check cheaper, since parameters are virtually always generated in
    // a deterministic order, we don't bother to account for reordering, so if any
    // of the names don't match sequentially we just return false too.
    //
    // The logic here may look kind of epic, and would certainly be simpler if we
    // used ParameterEnumerator.GetEnumerator(), but it's perf-critical and this
    // implementation requires a lot fewer instructions than a GetEnumerator-based one.

    var oldIndex = oldParameters._ownerIndex;
    var newIndex = _ownerIndex;
    // The owner frame's subtree length bounds the attribute scan for each view.
    var oldEndIndexExcl = oldIndex + oldParameters._frames[oldIndex].ComponentSubtreeLengthField;
    var newEndIndexExcl = newIndex + _frames[newIndex].ComponentSubtreeLengthField;
    while (true)
    {
        // First, stop if we've reached the end of either subtree
        oldIndex++;
        newIndex++;
        var oldFinished = oldIndex == oldEndIndexExcl;
        var newFinished = newIndex == newEndIndexExcl;
        if (oldFinished || newFinished)
        {
            return oldFinished == newFinished; // Same only if we have same number of parameters
        }
        else
        {
            // Since neither subtree has finished, it's safe to read the next frame from both
            ref var oldFrame = ref oldParameters._frames[oldIndex];
            ref var newFrame = ref _frames[newIndex];

            // Stop if we've reached the end of either subtree's sequence of attributes
            oldFinished = oldFrame.FrameTypeField != RenderTreeFrameType.Attribute;
            newFinished = newFrame.FrameTypeField != RenderTreeFrameType.Attribute;
            if (oldFinished || newFinished)
            {
                return oldFinished == newFinished; // Same only if we have same number of parameters
            }
            else
            {
                if (!string.Equals(oldFrame.AttributeNameField, newFrame.AttributeNameField, StringComparison.Ordinal))
                {
                    return false; // Different names
                }

                var oldValue = oldFrame.AttributeValueField;
                var newValue = newFrame.AttributeValueField;
                if (ChangeDetection.MayHaveChanged(oldValue, newValue))
                {
                    return false;
                }
            }
        }
    }
}
// Copies this view's parameter frames into the supplied builder, prefixed by an
// "owner" frame, so the snapshot can later be read back as a ParameterView.
internal void CaptureSnapshot(ArrayBuilder<RenderTreeFrame> builder)
{
    builder.Clear();

    var entryCount = GetEntryCount();

    // The leading "owner" frame records the subtree length so a future
    // ParameterView knows how far to iterate through the buffer.
    builder.Append(RenderTreeFrame.PlaceholderChildComponentWithSubtreeLength(1 + entryCount));

    if (entryCount != 0)
    {
        builder.Append(_frames, _ownerIndex + 1, entryCount);
    }
}
// Counts all parameters (direct and cascading) by walking the enumerator.
private int GetEntryCount()
{
    var count = 0;
    var enumerator = GetEnumerator();
    while (enumerator.MoveNext())
    {
        count++;
    }

    return count;
}
/// <summary>
/// Creates a new <see cref="ParameterView"/> from the given <see cref="IDictionary{TKey, TValue}"/>.
/// </summary>
/// <param name="parameters">The <see cref="IDictionary{TKey, TValue}"/> with the parameters.</param>
/// <returns>A <see cref="ParameterView"/>.</returns>
public static ParameterView FromDictionary(IDictionary<string, object?> parameters)
{
    // Pre-size the builder from the dictionary so no growth is needed.
    var builder = new ParameterViewBuilder(parameters.Count);
    foreach (var (name, value) in parameters)
    {
        builder.Add(name, value);
    }

    return builder.ToParameterView();
}
/// <summary>
/// For each parameter property on <paramref name="target"/>, updates its value to
/// match the corresponding entry in the <see cref="ParameterView"/>.
/// </summary>
/// <param name="target">An object that has a public writable property matching each parameter's name and type.</param>
public void SetParameterProperties(object target)
{
    // Guard against null before handing off to the property writer.
    _ = target ?? throw new ArgumentNullException(nameof(target));

    ComponentProperties.SetProperties(this, target);
}
/// <summary>
/// An enumerator that iterates through a <see cref="ParameterView"/>.
/// </summary>
// Deliberately not an IEnumerator<> implementation, mirroring Span<>.Enumerator:
// foreach works against any type exposing Current/MoveNext.
public struct Enumerator
{
    private RenderTreeFrameParameterEnumerator _direct;
    private CascadingParameterEnumerator _cascading;
    private bool _usingDirect;

    internal Enumerator(RenderTreeFrame[] frames, int ownerIndex, IReadOnlyList<CascadingParameterState> cascadingParameters)
    {
        _direct = new RenderTreeFrameParameterEnumerator(frames, ownerIndex);
        _cascading = new CascadingParameterEnumerator(cascadingParameters);
        _usingDirect = true;
    }

    /// <summary>
    /// Gets the current value of the enumerator.
    /// </summary>
    public ParameterValue Current
        => _usingDirect ? _direct.Current : _cascading.Current;

    /// <summary>
    /// Instructs the enumerator to move to the next value in the sequence.
    /// </summary>
    /// <returns>A flag to indicate whether or not there is a next value.</returns>
    public bool MoveNext()
    {
        // Drain the direct parameters first, then fall through to cascading ones.
        if (_usingDirect)
        {
            if (_direct.MoveNext())
            {
                return true;
            }

            _usingDirect = false;
        }

        return _cascading.MoveNext();
    }
}
// Walks the attribute frames that immediately follow an owner frame in a
// render-tree frame buffer, surfacing each as a direct ParameterValue.
private struct RenderTreeFrameParameterEnumerator
{
    private readonly RenderTreeFrame[] _frames;
    private readonly int _ownerIndex;
    private readonly int _endIndexExcl;
    private int _index;
    private ParameterValue _current;

    internal RenderTreeFrameParameterEnumerator(RenderTreeFrame[] frames, int ownerIndex)
    {
        _frames = frames;
        _ownerIndex = ownerIndex;
        // The owner's subtree length bounds how far its descendants extend.
        _endIndexExcl = ownerIndex + frames[ownerIndex].ElementSubtreeLengthField;
        _index = ownerIndex;
        _current = default;
    }

    public ParameterValue Current => _current;

    public bool MoveNext()
    {
        var next = _index + 1;

        // Iteration ends at the owner's last descendant, or at its first
        // non-attribute descendant (attributes always precede other frame types).
        if (next == _endIndexExcl || _frames[next].FrameTypeField != RenderTreeFrameType.Attribute)
        {
            return false;
        }

        _index = next;
        ref var frame = ref _frames[_index];
        _current = new ParameterValue(frame.AttributeNameField, frame.AttributeValueField, false);
        return true;
    }
}
// Walks a list of cascading parameter states, surfacing each as a
// ParameterValue flagged as cascading.
private struct CascadingParameterEnumerator
{
    private readonly IReadOnlyList<CascadingParameterState> _cascadingParameters;
    private int _index;
    private ParameterValue _current;

    public CascadingParameterEnumerator(IReadOnlyList<CascadingParameterState> cascadingParameters)
    {
        _cascadingParameters = cascadingParameters;
        _index = -1;
        _current = default;
    }

    public ParameterValue Current => _current;

    public bool MoveNext()
    {
        var next = _index + 1;
        if (next >= _cascadingParameters.Count)
        {
            return false;
        }

        _index = next;
        var state = _cascadingParameters[next];
        // The 'true' flag marks this value as cascading rather than direct.
        _current = new ParameterValue(state.LocalValueName, state.ValueSupplier.CurrentValue!, true);
        return true;
    }
}
}
}
| |
//-----------------------------------------------------------------------
//
// Microsoft Windows Client Platform
// Copyright (C) Microsoft Corporation, 2001
//
// File: TextMetrics.cs
//
// Contents: Full text implementation of ITextMetrics
//
// Created: 5-6-2002 Worachai Chaoweeraprasit (wchao)
// History: 2-3-2005 (wchao) Refactoring from FullTextLine for optimal paragraph usage.
//
//------------------------------------------------------------------------
using System;
using System.Diagnostics;
using System.Windows;
using System.Windows.Media;
using System.Windows.Media.TextFormatting;
using System.Security;
using System.Security.Permissions;
using MS.Internal.FontCache;
using SR = MS.Internal.PresentationCore.SR;
using SRID = MS.Internal.PresentationCore.SRID;
namespace MS.Internal.TextFormatting
{
internal partial struct TextMetrics : ITextMetrics
{
private TextFormatterImp _formatter; // text formatter formatting this metrics
private int _lscpLim; // number of LSCP in the line (for boundary condition handling)
private int _cchLength; // actual character count
private int _cchDepend; // number of chars after linebreak that triggers reformatting this line
private int _cchNewline; // number of chars of newline symbol
private int _height; // line height
private int _textHeight; // measured height of text within the line
private int _baselineOffset; // offset from top of line height to baseline
private int _textAscent; // offset from top of text height to baseline
private int _textStart; // distance from LS origin to text start
private int _textWidth; // text start to end
private int _textWidthAtTrailing; // text start to end excluding trailing whitespaces
private int _paragraphToText; // paragraph start to text start
private LSRun _lastRun; // Last Text LSRun
/// <summary>
/// Construct text metrics from full text info
/// </summary>
/// <remarks>
///
/// When the application formats a line of text. It starts from the leading edge of the paragraph - the reference position
/// called "Paragraph Start". It gives the width of the paragraph or "Paragraph Width" to TextFormatter as one of the main
/// parameters to TextFormatter.FormatLine method. It may also provide additional info about how it wants the line to look
/// like. The following are all of such info and how the formatting process is carried on inside TextFormatter.
///
///
/// *** Indent/Paragraph Indent ***
/// The application may specify "Indent" - the distance from the beginning of the line to the beginning of the text in that
/// line. The value is sent to TextFormatter via [TextParagraphProperties.Indent]. It may also specify "Paragraph Indent"
/// - the distance from the beginning of the paragraph to the beginning of the line [TextParagraphProperties.ParagraphIndent].
/// The usage of paragraph indent is to offset the beginning of the line relative to the paragraph starting point, while
/// indent is to offset the beginning of text relative to the line starting point. Paragraph indent is not included as part
/// of the line width while indent is.
///
///
/// *** Text Alignment ***
/// "Text Alignment" [TextParagraphProperties.TextAlignment] may be specified to align the leading, center or trailing edge
/// of the line to the leading, center or trailing edge of the paragraph excluding paragraph indent.
///
///
/// *** Bullet/Auto-numbering ***
/// The application may also specify "bullet" (or "marker") for the line. Marker does not affect the layout measurement of the
/// line. Line with marker has the same line width with the line that has not. The presence of marker however affects the
/// pixel-wise black width of the line. The application specifies the distance from the beginning of the line to the trailing
/// edge of the marker symbol via the property [TextMarkerProperties.Offset]. The application can create the visual effect of
/// having marker embedded inside the body of paragraph text (so-called "marker inside") by specifying a positive indent so
/// that the text starts after the beginning of the line and a positive smaller amount of marker offset to place the marker
/// symbol at between the beginning of the line and the beginning of the text. The "marker outside" visual effect can
/// also be achieved in a similar manner by specifying zero or positive indent value with negative marker offset value.
///
///
/// *** Formatted Line Properties ***
/// Once the line formatting process is completed and a line is returned to the application. The application determines the
/// distance from the paragraph starting point to the actual beginning of the line by looking at the "Line Start" property of
/// the text line [TextLine.Start]. The "Width" of the line can be determined, naturally, from the property [TextLine.Width].
/// The property value [TextLine.OverhangLeading] represents the distance from the beginning of the line, or the line's alignment
/// point, to the first leading pixel of that line so-called the "Black Start". The property [TextLine.OverhangTrailing]
/// is the distance from the last trailing pixel of the line to the trailing edge alignment point of the line. The application
/// uses these "overhang" or "overshoot" values to ensure proper positioning of text that avoids pixel clipping of the
/// glyph image. A less sophisticated application may provide reasonable leading and trailing margin around the text line
/// and ignores these properties altogether.
///
///
/// *** Hit-Testing ***
/// The application may also perform hit-testing by calling methods on TextLine. All the distances involved in hit-testing
/// operations are distances from the paragraph start, not from the line start. Marker symbol on its own is not hit-testable.
///
///
/// *** Tabs ***
/// The application may specify tab stops - an array of positions to where text aligns. Each tab stop may have different
/// "Tab Alignment". The left, center and right tab alignment aligns the tab stop position to the leading, center and the
/// trailing edge of the text following the tab character. "Tab Leader" may also be specified to fill the distance occupied
/// by the presence of tab character with the symbol of choice. Tab stops is specified thru the property [TextParagraph.Tabs].
/// In the absence of tab stops, the application may assume an automatic tab stop - so called "Incremental Tab" specified by
/// the property [TextParagraphProperties.DefaultIncrementalTab]. The property could be overridden, by default the value
/// is set by TextFormatter to 4 em of the paragraph's default font.
///
///
/// *** Line Services Properties ***
/// TextFormatter relies on LS to calculate the distance from the beginning of the line to the beginning of text or "Text Start"
/// and keep it in the private property [this._textStart]. This value is non-zero when 1) the line starts with indentation or
/// 2) the line starts with marker - either bullet or auto-numbering symbol.
///
/// In case of the line with marker, LS also produces the distance from the beginning of the line to the beginning of the marker
/// symbol, but TextFormatter does not retain that distance because marker is outside the line. The application assumes
/// responsibility for making sure the marker symbol is not going to be clipped out. The application achieves that by manipulating
/// the indent value along with the marker offset value.
///
/// TextFormatter also retains the total "Text Width" value computed by LS in the private property [this._textWidth]. This
/// is the distance from the beginning of the text to the end including all trailing whitespaces at the end of the line. The
/// similar value but with trailing whitespaces excluded is kept in the private property [this._textWidthAtTrailing].
///
/// TextFormatter starts formatting a LS line by assuming the beginning of the line being at an imaginary origin. It then
/// places the starting point of the content depending on whether the line has either marker symbol or indent. The actual
/// mechanism for the placement is in FetchLineProps callback where the value [LsLineProps.durLeft] represents the distance
/// relative to the line's origin where actual content begins. The distances can either be positive or negative. Negative
/// distance runs in the reverse direction from the direction of text flow. When a negative indent or marker offset is
/// specified, durLeft is set to negative distance relative to line start.
///
/// TextFormatter however does not rely on LS for the whole line's text alignment. It always formats LS as if the line is
/// left-aligned. Once the distances of the line are received, it aligns the whole line according to the text alignment setting
/// specified by the application, outside the LS call. The result of this aligning process is a distance from the beginning of
/// the paragraph to the beginning of text and is kept in a private property [this._paragraphToText].
///
/// </remarks>
[SecurityCritical]
internal unsafe void Compute(
    FullTextState fullText,
    int firstCharIndex,
    int paragraphWidth,
    FormattedTextSymbols collapsingSymbol,
    ref LsLineWidths lineWidths,
    LsLInfo* plsLineInfo
    )
{
    _formatter = fullText.Formatter;
    TextStore store = fullText.TextStore;

    // obtain position of important distances
    _textStart = lineWidths.upStartMainText;
    _textWidthAtTrailing = lineWidths.upStartTrailing;
    _textWidth = lineWidths.upLimLine;

    // append line end collapsing symbol if any
    if (collapsingSymbol != null)
    {
        AppendCollapsingSymbolWidth(TextFormatterImp.RealToIdeal(collapsingSymbol.Width));
    }

    // make all widths relative to text start
    _textWidth -= _textStart;
    _textWidthAtTrailing -= _textStart;

    // keep the newline character count if any
    _cchNewline = store.CchEol;

    // count text and dependent characters
    _lscpLim = plsLineInfo->cpLimToContinue;
    _lastRun = fullText.CountText(_lscpLim, firstCharIndex, out _cchLength);
    Debug.Assert(_cchLength > 0);

    if ( plsLineInfo->endr != LsEndRes.endrEndPara
        && plsLineInfo->endr != LsEndRes.endrSoftCR)
    {
        // endrEndPara denotes that the line ends at paragraph end. It is a result of submitting Paragraph Separator to LS.
        // endrSoftCR denotes end of line but not end of paragraph. This is a result of submitting Line Separator to LS.
        // Since this line ends for neither of those reasons, it contains no newline characters.
        _cchNewline = 0;

        if (plsLineInfo->dcpDepend >= 0)
        {
            // According to SergeyGe [2/16/2006], dcpDepend reported from LS cannot be made precise when considering
            // the line ending with hyphenation - this is because LS does not have the knowledge about the amount
            // of text, after the hyphenation point, being examined by its client during the process of finding
            // the right place to hyphenate. LS client must therefore take into account the number of lookahead
            // LSCP examined by hyphenator when computing the correct dcpDepend for the line. In our implementation
            // it would just mean we take the max of the two values.
            int lscpFirstIndependence = Math.Max(
                plsLineInfo->cpLimToContinue + plsLineInfo->dcpDepend,
                fullText.LscpHyphenationLookAhead
                );

            // Dependent count = chars up to first independence point, minus this line's own chars.
            fullText.CountText(lscpFirstIndependence, firstCharIndex, out _cchDepend);
            _cchDepend -= _cchLength;
        }
    }

    ParaProp pap = store.Pap;

    if (_height <= 0)
    {
        // if height has not been settled,
        // calculate line height and baseline offset
        if(pap.LineHeight > 0)
        {
            // Host specifies line height, honor it. The baseline offset is scaled
            // by the default typeface's baseline-to-line-spacing ratio.
            _height = pap.LineHeight;
            _baselineOffset = (int)Math.Round(
                _height
                * pap.DefaultTypeface.Baseline(pap.EmSize, Constants.DefaultIdealToReal, Util.PixelsPerDip, fullText.TextFormattingMode)
                / pap.DefaultTypeface.LineSpacing(pap.EmSize, Constants.DefaultIdealToReal, Util.PixelsPerDip, fullText.TextFormattingMode)
                );
        }

        if(plsLineInfo->dvrMultiLineHeight == int.MaxValue)
        {
            // Line is empty so text height and text baseline are based on the default typeface;
            // it doesn't make sense even for an empty line to have zero text height
            _textAscent = (int)Math.Round(pap.DefaultTypeface.Baseline(pap.EmSize, Constants.DefaultIdealToReal, Util.PixelsPerDip, fullText.TextFormattingMode));
            _textHeight = (int)Math.Round(pap.DefaultTypeface.LineSpacing(pap.EmSize, Constants.DefaultIdealToReal, Util.PixelsPerDip, fullText.TextFormattingMode));
        }
        else
        {
            _textAscent = plsLineInfo->dvrAscent;
            _textHeight = _textAscent + plsLineInfo->dvrDescent;

            if (fullText.VerticalAdjust)
            {
                // Line requires vertical repositioning of text runs
                store.AdjustRunsVerticalOffset(
                    plsLineInfo->cpLimToContinue - firstCharIndex,
                    _height,
                    _baselineOffset,
                    out _textHeight,
                    out _textAscent
                    );
            }
        }

        // if the client hasn't specified a line height then the line height and baseline
        // are the same as the text height and text baseline
        if (_height <= 0)
        {
            _height = _textHeight;
            _baselineOffset = _textAscent;
        }
    }

    // Text alignment aligns the line to correspondent paragraph alignment start edge
    switch(pap.Align)
    {
        case TextAlignment.Right:
            // alignment rule:
            //   "The sum of paragraph start to line start and line width is equal to paragraph width"
            //
            //   PTL + LW = PW
            //   (PTT - LTT) + (LTT + TW) = PW
            //   (thus) PTT = PW - TW
            _paragraphToText = paragraphWidth - _textWidthAtTrailing;
            break;

        case TextAlignment.Center:
            // alignment rule:
            //   "The sum of paragraph start to line start and half the line width is equal to half the paragraph width"
            //
            //   PTL + 0.5*LW = 0.5*PW
            //   (PTT - LTT) + 0.5*(LTT + TW) = 0.5*PW
            //   (thus) PTT = 0.5 * (PW + LTT - TW)
            _paragraphToText = (int)Math.Round((paragraphWidth + _textStart - _textWidthAtTrailing) * 0.5);
            break;

        default:
            // alignment rule:
            //   "Paragraph start to line start is paragraph indent"
            //
            //   PTL = PI
            //   PTT - LTT = PI
            //   (thus) PTT = PI + LTT
            _paragraphToText = pap.ParagraphIndent + _textStart;
            break;
    }
}
/// <summary>
/// Client to acquire a state at the point where line is broken by line breaking process;
/// can be null when the line ends by the ending of the paragraph. Client may pass this
/// value back to TextFormatter as an input argument to TextFormatter.FormatLine when
/// formatting the next line within the same paragraph.
/// </summary>
/// <remarks>
/// TextLineBreak is a finalizable object which may contain a reference to an unmanaged
/// structure called break record. Break record can be acquired thru ploline. This method
/// acquires a break record only when the passed ploline object is not NULL.
///
/// Not all situations require a break record. Single-line formatting without complex text
/// object does not need it, but optimal break session does. For performance reason, we
/// should not produce break record unnecessarily because it makes TextLineBreak become
/// finalizable, which therefore unnecessarily puts additional pressure on GC since each
/// finalizable object wakes finalizer thread and requires double GC collections.
/// </remarks>
/// <SecurityNote>
/// Critical - as this calls LoAcquireBreakRecord
/// </SecurityNote>
[SecurityCritical]
internal TextLineBreak GetTextLineBreak(IntPtr ploline)
{
    IntPtr pbreakrec = IntPtr.Zero;

    if (ploline != IntPtr.Zero)
    {
        // Acquire the unmanaged break record from LS; a failure code is
        // translated into a managed exception.
        LsErr lserr = UnsafeNativeMethods.LoAcquireBreakRecord(ploline, out pbreakrec);

        if (lserr != LsErr.None)
        {
            TextFormatterContext.ThrowExceptionFromLsError(SR.Get(SRID.AcquireBreakRecordFailure, lserr), lserr);
        }
    }

    if ( _lastRun != null
        && _lastRun.TextModifierScope != null
        && !(_lastRun.TextRun is TextEndOfParagraph))
    {
        // A text modifier scope is still open at the break point, so carry it
        // (plus the break record, if any) over to the next line.
        return new TextLineBreak(
            _lastRun.TextModifierScope,
            new SecurityCriticalDataForSet<IntPtr>(pbreakrec)
            );
    }

    // No open modifier scope: only produce a line break object when a break record exists.
    return (pbreakrec != IntPtr.Zero) ? new TextLineBreak(null, new SecurityCriticalDataForSet<IntPtr>(pbreakrec)) : null;
}
/// <summary>
/// Append the ideal width of line end collapsing symbol
/// </summary>
private void AppendCollapsingSymbolWidth(
    int symbolIdealWidth
    )
{
    // The collapsing symbol counts as line content, so widen both the total
    // text width and the width excluding trailing whitespace by the same amount.
    _textWidth += symbolIdealWidth;
    _textWidthAtTrailing += symbolIdealWidth;
}
/// <summary>
/// Client to get the number of text source positions of this line
/// </summary>
public int Length => _cchLength;
/// <summary>
/// Client to get the number of characters following the last character
/// of the line that may trigger reformatting of the current line.
/// </summary>
public int DependentLength => _cchDepend;
/// <summary>
/// Client to get the number of newline characters at line end
/// </summary>
public int NewlineLength => _cchNewline;
/// <summary>
/// Client to get distance from paragraph start to line start
/// </summary>
public double Start => _formatter.IdealToReal(_paragraphToText - _textStart);
/// <summary>
/// Client to get the total width of this line
/// </summary>
public double Width => _formatter.IdealToReal(_textWidthAtTrailing + _textStart);
/// <summary>
/// Client to get the total width of this line including width of whitespace characters at the end of the line.
/// </summary>
public double WidthIncludingTrailingWhitespace => _formatter.IdealToReal(_textWidth + _textStart);
/// <summary>
/// Client to get the height of the line
/// </summary>
public double Height => _formatter.IdealToReal(_height);
/// <summary>
/// Client to get the height of the text (or other content) in the line; this property may differ from the Height
/// property if the client specified the line height
/// </summary>
public double TextHeight => _formatter.IdealToReal(_textHeight);
/// <summary>
/// Client to get the distance from top to baseline of this text line
/// </summary>
public double Baseline => _formatter.IdealToReal(_baselineOffset);
/// <summary>
/// Client to get the distance from the top of the text (or other content) to the baseline of this text line;
/// this property may differ from the Baseline property if the client specified the line height
/// </summary>
public double TextBaseline => _formatter.IdealToReal(_textAscent);
/// <summary>
/// Client to get the distance from the before edge of line height
/// to the baseline of marker of the line if any.
/// </summary>
public double MarkerBaseline => Baseline;
/// <summary>
/// Client to get the overall height of the list items marker of the line if any.
/// </summary>
public double MarkerHeight => Height;
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 1.0.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Compute
{
using Azure;
using Management;
using Rest;
using Rest.Azure;
using Rest.Azure.OData;
using Models;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Extension methods for VirtualMachineImagesOperations.
/// </summary>
public static partial class VirtualMachineImagesOperationsExtensions
{
/// <summary>
/// Gets a virtual machine image.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='location'>The name of a supported Azure region.</param>
/// <param name='publisherName'>A valid image publisher.</param>
/// <param name='offer'>A valid image publisher offer.</param>
/// <param name='skus'>A valid image SKU.</param>
/// <param name='version'>A valid image SKU version.</param>
public static VirtualMachineImage Get(this IVirtualMachineImagesOperations operations, string location, string publisherName, string offer, string skus, string version)
{
    // Synchronous convenience wrapper over the async implementation.
    var pending = operations.GetAsync(location, publisherName, offer, skus, version);
    return pending.GetAwaiter().GetResult();
}
/// <summary>
/// Gets a virtual machine image.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='location'>The name of a supported Azure region.</param>
/// <param name='publisherName'>A valid image publisher.</param>
/// <param name='offer'>A valid image publisher offer.</param>
/// <param name='skus'>A valid image SKU.</param>
/// <param name='version'>A valid image SKU version.</param>
/// <param name='cancellationToken'>The cancellation token.</param>
public static async Task<VirtualMachineImage> GetAsync(this IVirtualMachineImagesOperations operations, string location, string publisherName, string offer, string skus, string version, CancellationToken cancellationToken = default(CancellationToken))
{
    // Dispose the HTTP response wrapper once the body has been extracted.
    using (var response = await operations.GetWithHttpMessagesAsync(location, publisherName, offer, skus, version, null, cancellationToken).ConfigureAwait(false))
    {
        return response.Body;
    }
}
/// <summary>
/// Gets a list of all virtual machine image versions for the specified
/// location, publisher, offer, and SKU.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='location'>The name of a supported Azure region.</param>
/// <param name='publisherName'>A valid image publisher.</param>
/// <param name='offer'>A valid image publisher offer.</param>
/// <param name='skus'>A valid image SKU.</param>
/// <param name='odataQuery'>OData parameters to apply to the operation.</param>
public static IList<VirtualMachineImageResource> List(this IVirtualMachineImagesOperations operations, string location, string publisherName, string offer, string skus, ODataQuery<VirtualMachineImageResource> odataQuery = default(ODataQuery<VirtualMachineImageResource>))
{
    // Synchronous convenience wrapper over the async implementation.
    var pending = operations.ListAsync(location, publisherName, offer, skus, odataQuery);
    return pending.GetAwaiter().GetResult();
}
/// <summary>
/// Gets a list of all virtual machine image versions for the specified
/// location, publisher, offer, and SKU.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='location'>The name of a supported Azure region.</param>
/// <param name='publisherName'>A valid image publisher.</param>
/// <param name='offer'>A valid image publisher offer.</param>
/// <param name='skus'>A valid image SKU.</param>
/// <param name='odataQuery'>OData parameters to apply to the operation.</param>
/// <param name='cancellationToken'>The cancellation token.</param>
public static async Task<IList<VirtualMachineImageResource>> ListAsync(this IVirtualMachineImagesOperations operations, string location, string publisherName, string offer, string skus, ODataQuery<VirtualMachineImageResource> odataQuery = default(ODataQuery<VirtualMachineImageResource>), CancellationToken cancellationToken = default(CancellationToken))
{
    // Dispose the HTTP response wrapper once the body has been extracted.
    using (var response = await operations.ListWithHttpMessagesAsync(location, publisherName, offer, skus, odataQuery, null, cancellationToken).ConfigureAwait(false))
    {
        return response.Body;
    }
}
/// <summary>
/// Gets a list of virtual machine image offers for the specified location and
/// publisher.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='location'>The name of a supported Azure region.</param>
/// <param name='publisherName'>A valid image publisher.</param>
public static IList<VirtualMachineImageResource> ListOffers(this IVirtualMachineImagesOperations operations, string location, string publisherName)
{
    // Synchronous convenience wrapper over the async implementation.
    var pending = operations.ListOffersAsync(location, publisherName);
    return pending.GetAwaiter().GetResult();
}
/// <summary>
/// Gets a list of virtual machine image offers for the specified location and
/// publisher.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='location'>The name of a supported Azure region.</param>
/// <param name='publisherName'>A valid image publisher.</param>
/// <param name='cancellationToken'>The cancellation token.</param>
public static async Task<IList<VirtualMachineImageResource>> ListOffersAsync(this IVirtualMachineImagesOperations operations, string location, string publisherName, CancellationToken cancellationToken = default(CancellationToken))
{
    // Dispose the HTTP response wrapper once the body has been extracted.
    using (var response = await operations.ListOffersWithHttpMessagesAsync(location, publisherName, null, cancellationToken).ConfigureAwait(false))
    {
        return response.Body;
    }
}
/// <summary>
/// Gets a list of virtual machine image publishers for the specified Azure
/// location.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='location'>The name of a supported Azure region.</param>
public static IList<VirtualMachineImageResource> ListPublishers(this IVirtualMachineImagesOperations operations, string location)
{
    // Synchronous convenience wrapper over the async implementation.
    var pending = operations.ListPublishersAsync(location);
    return pending.GetAwaiter().GetResult();
}
/// <summary>
/// Gets a list of virtual machine image publishers for the specified Azure
/// location.
/// </summary>
/// <param name='operations'>The operations group for this extension method.</param>
/// <param name='location'>The name of a supported Azure region.</param>
/// <param name='cancellationToken'>The cancellation token.</param>
public static async Task<IList<VirtualMachineImageResource>> ListPublishersAsync(this IVirtualMachineImagesOperations operations, string location, CancellationToken cancellationToken = default(CancellationToken))
{
    // Dispose the HTTP response wrapper once the body has been extracted.
    using (var response = await operations.ListPublishersWithHttpMessagesAsync(location, null, cancellationToken).ConfigureAwait(false))
    {
        return response.Body;
    }
}
/// <summary>
/// Gets a list of virtual machine image SKUs for the specified location,
/// publisher, and offer.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='location'>
/// The name of a supported Azure region.
/// </param>
/// <param name='publisherName'>
/// A valid image publisher.
/// </param>
/// <param name='offer'>
/// A valid image publisher offer.
/// </param>
public static IList<VirtualMachineImageResource> ListSkus(this IVirtualMachineImagesOperations operations, string location, string publisherName, string offer)
{
return operations.ListSkusAsync(location, publisherName, offer).GetAwaiter().GetResult();
}
/// <summary>
/// Gets a list of virtual machine image SKUs for the specified location,
/// publisher, and offer.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='location'>
/// The name of a supported Azure region.
/// </param>
/// <param name='publisherName'>
/// A valid image publisher.
/// </param>
/// <param name='offer'>
/// A valid image publisher offer.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IList<VirtualMachineImageResource>> ListSkusAsync(this IVirtualMachineImagesOperations operations, string location, string publisherName, string offer, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListSkusWithHttpMessagesAsync(location, publisherName, offer, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.IO;
using System.Linq;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Scripting.Hosting
{
/// <summary>
/// Implements shadow-copying metadata file cache.
/// </summary>
    public sealed class MetadataShadowCopyProvider : IDisposable
    {
        /// <summary>
        /// Specialize <see cref="PortableExecutableReference"/> with path being the original path of the copy.
        /// Logically this reference represents that file, the fact that we load the image from a copy is an implementation detail.
        /// </summary>
        private sealed class ShadowCopyReference : PortableExecutableReference
        {
            // Owning provider; supplies (possibly shadow-copied) metadata on demand.
            private readonly MetadataShadowCopyProvider _provider;
            public ShadowCopyReference(MetadataShadowCopyProvider provider, string originalPath, MetadataReferenceProperties properties)
                : base(properties, originalPath)
            {
                Debug.Assert(originalPath != null);
                Debug.Assert(provider != null);
                _provider = provider;
            }
            protected override DocumentationProvider CreateDocumentationProvider()
            {
                // TODO (tomat): use file next to the dll (or shadow copy)
                return DocumentationProvider.Default;
            }
            protected override Metadata GetMetadataImpl()
            {
                // Resolves through the provider so callers share the cached image.
                return _provider.GetMetadata(FilePath, Properties.Kind);
            }
            protected override PortableExecutableReference WithPropertiesImpl(MetadataReferenceProperties properties)
            {
                return new ShadowCopyReference(_provider, this.FilePath, properties);
            }
        }
        // normalized absolute path
        private readonly string _baseDirectory;
        // Normalized absolute path to a directory where assemblies are copied. Must contain nothing but shadow-copied assemblies.
        // Internal for testing.
        // Created lazily on first copy; reset to null if the directory disappears (see CreateMetadataShadowCopy).
        internal string ShadowCopyDirectory;
        // normalized absolute paths
        private readonly ImmutableArray<string> _noShadowCopyDirectories;
        // Pairs the object handed out to callers with the private Metadata that owns the underlying PE image.
        private struct CacheEntry<TPublic>
        {
            public readonly TPublic Public;
            public readonly Metadata Private;
            public CacheEntry(TPublic @public, Metadata @private)
            {
                Debug.Assert(@public != null);
                Debug.Assert(@private != null);
                Public = @public;
                Private = @private;
            }
        }
        // Cache for files that are shadow-copied:
        // (original path, last write timestamp) -> (public shadow copy, private metadata instance that owns the PE image)
        private readonly Dictionary<FileKey, CacheEntry<MetadataShadowCopy>> _shadowCopies = new Dictionary<FileKey, CacheEntry<MetadataShadowCopy>>();
        // Cache for files that are not shadow-copied:
        // (path, last write timestamp) -> (public metadata, private metadata instance that owns the PE image)
        private readonly Dictionary<FileKey, CacheEntry<Metadata>> _noShadowCopyCache = new Dictionary<FileKey, CacheEntry<Metadata>>();
        // files that should not be copied:
        private HashSet<string> _lazySuppressedFiles;
        // Lock object protecting both caches and _lazySuppressedFiles (reuses the dictionary instance).
        private object Guard => _shadowCopies;
        /// <summary>
        /// Creates an instance of <see cref="MetadataShadowCopyProvider"/>.
        /// </summary>
        /// <param name="directory">The directory to use to store file copies.</param>
        /// <param name="noShadowCopyDirectories">Directories to exclude from shadow-copying.</param>
        /// <exception cref="ArgumentNullException"><paramref name="directory"/> is null.</exception>
        /// <exception cref="ArgumentException"><paramref name="directory"/> is not an absolute path.</exception>
        public MetadataShadowCopyProvider(string directory = null, IEnumerable<string> noShadowCopyDirectories = null)
        {
            if (directory != null)
            {
                RequireAbsolutePath(directory, nameof(directory));
                try
                {
                    _baseDirectory = FileUtilities.NormalizeDirectoryPath(directory);
                }
                catch (Exception e)
                {
                    // Surface normalization failures as argument errors for the caller's parameter.
                    throw new ArgumentException(e.Message, nameof(directory));
                }
            }
            else
            {
                // No directory supplied: use a fresh GUID-named folder under %TEMP%.
                _baseDirectory = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
            }
            if (noShadowCopyDirectories != null)
            {
                try
                {
                    _noShadowCopyDirectories = ImmutableArray.CreateRange(noShadowCopyDirectories.Select(FileUtilities.NormalizeDirectoryPath));
                }
                catch (Exception e)
                {
                    throw new ArgumentException(e.Message, nameof(noShadowCopyDirectories));
                }
            }
            else
            {
                _noShadowCopyDirectories = ImmutableArray<string>.Empty;
            }
        }
        // Throws if the given argument is null or not an absolute path.
        private static void RequireAbsolutePath(string path, string argumentName)
        {
            if (path == null)
            {
                throw new ArgumentNullException(argumentName);
            }
            if (!PathUtilities.IsAbsolute(path))
            {
                throw new ArgumentException(ScriptingResources.AbsolutePathExpected, argumentName);
            }
        }
        /// <summary>
        /// Determine whether given path is under the shadow-copy directory managed by this shadow-copy provider.
        /// </summary>
        /// <param name="fullPath">Absolute path.</param>
        /// <exception cref="ArgumentNullException"><paramref name="fullPath"/> is null.</exception>
        /// <exception cref="ArgumentException"><paramref name="fullPath"/> is not an absolute path.</exception>
        public bool IsShadowCopy(string fullPath)
        {
            RequireAbsolutePath(fullPath, nameof(fullPath));
            string directory = ShadowCopyDirectory;
            if (directory == null)
            {
                // Nothing has been copied yet.
                return false;
            }
            string normalizedPath;
            try
            {
                normalizedPath = FileUtilities.NormalizeDirectoryPath(fullPath);
            }
            catch
            {
                // Paths that cannot be normalized are treated as not ours.
                return false;
            }
            return normalizedPath.StartsWith(directory, StringComparison.OrdinalIgnoreCase);
        }
        // Finalizer is a best-effort cleanup when Dispose was never called.
        ~MetadataShadowCopyProvider()
        {
            DisposeShadowCopies();
            DeleteShadowCopyDirectory();
        }
        /// <summary>
        /// Clears shadow-copy cache, disposes all allocated metadata, and attempts to delete copied files.
        /// </summary>
        public void Dispose()
        {
            // We are cleaning up deterministically; the finalizer has nothing left to do.
            GC.SuppressFinalize(this);
            lock (Guard)
            {
                DisposeShadowCopies();
                _shadowCopies.Clear();
            }
            DeleteShadowCopyDirectory();
        }
        private void DisposeShadowCopies()
        {
            foreach (var entry in _shadowCopies.Values)
            {
                // metadata file handles have been disposed already, but the xml doc file handle hasn't:
                entry.Public.DisposeFileHandles();
                // dispose metadata images:
                entry.Private.Dispose();
            }
        }
        private void DeleteShadowCopyDirectory()
        {
            var directory = ShadowCopyDirectory;
            if (Directory.Exists(directory))
            {
                try
                {
                    // First, strip the read-only bit off of any files.
                    var directoryInfo = new DirectoryInfo(directory);
                    foreach (var fileInfo in directoryInfo.EnumerateFiles(searchPattern: "*", searchOption: SearchOption.AllDirectories))
                    {
                        StripReadOnlyAttributeFromFile(fileInfo);
                    }
                    // Second, delete everything.
                    Directory.Delete(directory, recursive: true);
                }
                catch
                {
                    // Best effort only; deletion may fail if files are still locked.
                }
            }
        }
        private static void StripReadOnlyAttributeFromFile(FileInfo fileInfo)
        {
            try
            {
                if (fileInfo.IsReadOnly)
                {
                    fileInfo.IsReadOnly = false;
                }
            }
            catch
            {
                // There are many reasons this could fail. Just ignore it and move on.
            }
        }
        /// <summary>
        /// Gets or creates metadata for specified file path.
        /// </summary>
        /// <param name="fullPath">Full path to an assembly manifest module file or a standalone module file.</param>
        /// <param name="kind">Metadata kind (assembly or module).</param>
        /// <returns>Metadata for the specified file.</returns>
        /// <exception cref="IOException">Error reading file <paramref name="fullPath"/>. See <see cref="Exception.InnerException"/> for details.</exception>
        public Metadata GetMetadata(string fullPath, MetadataImageKind kind)
        {
            if (NeedsShadowCopy(fullPath))
            {
                return GetMetadataShadowCopyNoCheck(fullPath, kind).Metadata;
            }
            FileKey key = FileKey.Create(fullPath);
            lock (Guard)
            {
                CacheEntry<Metadata> existing;
                if (_noShadowCopyCache.TryGetValue(key, out existing))
                {
                    return existing.Public;
                }
            }
            // Create the metadata outside the lock; this may be expensive I/O.
            Metadata newMetadata;
            if (kind == MetadataImageKind.Assembly)
            {
                newMetadata = AssemblyMetadata.CreateFromFile(fullPath);
            }
            else
            {
                newMetadata = ModuleMetadata.CreateFromFile(fullPath);
            }
            // the files are locked (memory mapped) now
            key = FileKey.Create(fullPath);
            lock (Guard)
            {
                // Double-check: another thread may have raced us while we were reading the file.
                CacheEntry<Metadata> existing;
                if (_noShadowCopyCache.TryGetValue(key, out existing))
                {
                    newMetadata.Dispose();
                    return existing.Public;
                }
                Metadata publicMetadata = newMetadata.Copy();
                _noShadowCopyCache.Add(key, new CacheEntry<Metadata>(publicMetadata, newMetadata));
                return publicMetadata;
            }
        }
        /// <summary>
        /// Gets or creates a copy of specified assembly or standalone module.
        /// </summary>
        /// <param name="fullPath">Full path to an assembly manifest module file or a standalone module file.</param>
        /// <param name="kind">Metadata kind (assembly or module).</param>
        /// <returns>
        /// Copy of the specified file, or null if the file doesn't need a copy (<see cref="NeedsShadowCopy"/>).
        /// Returns the same object if called multiple times with the same path.
        /// </returns>
        /// <exception cref="ArgumentNullException"><paramref name="fullPath"/> is null.</exception>
        /// <exception cref="ArgumentException"><paramref name="fullPath"/> is not an absolute path.</exception>
        /// <exception cref="IOException">Error reading file <paramref name="fullPath"/>. See <see cref="Exception.InnerException"/> for details.</exception>
        public MetadataShadowCopy GetMetadataShadowCopy(string fullPath, MetadataImageKind kind)
        {
            return NeedsShadowCopy(fullPath) ? GetMetadataShadowCopyNoCheck(fullPath, kind) : null;
        }
        private MetadataShadowCopy GetMetadataShadowCopyNoCheck(string fullPath, MetadataImageKind kind)
        {
            if (kind < MetadataImageKind.Assembly || kind > MetadataImageKind.Module)
            {
                throw new ArgumentOutOfRangeException(nameof(kind));
            }
            FileKey key = FileKey.Create(fullPath);
            lock (Guard)
            {
                CacheEntry<MetadataShadowCopy> existing;
                if (CopyExistsOrIsSuppressed(key, out existing))
                {
                    return existing.Public;
                }
            }
            // Create the copy outside the lock; this does file I/O.
            CacheEntry<MetadataShadowCopy> newCopy = CreateMetadataShadowCopy(fullPath, kind);
            // last-write timestamp is copied from the original file at the time the snapshot was made:
            bool fault = true;
            try
            {
                key = new FileKey(fullPath, FileUtilities.GetFileTimeStamp(newCopy.Public.PrimaryModule.FullPath));
                fault = false;
            }
            finally
            {
                if (fault)
                {
                    // Timestamp read failed; don't leak the private metadata image.
                    newCopy.Private.Dispose();
                }
            }
            lock (Guard)
            {
                // Double-check: a concurrent caller may have cached a copy in the meantime.
                CacheEntry<MetadataShadowCopy> existing;
                if (CopyExistsOrIsSuppressed(key, out existing))
                {
                    newCopy.Private.Dispose();
                    return existing.Public;
                }
                _shadowCopies.Add(key, newCopy);
            }
            return newCopy.Public;
        }
        // Returns true with a default entry when the file was suppressed via SuppressShadowCopy;
        // otherwise performs a regular cache lookup. Caller must hold Guard.
        private bool CopyExistsOrIsSuppressed(FileKey key, out CacheEntry<MetadataShadowCopy> existing)
        {
            if (_lazySuppressedFiles != null && _lazySuppressedFiles.Contains(key.FullPath))
            {
                existing = default(CacheEntry<MetadataShadowCopy>);
                return true;
            }
            return _shadowCopies.TryGetValue(key, out existing);
        }
        /// <exception cref="ArgumentNullException"><paramref name="fullPath"/> is null.</exception>
        /// <exception cref="ArgumentException"><paramref name="fullPath"/> is not an absolute path.</exception>
        public PortableExecutableReference GetReference(string fullPath, MetadataReferenceProperties properties = default(MetadataReferenceProperties))
        {
            RequireAbsolutePath(fullPath, nameof(fullPath));
            return new ShadowCopyReference(this, fullPath, properties);
        }
        /// <summary>
        /// Suppresses shadow-copying of specified path.
        /// </summary>
        /// <param name="originalPath">Full path.</param>
        /// <exception cref="ArgumentNullException"><paramref name="originalPath"/> is null.</exception>
        /// <exception cref="ArgumentException"><paramref name="originalPath"/> is not an absolute path.</exception>
        /// <remarks>
        /// Doesn't affect files that have already been shadow-copied.
        /// </remarks>
        public void SuppressShadowCopy(string originalPath)
        {
            RequireAbsolutePath(originalPath, nameof(originalPath));
            lock (Guard)
            {
                if (_lazySuppressedFiles == null)
                {
                    _lazySuppressedFiles = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
                }
                _lazySuppressedFiles.Add(originalPath);
            }
        }
        /// <summary>
        /// Determines whether given file is a candidate for shadow-copy.
        /// </summary>
        /// <param name="fullPath">An absolute path.</param>
        /// <returns>True if the shadow-copy policy applies to the specified path.</returns>
        /// <exception cref="NullReferenceException"><paramref name="fullPath"/> is null.</exception>
        /// <exception cref="ArgumentException"><paramref name="fullPath"/> is not absolute.</exception>
        public bool NeedsShadowCopy(string fullPath)
        {
            RequireAbsolutePath(fullPath, nameof(fullPath));
            string directory = Path.GetDirectoryName(fullPath);
            // do not shadow-copy shadow-copies:
            string referencesDir = ShadowCopyDirectory;
            if (referencesDir != null && directory.StartsWith(referencesDir, StringComparison.Ordinal))
            {
                return false;
            }
            return !_noShadowCopyDirectories.Any(dir => directory.StartsWith(dir, StringComparison.Ordinal));
        }
        private CacheEntry<MetadataShadowCopy> CreateMetadataShadowCopy(string originalPath, MetadataImageKind kind)
        {
            // Retry loop: the shadow-copy directory may be deleted underneath us by an external process.
            int attempts = 10;
            while (true)
            {
                try
                {
                    if (ShadowCopyDirectory == null)
                    {
                        ShadowCopyDirectory = CreateUniqueDirectory(_baseDirectory);
                    }
                    // Create directory for the assembly.
                    // If the assembly has any modules they have to be copied to the same directory
                    // and have the same names as specified in metadata.
                    string assemblyDir = CreateUniqueDirectory(ShadowCopyDirectory);
                    string shadowCopyPath = Path.Combine(assemblyDir, Path.GetFileName(originalPath));
                    ShadowCopy documentationFileCopy = null;
                    string xmlOriginalPath;
                    if (XmlFileResolverForAssemblies.TryFindXmlDocumentationFile(originalPath, out xmlOriginalPath))
                    {
                        // TODO (tomat): how do doc comments work for multi-module assembly?
                        var xmlCopyPath = Path.ChangeExtension(shadowCopyPath, ".xml");
                        var xmlStream = CopyFile(xmlOriginalPath, xmlCopyPath, fileMayNotExist: true);
                        if (xmlStream != null)
                        {
                            documentationFileCopy = new ShadowCopy(xmlStream, xmlOriginalPath, xmlCopyPath);
                        }
                    }
                    var manifestModuleCopyStream = CopyFile(originalPath, shadowCopyPath);
                    var manifestModuleCopy = new ShadowCopy(manifestModuleCopyStream, originalPath, shadowCopyPath);
                    Metadata privateMetadata;
                    if (kind == MetadataImageKind.Assembly)
                    {
                        privateMetadata = CreateAssemblyMetadata(manifestModuleCopyStream, originalPath, shadowCopyPath);
                    }
                    else
                    {
                        privateMetadata = CreateModuleMetadata(manifestModuleCopyStream);
                    }
                    var publicMetadata = privateMetadata.Copy();
                    return new CacheEntry<MetadataShadowCopy>(new MetadataShadowCopy(manifestModuleCopy, documentationFileCopy, publicMetadata), privateMetadata);
                }
                catch (DirectoryNotFoundException)
                {
                    // the shadow copy directory has been deleted - try to copy all files again
                    if (!Directory.Exists(ShadowCopyDirectory))
                    {
                        ShadowCopyDirectory = null;
                        if (attempts-- > 0)
                        {
                            continue;
                        }
                    }
                    throw;
                }
            }
        }
        private AssemblyMetadata CreateAssemblyMetadata(FileStream manifestModuleCopyStream, string originalPath, string shadowCopyPath)
        {
            // We don't need to use the global metadata cache here since the shadow copy
            // won't change and is private to us - only users of the same shadow copy provider see it.
            ImmutableArray<ModuleMetadata>.Builder moduleBuilder = null;
            bool fault = true;
            ModuleMetadata manifestModule = null;
            try
            {
                manifestModule = CreateModuleMetadata(manifestModuleCopyStream);
                string originalDirectory = null, shadowCopyDirectory = null;
                foreach (string moduleName in manifestModule.GetModuleNames())
                {
                    // Lazily set up the builder; most assemblies are single-module.
                    if (moduleBuilder == null)
                    {
                        moduleBuilder = ImmutableArray.CreateBuilder<ModuleMetadata>();
                        moduleBuilder.Add(manifestModule);
                        originalDirectory = Path.GetDirectoryName(originalPath);
                        shadowCopyDirectory = Path.GetDirectoryName(shadowCopyPath);
                    }
                    FileStream moduleCopyStream = CopyFile(
                        originalPath: Path.Combine(originalDirectory, moduleName),
                        shadowCopyPath: Path.Combine(shadowCopyDirectory, moduleName));
                    moduleBuilder.Add(CreateModuleMetadata(moduleCopyStream));
                }
                var modules = (moduleBuilder != null) ? moduleBuilder.ToImmutable() : ImmutableArray.Create(manifestModule);
                fault = false;
                return AssemblyMetadata.Create(modules);
            }
            finally
            {
                // On failure, dispose everything we created (index 0 is manifestModule, disposed above).
                if (fault)
                {
                    if (manifestModule != null)
                    {
                        manifestModule.Dispose();
                    }
                    if (moduleBuilder != null)
                    {
                        for (int i = 1; i < moduleBuilder.Count; i++)
                        {
                            moduleBuilder[i].Dispose();
                        }
                    }
                }
            }
        }
        private static ModuleMetadata CreateModuleMetadata(FileStream stream)
        {
            // The Stream is held by the ModuleMetadata to read metadata on demand.
            // We hand off the responsibility for closing the stream to the metadata object.
            return ModuleMetadata.CreateFromStream(stream, leaveOpen: false);
        }
        private string CreateUniqueDirectory(string basePath)
        {
            int attempts = 10;
            while (true)
            {
                string dir = Path.Combine(basePath, Guid.NewGuid().ToString());
                if (File.Exists(dir) || Directory.Exists(dir))
                {
                    // try a different name (guid):
                    continue;
                }
                try
                {
                    Directory.CreateDirectory(dir);
                    return dir;
                }
                catch (IOException)
                {
                    // Some other process might have created a file of the same name after we checked for its existence.
                    if (File.Exists(dir))
                    {
                        continue;
                    }
                    // This file might also have been deleted by now. So try again for a while and then give up.
                    if (--attempts == 0)
                    {
                        throw;
                    }
                }
            }
        }
        // Copies the file and returns a read-only stream over the copy, or null when the
        // source is missing and fileMayNotExist is true.
        private FileStream CopyFile(string originalPath, string shadowCopyPath, bool fileMayNotExist = false)
        {
            try
            {
                File.Copy(originalPath, shadowCopyPath, overwrite: true);
                StripReadOnlyAttributeFromFile(new FileInfo(shadowCopyPath));
                return new FileStream(shadowCopyPath, FileMode.Open, FileAccess.Read, FileShare.Read);
            }
            catch (FileNotFoundException)
            {
                if (!fileMayNotExist)
                {
                    throw;
                }
            }
            return null;
        }
        #region Test hooks
        // for testing only
        internal int CacheSize
        {
            get { return _shadowCopies.Count; }
        }
        #endregion
    }
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Dynamic.Utils;
using System.Reflection;
namespace System.Linq.Expressions.Interpreter
{
    /// <summary>
    /// Interpreter instruction implementing the less-than comparison for the
    /// primitive numeric types. Pops two operands and pushes the boolean result
    /// (or a null-sentinel value when an operand is null).
    /// </summary>
    internal abstract class LessThanInstruction : Instruction
    {
        // Value pushed when either operand is null: null for lifted-to-null
        // semantics, a boxed false otherwise (see Create).
        private readonly object _nullValue;
        // Lazily-created singletons, one per operand type and lifting mode.
        private static Instruction s_SByte, s_Int16, s_Char, s_Int32, s_Int64, s_Byte, s_UInt16, s_UInt32, s_UInt64, s_Single, s_Double;
        private static Instruction s_liftedToNullSByte, s_liftedToNullInt16, s_liftedToNullChar, s_liftedToNullInt32, s_liftedToNullInt64, s_liftedToNullByte, s_liftedToNullUInt16, s_liftedToNullUInt32, s_liftedToNullUInt64, s_liftedToNullSingle, s_liftedToNullDouble;
        public override int ConsumedStack => 2;
        public override int ProducedStack => 1;
        public override string InstructionName => "LessThan";
        private LessThanInstruction(object nullValue)
        {
            _nullValue = nullValue;
        }
        // One specialization per primitive type. Each pops the right operand
        // first (it was pushed last), then the left, and unboxes both.
        private sealed class LessThanSByte : LessThanInstruction
        {
            public LessThanSByte(object nullValue)
                : base(nullValue)
            {
            }
            public override int Run(InterpretedFrame frame)
            {
                object right = frame.Pop();
                object left = frame.Pop();
                if (left == null || right == null)
                {
                    frame.Push(_nullValue);
                }
                else
                {
                    frame.Push((sbyte)left < (sbyte)right);
                }
                return 1;
            }
        }
        private sealed class LessThanInt16 : LessThanInstruction
        {
            public LessThanInt16(object nullValue)
                : base(nullValue)
            {
            }
            public override int Run(InterpretedFrame frame)
            {
                object right = frame.Pop();
                object left = frame.Pop();
                if (left == null || right == null)
                {
                    frame.Push(_nullValue);
                }
                else
                {
                    frame.Push((short)left < (short)right);
                }
                return 1;
            }
        }
        private sealed class LessThanChar : LessThanInstruction
        {
            public LessThanChar(object nullValue)
                : base(nullValue)
            {
            }
            public override int Run(InterpretedFrame frame)
            {
                object right = frame.Pop();
                object left = frame.Pop();
                if (left == null || right == null)
                {
                    frame.Push(_nullValue);
                }
                else
                {
                    frame.Push((char)left < (char)right);
                }
                return 1;
            }
        }
        private sealed class LessThanInt32 : LessThanInstruction
        {
            public LessThanInt32(object nullValue)
                : base(nullValue)
            {
            }
            public override int Run(InterpretedFrame frame)
            {
                object right = frame.Pop();
                object left = frame.Pop();
                if (left == null || right == null)
                {
                    frame.Push(_nullValue);
                }
                else
                {
                    frame.Push((int)left < (int)right);
                }
                return 1;
            }
        }
        private sealed class LessThanInt64 : LessThanInstruction
        {
            public LessThanInt64(object nullValue)
                : base(nullValue)
            {
            }
            public override int Run(InterpretedFrame frame)
            {
                object right = frame.Pop();
                object left = frame.Pop();
                if (left == null || right == null)
                {
                    frame.Push(_nullValue);
                }
                else
                {
                    frame.Push((long)left < (long)right);
                }
                return 1;
            }
        }
        private sealed class LessThanByte : LessThanInstruction
        {
            public LessThanByte(object nullValue)
                : base(nullValue)
            {
            }
            public override int Run(InterpretedFrame frame)
            {
                object right = frame.Pop();
                object left = frame.Pop();
                if (left == null || right == null)
                {
                    frame.Push(_nullValue);
                }
                else
                {
                    frame.Push((byte)left < (byte)right);
                }
                return 1;
            }
        }
        private sealed class LessThanUInt16 : LessThanInstruction
        {
            public LessThanUInt16(object nullValue)
                : base(nullValue)
            {
            }
            public override int Run(InterpretedFrame frame)
            {
                object right = frame.Pop();
                object left = frame.Pop();
                if (left == null || right == null)
                {
                    frame.Push(_nullValue);
                }
                else
                {
                    frame.Push((ushort)left < (ushort)right);
                }
                return 1;
            }
        }
        private sealed class LessThanUInt32 : LessThanInstruction
        {
            public LessThanUInt32(object nullValue)
                : base(nullValue)
            {
            }
            public override int Run(InterpretedFrame frame)
            {
                object right = frame.Pop();
                object left = frame.Pop();
                if (left == null || right == null)
                {
                    frame.Push(_nullValue);
                }
                else
                {
                    frame.Push((uint)left < (uint)right);
                }
                return 1;
            }
        }
        private sealed class LessThanUInt64 : LessThanInstruction
        {
            public LessThanUInt64(object nullValue)
                : base(nullValue)
            {
            }
            public override int Run(InterpretedFrame frame)
            {
                object right = frame.Pop();
                object left = frame.Pop();
                if (left == null || right == null)
                {
                    frame.Push(_nullValue);
                }
                else
                {
                    frame.Push((ulong)left < (ulong)right);
                }
                return 1;
            }
        }
        private sealed class LessThanSingle : LessThanInstruction
        {
            public LessThanSingle(object nullValue)
                : base(nullValue)
            {
            }
            public override int Run(InterpretedFrame frame)
            {
                object right = frame.Pop();
                object left = frame.Pop();
                if (left == null || right == null)
                {
                    frame.Push(_nullValue);
                }
                else
                {
                    frame.Push((float)left < (float)right);
                }
                return 1;
            }
        }
        private sealed class LessThanDouble : LessThanInstruction
        {
            public LessThanDouble(object nullValue)
                : base(nullValue)
            {
            }
            public override int Run(InterpretedFrame frame)
            {
                object right = frame.Pop();
                object left = frame.Pop();
                if (left == null || right == null)
                {
                    frame.Push(_nullValue);
                }
                else
                {
                    frame.Push((double)left < (double)right);
                }
                return 1;
            }
        }
        /// <summary>
        /// Returns the cached instruction for the given operand type.
        /// When <paramref name="liftedToNull"/> is true, a null operand yields a
        /// null result; otherwise it yields a boxed false.
        /// </summary>
        public static Instruction Create(Type type, bool liftedToNull = false)
        {
            Debug.Assert(!type.IsEnum);
            if (liftedToNull)
            {
                switch (type.GetNonNullableType().GetTypeCode())
                {
                    case TypeCode.SByte: return s_liftedToNullSByte ?? (s_liftedToNullSByte = new LessThanSByte(null));
                    case TypeCode.Int16: return s_liftedToNullInt16 ?? (s_liftedToNullInt16 = new LessThanInt16(null));
                    case TypeCode.Char: return s_liftedToNullChar ?? (s_liftedToNullChar = new LessThanChar(null));
                    case TypeCode.Int32: return s_liftedToNullInt32 ?? (s_liftedToNullInt32 = new LessThanInt32(null));
                    case TypeCode.Int64: return s_liftedToNullInt64 ?? (s_liftedToNullInt64 = new LessThanInt64(null));
                    case TypeCode.Byte: return s_liftedToNullByte ?? (s_liftedToNullByte = new LessThanByte(null));
                    case TypeCode.UInt16: return s_liftedToNullUInt16 ?? (s_liftedToNullUInt16 = new LessThanUInt16(null));
                    case TypeCode.UInt32: return s_liftedToNullUInt32 ?? (s_liftedToNullUInt32 = new LessThanUInt32(null));
                    case TypeCode.UInt64: return s_liftedToNullUInt64 ?? (s_liftedToNullUInt64 = new LessThanUInt64(null));
                    case TypeCode.Single: return s_liftedToNullSingle ?? (s_liftedToNullSingle = new LessThanSingle(null));
                    case TypeCode.Double: return s_liftedToNullDouble ?? (s_liftedToNullDouble = new LessThanDouble(null));
                    default:
                        throw Error.ExpressionNotSupportedForType("LessThan", type);
                }
            }
            else
            {
                switch (type.GetNonNullableType().GetTypeCode())
                {
                    case TypeCode.SByte: return s_SByte ?? (s_SByte = new LessThanSByte(Utils.BoxedFalse));
                    case TypeCode.Int16: return s_Int16 ?? (s_Int16 = new LessThanInt16(Utils.BoxedFalse));
                    case TypeCode.Char: return s_Char ?? (s_Char = new LessThanChar(Utils.BoxedFalse));
                    case TypeCode.Int32: return s_Int32 ?? (s_Int32 = new LessThanInt32(Utils.BoxedFalse));
                    case TypeCode.Int64: return s_Int64 ?? (s_Int64 = new LessThanInt64(Utils.BoxedFalse));
                    case TypeCode.Byte: return s_Byte ?? (s_Byte = new LessThanByte(Utils.BoxedFalse));
                    case TypeCode.UInt16: return s_UInt16 ?? (s_UInt16 = new LessThanUInt16(Utils.BoxedFalse));
                    case TypeCode.UInt32: return s_UInt32 ?? (s_UInt32 = new LessThanUInt32(Utils.BoxedFalse));
                    case TypeCode.UInt64: return s_UInt64 ?? (s_UInt64 = new LessThanUInt64(Utils.BoxedFalse));
                    case TypeCode.Single: return s_Single ?? (s_Single = new LessThanSingle(Utils.BoxedFalse));
                    case TypeCode.Double: return s_Double ?? (s_Double = new LessThanDouble(Utils.BoxedFalse));
                    default:
                        throw Error.ExpressionNotSupportedForType("LessThan", type);
                }
            }
        }
    }
}
| |
/*
* SubSonic - http://subsonicproject.com
*
* The contents of this file are subject to the Mozilla Public
* License Version 1.1 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an
* "AS IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*/
using System;
using System.ComponentModel;
using System.Data;
using System.Web.UI;
using System.Web.UI.WebControls;
using SubSonic.Utilities;
namespace SubSonic
{
/// <summary>
/// Summary for the DropDown class
/// </summary>
[DefaultProperty("Text")]
[ToolboxData("<{0}:DropDown runat=server></{0}:DropDown>")]
public class DropDown : DropDownList
{
private string orderField = String.Empty;
private string promptText = "--Please Select--";
private string promptValue = String.Empty;
private string providerName = String.Empty;
private bool showPrompt;
private string tableName = String.Empty;
private string textField = String.Empty;
private string valueField = String.Empty;
private string whereField = String.Empty;
private string whereValue = String.Empty;
/// <summary>
/// Gets or sets the prompt text.
/// </summary>
/// <value>The prompt text.</value>
public string PromptText
{
get { return promptText; }
set { promptText = value; }
}
/// <summary>
/// Gets or sets a value indicating whether [show prompt].
/// </summary>
/// <value><c>true</c> if [show prompt]; otherwise, <c>false</c>.</value>
public bool ShowPrompt
{
get { return showPrompt; }
set { showPrompt = value; }
}
/// <summary>
/// Gets or sets the prompt value.
/// </summary>
/// <value>The prompt value.</value>
public string PromptValue
{
get { return promptValue; }
set { promptValue = value; }
}
/// <summary>
/// Gets or sets the name of the provider.
/// </summary>
/// <value>The name of the provider.</value>
public string ProviderName
{
get { return providerName; }
set { providerName = value; }
}
/// <summary>
/// Gets or sets the name of the table.
/// </summary>
/// <value>The name of the table.</value>
public string TableName
{
get { return tableName; }
set { tableName = value; }
}
/// <summary>
/// Gets or sets the text field.
/// </summary>
/// <value>The text field.</value>
public string TextField
{
get { return textField; }
set { textField = value; }
}
/// <summary>
/// Gets or sets the value field.
/// </summary>
/// <value>The value field.</value>
public string ValueField
{
get { return valueField; }
set { valueField = value; }
}
/// <summary>
/// Gets or sets the order field.
/// </summary>
/// <value>The order field.</value>
public string OrderField
{
get { return orderField; }
set { orderField = value; }
}
/// <summary>
/// Gets or sets the where field.
/// </summary>
/// <value>The where field.</value>
public string WhereField
{
get { return whereField; }
set { whereField = value; }
}
/// <summary>
/// Gets or sets the where value.
/// </summary>
/// <value>The where value.</value>
public string WhereValue
{
get { return whereValue; }
set { whereValue = value; }
}
/// <summary>
/// Handles the <see cref="E:System.Web.UI.Control.Init"/> event.
/// </summary>
/// <param name="e">An <see cref="T:System.EventArgs"/> object that contains the event data.</param>
protected override void OnInit(EventArgs e)
{
base.OnInit(e);
if(!DesignMode)
{
// load em up
// cheap way to check for load state
if(Items.Count == 0)
{
if(!String.IsNullOrEmpty(tableName))
{
DataProvider provider = DataService.GetInstance(ProviderName);
SqlQuery q = new Select(provider).From(tableName);
q.CheckLogicalDelete();
if(String.IsNullOrEmpty(valueField) || String.IsNullOrEmpty(textField))
{
// look it up using the table schema
TableSchema.Table tbl = DataService.GetSchema(tableName, providerName, TableType.Table);
if(tbl != null)
{
if(String.IsNullOrEmpty(valueField))
valueField = tbl.PrimaryKey.ColumnName;
if(String.IsNullOrEmpty(textField))
textField = tbl.Columns.Count > 1 ? tbl.Columns[1].ColumnName : tbl.Columns[0].ColumnName;
}
else
throw new Exception("Table name '" + tableName + "' using Provider '" + providerName + "' doesn't work");
}
q.SelectColumnList = new[] {valueField, textField};
if(!String.IsNullOrEmpty(OrderField))
q.OrderAsc(OrderField);
else
q.OrderAsc(textField);
if(!String.IsNullOrEmpty(WhereField))
q.Where(WhereField).IsEqualTo(WhereValue);
IDataReader rdr = null;
try
{
rdr = q.ExecuteReader();
while(rdr.Read())
{
ListItem item = new ListItem(rdr[1].ToString(), rdr[0].ToString());
Items.Add(item);
}
}
catch(DataException x)
{
throw new Exception("Error loading up ListItems for " + ClientID + ": " + x.Message);
}
finally
{
if(rdr != null)
rdr.Close();
}
ListItem prompt = new ListItem(promptText, PromptValue);
if(showPrompt)
Items.Insert(0, prompt);
if(!String.IsNullOrEmpty(SelectedValue))
{
foreach(ListItem item in Items)
{
if(Utility.IsMatch(item.Value, SelectedValue))
{
item.Selected = true;
break;
}
}
}
}
}
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Globalization;
using System.Threading;
using Xunit;
namespace System.Numerics.Tests
{
public class parseTest
{
        // Number of random samples generated per scenario.
        private readonly static int s_samples = 10;
        // Fixed seed keeps the randomized tests deterministic across runs.
        private readonly static Random s_random = new Random(100);
// Invariant culture is commonly used for (de-)serialization and similar to en-US
// Ukrainian (Ukraine) added to catch regressions (issue #1642)
// Current culture to get additional value out of glob/loc test runs
/// <summary>
/// Theory data: the invariant culture, uk-UA (regression coverage), and the
/// ambient culture when it is not already uk-UA.
/// </summary>
public static IEnumerable<object[]> Cultures
{
    get
    {
        // Always exercise the invariant culture and uk-UA.
        yield return new object[] { CultureInfo.InvariantCulture };
        yield return new object[] { new CultureInfo("uk-UA") };
        // Add the ambient culture unless it would duplicate uk-UA.
        CultureInfo ambient = CultureInfo.CurrentCulture;
        if (ambient.ToString() != "uk-UA")
            yield return new object[] { ambient };
    }
}
/// <summary>
/// Drives the whole BigInteger parse/ToString suite under <paramref name="culture"/>:
/// default-style parsing, each individual NumberStyles flag, the composite styles,
/// an invalid NumberStyles value, and custom-NumberFormatInfo strings.
/// The thread's original culture is restored in the finally block.
/// </summary>
[Theory]
[MemberData(nameof(Cultures))]
[OuterLoop]
public static void RunParseToStringTests(CultureInfo culture)
{
    CultureInfo originalCulture = Thread.CurrentThread.CurrentCulture;
    try
    {
        // NOTE(review): unused local — left as-is (doc-only pass).
        byte[] tempByteArray1 = new byte[0];
        Thread.CurrentThread.CurrentCulture = culture;
        //default style
        VerifyDefaultParse(s_random);
        //single NumberStyles
        VerifyNumberStyles(NumberStyles.None, s_random);
        VerifyNumberStyles(NumberStyles.AllowLeadingWhite, s_random);
        VerifyNumberStyles(NumberStyles.AllowTrailingWhite, s_random);
        VerifyNumberStyles(NumberStyles.AllowLeadingSign, s_random);
        VerifyNumberStyles(NumberStyles.AllowTrailingSign, s_random);
        VerifyNumberStyles(NumberStyles.AllowParentheses, s_random);
        VerifyNumberStyles(NumberStyles.AllowDecimalPoint, s_random);
        VerifyNumberStyles(NumberStyles.AllowThousands, s_random);
        VerifyNumberStyles(NumberStyles.AllowExponent, s_random);
        VerifyNumberStyles(NumberStyles.AllowCurrencySymbol, s_random);
        VerifyNumberStyles(NumberStyles.AllowHexSpecifier, s_random);
        //composite NumberStyles
        VerifyNumberStyles(NumberStyles.Integer, s_random);
        VerifyNumberStyles(NumberStyles.HexNumber, s_random);
        VerifyNumberStyles(NumberStyles.Number, s_random);
        VerifyNumberStyles(NumberStyles.Float, s_random);
        VerifyNumberStyles(NumberStyles.Currency, s_random);
        VerifyNumberStyles(NumberStyles.Any, s_random);
        //invalid number style
        // ******InvalidNumberStyles
        NumberStyles invalid = (NumberStyles)0x7c00;
        Assert.Throws<ArgumentException>(() =>
        {
            BigInteger.Parse("1", invalid).ToString("d");
        });
        Assert.Throws<ArgumentException>(() =>
        {
            BigInteger junk;
            BigInteger.TryParse("1", invalid, null, out junk);
            // NOTE(review): TryParse above throws for the invalid style, so this
            // Assert.Equal is never reached — confirm intent.
            Assert.Equal(junk.ToString("d"), "1");
        });
        //FormatProvider tests
        RunFormatProviderParseStrings();
    }
    finally
    {
        Thread.CurrentThread.CurrentCulture = originalCulture;
    }
}
/// <summary>
/// Parses strings written with the exotic symbols configured by MarkUp
/// ('@' currency, '#'/'&amp;' group separators, '&lt;'/'&gt;' signs, '^'/'!' decimal points)
/// and checks the resulting BigInteger values.
/// </summary>
private static void RunFormatProviderParseStrings()
{
    NumberFormatInfo nfi = new NumberFormatInfo();
    nfi = MarkUp(nfi);
    //Currencies
    // ***************************
    // *** FormatProvider - Currencies
    // ***************************
    VerifyFormatParse("@ 12#34#56!", NumberStyles.Any, nfi, new BigInteger(123456));
    VerifyFormatParse("(12#34#56!@)", NumberStyles.Any, nfi, new BigInteger(-123456));
    //Numbers
    // ***************************
    // *** FormatProvider - Numbers
    // ***************************
    VerifySimpleFormatParse(">1234567", nfi, new BigInteger(1234567));
    VerifySimpleFormatParse("<1234567", nfi, new BigInteger(-1234567));
    VerifyFormatParse("123&4567^", NumberStyles.Any, nfi, new BigInteger(1234567));
    VerifyFormatParse("123&4567^ <", NumberStyles.Any, nfi, new BigInteger(-1234567));
}
/// <summary>
/// Exercises Parse/TryParse with the default styles (NumberStyles.Integer):
/// plain digit strings, leading/trailing whitespace and leading signs must
/// succeed; trailing signs, parentheses, decimal points, group separators,
/// exponents, currency symbols, hex digits, and random invalid characters
/// must all fail with FormatException.
/// </summary>
public static void VerifyDefaultParse(Random random)
{
    // BasicTests
    VerifyFailParseToString(null, typeof(ArgumentNullException));
    VerifyFailParseToString(String.Empty, typeof(FormatException));
    VerifyParseToString("0");
    VerifyParseToString("000");
    VerifyParseToString("1");
    VerifyParseToString("001");
    // SimpleNumbers - Small
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString(GetDigitSequence(1, 10, random));
    }
    // SimpleNumbers - Large
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString(GetDigitSequence(100, 1000, random));
    }
    // Leading White
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString("\u0009\u0009\u0009" + GetDigitSequence(1, 100, random));
        VerifyParseToString("\u000A\u000A\u000A" + GetDigitSequence(1, 100, random));
        VerifyParseToString("\u000B\u000B\u000B" + GetDigitSequence(1, 100, random));
        VerifyParseToString("\u000C\u000C\u000C" + GetDigitSequence(1, 100, random));
        VerifyParseToString("\u000D\u000D\u000D" + GetDigitSequence(1, 100, random));
        VerifyParseToString("\u0020\u0020\u0020" + GetDigitSequence(1, 100, random));
    }
    // Trailing White
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString(GetDigitSequence(1, 100, random) + "\u0009\u0009\u0009");
        VerifyParseToString(GetDigitSequence(1, 100, random) + "\u000A\u000A\u000A");
        VerifyParseToString(GetDigitSequence(1, 100, random) + "\u000B\u000B\u000B");
        VerifyParseToString(GetDigitSequence(1, 100, random) + "\u000C\u000C\u000C");
        VerifyParseToString(GetDigitSequence(1, 100, random) + "\u000D\u000D\u000D");
        VerifyParseToString(GetDigitSequence(1, 100, random) + "\u0020\u0020\u0020");
    }
    // Leading Sign
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString(CultureInfo.CurrentCulture.NumberFormat.NegativeSign + GetDigitSequence(1, 100, random));
        VerifyParseToString(CultureInfo.CurrentCulture.NumberFormat.PositiveSign + GetDigitSequence(1, 100, random));
    }
    // Trailing Sign
    for (int i = 0; i < s_samples; i++)
    {
        VerifyFailParseToString(GetDigitSequence(1, 100, random) + CultureInfo.CurrentCulture.NumberFormat.NegativeSign, typeof(FormatException));
        VerifyFailParseToString(GetDigitSequence(1, 100, random) + CultureInfo.CurrentCulture.NumberFormat.PositiveSign, typeof(FormatException));
    }
    // Parentheses
    for (int i = 0; i < s_samples; i++)
    {
        VerifyFailParseToString("(" + GetDigitSequence(1, 100, random) + ")", typeof(FormatException));
    }
    // Decimal Point - end
    for (int i = 0; i < s_samples; i++)
    {
        VerifyFailParseToString(GetDigitSequence(1, 100, random) + CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator, typeof(FormatException));
    }
    // Decimal Point - middle
    for (int i = 0; i < s_samples; i++)
    {
        VerifyFailParseToString(GetDigitSequence(1, 100, random) + CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator + "000", typeof(FormatException));
    }
    // Decimal Point - non-zero decimal
    for (int i = 0; i < s_samples; i++)
    {
        VerifyFailParseToString(GetDigitSequence(1, 100, random) + CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator + GetDigitSequence(20, 25, random), typeof(FormatException));
    }
    // Thousands
    for (int i = 0; i < s_samples; i++)
    {
        int[] sizes = null;
        string seperator = null;
        string digits = null;
        sizes = CultureInfo.CurrentCulture.NumberFormat.NumberGroupSizes;
        seperator = CultureInfo.CurrentCulture.NumberFormat.NumberGroupSeparator;
        digits = GenerateGroups(sizes, seperator, random);
        VerifyFailParseToString(digits, typeof(FormatException));
    }
    // Exponent
    for (int i = 0; i < s_samples; i++)
    {
        VerifyFailParseToString(GetDigitSequence(1, 100, random) + "e" + CultureInfo.CurrentCulture.NumberFormat.PositiveSign + GetDigitSequence(1, 3, random), typeof(FormatException));
        VerifyFailParseToString(GetDigitSequence(1, 100, random) + "e" + CultureInfo.CurrentCulture.NumberFormat.NegativeSign + GetDigitSequence(1, 3, random), typeof(FormatException));
    }
    // Currency Symbol
    for (int i = 0; i < s_samples; i++)
    {
        VerifyFailParseToString(CultureInfo.CurrentCulture.NumberFormat.CurrencySymbol + GetDigitSequence(1, 100, random), typeof(FormatException));
    }
    // Hex Specifier
    for (int i = 0; i < s_samples; i++)
    {
        VerifyFailParseToString(GetHexDigitSequence(1, 100, random), typeof(FormatException));
    }
    // Invalid Chars
    for (int i = 0; i < s_samples; i++)
    {
        VerifyFailParseToString(GetDigitSequence(1, 50, random) + GetRandomInvalidChar(random) + GetDigitSequence(1, 50, random), typeof(FormatException));
    }
}
/// <summary>
/// Exercises Parse/TryParse under an explicit <paramref name="ns"/>: for each
/// input category (whitespace, signs, parentheses, decimals, group separators,
/// exponents, currency, hex, invalid chars) the expected success is computed
/// from the corresponding NumberStyles flag being present in <paramref name="ns"/>.
/// </summary>
public static void VerifyNumberStyles(NumberStyles ns, Random random)
{
    VerifyParseToString(null, ns, false, null);
    VerifyParseToString(String.Empty, ns, false);
    VerifyParseToString("0", ns, true);
    VerifyParseToString("000", ns, true);
    VerifyParseToString("1", ns, true);
    VerifyParseToString("001", ns, true);
    // SimpleNumbers - Small
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString(GetDigitSequence(1, 10, random), ns, true);
    }
    // SimpleNumbers - Large
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString(GetDigitSequence(100, 1000, random), ns, true);
    }
    // Leading White
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString("\u0009\u0009\u0009" + GetDigitSequence(1, 100, random), ns, ((ns & NumberStyles.AllowLeadingWhite) != 0));
        VerifyParseToString("\u000A\u000A\u000A" + GetDigitSequence(1, 100, random), ns, ((ns & NumberStyles.AllowLeadingWhite) != 0));
        VerifyParseToString("\u000B\u000B\u000B" + GetDigitSequence(1, 100, random), ns, ((ns & NumberStyles.AllowLeadingWhite) != 0));
        VerifyParseToString("\u000C\u000C\u000C" + GetDigitSequence(1, 100, random), ns, ((ns & NumberStyles.AllowLeadingWhite) != 0));
        VerifyParseToString("\u000D\u000D\u000D" + GetDigitSequence(1, 100, random), ns, ((ns & NumberStyles.AllowLeadingWhite) != 0));
        VerifyParseToString("\u0020\u0020\u0020" + GetDigitSequence(1, 100, random), ns, ((ns & NumberStyles.AllowLeadingWhite) != 0));
    }
    // Trailing White
    // Trailing U+0020 gets special handling: some cultures use NBSP as the group
    // separator and then accept plain spaces as separators too — see
    // FailureNotExpectedForTrailingWhite.
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString(GetDigitSequence(1, 100, random) + "\u0009\u0009\u0009", ns, FailureNotExpectedForTrailingWhite(ns, false));
        VerifyParseToString(GetDigitSequence(1, 100, random) + "\u000A\u000A\u000A", ns, FailureNotExpectedForTrailingWhite(ns, false));
        VerifyParseToString(GetDigitSequence(1, 100, random) + "\u000B\u000B\u000B", ns, FailureNotExpectedForTrailingWhite(ns, false));
        VerifyParseToString(GetDigitSequence(1, 100, random) + "\u000C\u000C\u000C", ns, FailureNotExpectedForTrailingWhite(ns, false));
        VerifyParseToString(GetDigitSequence(1, 100, random) + "\u000D\u000D\u000D", ns, FailureNotExpectedForTrailingWhite(ns, false));
        VerifyParseToString(GetDigitSequence(1, 100, random) + "\u0020\u0020\u0020", ns, FailureNotExpectedForTrailingWhite(ns, true));
    }
    // Leading Sign
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString(CultureInfo.CurrentCulture.NumberFormat.NegativeSign + GetDigitSequence(1, 100, random), ns, ((ns & NumberStyles.AllowLeadingSign) != 0));
        VerifyParseToString(CultureInfo.CurrentCulture.NumberFormat.PositiveSign + GetDigitSequence(1, 100, random), ns, ((ns & NumberStyles.AllowLeadingSign) != 0));
    }
    // Trailing Sign
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString(GetDigitSequence(1, 100, random) + CultureInfo.CurrentCulture.NumberFormat.NegativeSign, ns, ((ns & NumberStyles.AllowTrailingSign) != 0));
        VerifyParseToString(GetDigitSequence(1, 100, random) + CultureInfo.CurrentCulture.NumberFormat.PositiveSign, ns, ((ns & NumberStyles.AllowTrailingSign) != 0));
    }
    // Parentheses
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString("(" + GetDigitSequence(1, 100, random) + ")", ns, ((ns & NumberStyles.AllowParentheses) != 0));
    }
    // Decimal Point - end
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString(GetDigitSequence(1, 100, random) + CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator, ns, ((ns & NumberStyles.AllowDecimalPoint) != 0));
    }
    // Decimal Point - middle (an all-zero fraction is representable as an integer)
    for (int i = 0; i < s_samples; i++)
    {
        string digits = GetDigitSequence(1, 100, random);
        VerifyParseToString(digits + CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator + "000", ns, ((ns & NumberStyles.AllowDecimalPoint) != 0), digits);
    }
    // Decimal Point - non-zero decimal (never representable as an integer)
    for (int i = 0; i < s_samples; i++)
    {
        string digits = GetDigitSequence(1, 100, random);
        VerifyParseToString(digits + CultureInfo.CurrentCulture.NumberFormat.NumberDecimalSeparator + GetDigitSequence(20, 25, random), ns, false, digits);
    }
    // Thousands
    for (int i = 0; i < s_samples; i++)
    {
        int[] sizes = null;
        string seperator = null;
        string digits = null;
        sizes = CultureInfo.CurrentCulture.NumberFormat.NumberGroupSizes;
        seperator = CultureInfo.CurrentCulture.NumberFormat.NumberGroupSeparator;
        digits = GenerateGroups(sizes, seperator, random);
        VerifyParseToString(digits, ns, ((ns & NumberStyles.AllowThousands) != 0));
    }
    // Exponent
    for (int i = 0; i < s_samples; i++)
    {
        string digits = GetDigitSequence(1, 100, random);
        string exp = GetDigitSequence(1, 3, random);
        int expValue = Int32.Parse(exp);
        string zeros = new string('0', expValue);
        //Positive Exponents
        VerifyParseToString(digits + "e" + CultureInfo.CurrentCulture.NumberFormat.PositiveSign + exp, ns, ((ns & NumberStyles.AllowExponent) != 0), digits + zeros);
        //Negative Exponents
        // A negative exponent only yields an integer when the digits it removes
        // are all zeros.
        bool valid = ((ns & NumberStyles.AllowExponent) != 0);
        for (int j = digits.Length; (valid && (j > 0) && (j > digits.Length - expValue)); j--)
        {
            if (digits[j - 1] != '0')
            {
                valid = false;
            }
        }
        if (digits.Length - Int32.Parse(exp) > 0)
        {
            VerifyParseToString(digits + "e" + CultureInfo.CurrentCulture.NumberFormat.NegativeSign + exp, ns, valid, digits.Substring(0, digits.Length - Int32.Parse(exp)));
        }
        else
        {
            VerifyParseToString(digits + "e" + CultureInfo.CurrentCulture.NumberFormat.NegativeSign + exp, ns, valid, "0");
        }
    }
    // Currency Symbol
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString(CultureInfo.CurrentCulture.NumberFormat.CurrencySymbol + GetDigitSequence(1, 100, random), ns, ((ns & NumberStyles.AllowCurrencySymbol) != 0));
    }
    // Hex Specifier (the appended "A" guarantees a hex-only digit is present)
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString(GetHexDigitSequence(1, 15, random) + "A", ns, ((ns & NumberStyles.AllowHexSpecifier) != 0));
    }
    // Invalid Chars
    for (int i = 0; i < s_samples; i++)
    {
        VerifyParseToString(GetDigitSequence(1, 100, random) + GetRandomInvalidChar(random) + GetDigitSequence(1, 10, random), ns, false);
    }
}
/// <summary>
/// Parses num1 with default styles via both Parse and TryParse and checks each
/// result against the canonical decimal form produced by Fix (trimmed,
/// leading zeros stripped).
/// </summary>
private static void VerifyParseToString(string num1)
{
    BigInteger test;
    Eval(BigInteger.Parse(num1), Fix(num1.Trim()));
    Assert.True(BigInteger.TryParse(num1, out test));
    Eval(test, Fix(num1.Trim()));
}
/// <summary>
/// Asserts that num1 cannot be parsed with default styles: TryParse must return
/// false and Parse must throw <paramref name="expectedExceptionType"/>.
/// </summary>
/// <param name="num1">Input that must be rejected (may be null).</param>
/// <param name="expectedExceptionType">Exact exception type Parse must throw.</param>
private static void VerifyFailParseToString(string num1, Type expectedExceptionType)
{
    BigInteger test;
    Assert.False(BigInteger.TryParse(num1, out test), String.Format("Expected TryParse to fail on {0}", num1));
    // Bug fix: the parameter was accepted but ignored — the exception type was
    // hard-coded from a null check. Every existing caller passes exactly the type
    // the old branches asserted (ArgumentNullException for null, FormatException
    // otherwise), so using the parameter preserves current behavior.
    Assert.Throws(expectedExceptionType, () => { BigInteger.Parse(num1).ToString("d"); });
}
/// <summary>
/// Overload that derives the expected canonical string via Fix.
/// NOTE(review): calls num1.Trim(), so this overload must not receive null —
/// null inputs go through the four-argument overload (see VerifyNumberStyles).
/// </summary>
private static void VerifyParseToString(string num1, NumberStyles ns, bool failureNotExpected)
{
    VerifyParseToString(num1, ns, failureNotExpected, Fix(num1.Trim(), ((ns & NumberStyles.AllowHexSpecifier) != 0), failureNotExpected));
}
/// <summary>
/// Core verifier: when <paramref name="failureNotExpected"/> is true, Parse and
/// TryParse must both succeed and match <paramref name="expected"/>; otherwise
/// Parse must throw (ArgumentNullException for null input, FormatException
/// otherwise) and TryParse must return false.
/// </summary>
private static void VerifyParseToString(string num1, NumberStyles ns, bool failureNotExpected, string expected)
{
    BigInteger test;
    if (failureNotExpected)
    {
        Eval(BigInteger.Parse(num1, ns), expected);
        Assert.True(BigInteger.TryParse(num1, ns, null, out test));
        Eval(test, expected);
    }
    else
    {
        if (num1 == null)
        {
            Assert.Throws<ArgumentNullException>(() => { BigInteger.Parse(num1, ns); });
        }
        else
        {
            Assert.Throws<FormatException>(() => { BigInteger.Parse(num1, ns); });
        }
        Assert.False(BigInteger.TryParse(num1, ns, null, out test), String.Format("Expected TryParse to fail on {0}", num1));
    }
}
/// <summary>
/// Verifies parsing with a custom NumberFormatInfo using the Parse overload that
/// takes only a format provider; TryParse deliberately uses NumberStyles.Any
/// since this overload has no styles parameter.
/// </summary>
private static void VerifySimpleFormatParse(string num1, NumberFormatInfo nfi, BigInteger expected, bool failureExpected = false)
{
    BigInteger test;
    if (!failureExpected)
    {
        Assert.Equal(expected, BigInteger.Parse(num1, nfi));
        Assert.True(BigInteger.TryParse(num1, NumberStyles.Any, nfi, out test));
        Assert.Equal(expected, test);
    }
    else
    {
        Assert.Throws<FormatException>(() => { BigInteger.Parse(num1, nfi); });
        Assert.False(BigInteger.TryParse(num1, NumberStyles.Any, nfi, out test), String.Format("Expected TryParse to fail on {0}", num1));
    }
}
/// <summary>
/// Verifies parsing with an explicit NumberStyles and a custom NumberFormatInfo:
/// both Parse and TryParse must agree with <paramref name="expected"/>, or both
/// must fail when <paramref name="failureExpected"/> is set.
/// </summary>
private static void VerifyFormatParse(string num1, NumberStyles ns, NumberFormatInfo nfi, BigInteger expected, bool failureExpected = false)
{
    BigInteger test;
    if (!failureExpected)
    {
        Assert.Equal(expected, BigInteger.Parse(num1, ns, nfi));
        // Bug fix: the success path passed NumberStyles.Any to TryParse instead of
        // the caller's ns (the failure path already used ns). All current callers
        // pass NumberStyles.Any, so behavior is unchanged for existing call sites.
        Assert.True(BigInteger.TryParse(num1, ns, nfi, out test));
        Assert.Equal(expected, test);
    }
    else
    {
        Assert.Throws<FormatException>(() => { BigInteger.Parse(num1, ns, nfi); });
        Assert.False(BigInteger.TryParse(num1, ns, nfi, out test), String.Format("Expected TryParse to fail on {0}", num1));
    }
}
/// <summary>
/// Returns a random decimal digit string whose length is random.Next(min, max)
/// and whose first digit is never '0'.
/// </summary>
private static String GetDigitSequence(int min, int max, Random random)
{
    int length = random.Next(min, max);
    char[] buffer = new char[length];
    for (int pos = 0; pos < length; pos++)
    {
        int digit = random.Next(0, 10);
        // Redraw the leading digit until it is non-zero.
        while (pos == 0 && digit == 0)
            digit = random.Next(0, 10);
        buffer[pos] = (char)('0' + digit);
    }
    return new String(buffer);
}
/// <summary>
/// Returns a random lowercase hex string containing at least one of the digits
/// a-f.
/// </summary>
/// <remarks>
/// NOTE(review): when a pass yields only 0-9, the outer loop appends another
/// <c>size</c> characters to the existing result rather than regenerating it,
/// so the result can exceed <paramref name="max"/> - 1 characters. Callers only
/// need "hex-looking text", so this appears harmless — confirm intent.
/// </remarks>
private static String GetHexDigitSequence(int min, int max, Random random)
{
    String result = String.Empty;
    String[] digits = new String[] { "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "a", "b", "c", "d", "e", "f" };
    int size = random.Next(min, max);
    bool hasHexCharacter = false;
    while (!hasHexCharacter)
    {
        for (int i = 0; i < size; i++)
        {
            int j = random.Next(0, digits.Length);
            result += digits[j];
            // Indices above 9 select a-f.
            if (j > 9)
            {
                hasHexCharacter = true;
            }
        }
    }
    return result;
}
/// <summary>
/// Returns a one-character string that is neither a hex digit (0-9, a-f, A-F)
/// nor a comma, for use as an always-invalid character in parse inputs.
/// </summary>
private static String GetRandomInvalidChar(Random random)
{
    Char[] digits = new Char[] { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F' };
    // '5' doubles as the "keep trying" sentinel: it is itself a digit, so it can
    // never be a valid answer.
    Char candidate = '5';
    while (candidate == '5')
    {
        candidate = unchecked((Char)random.Next());
        if (Array.IndexOf(digits, candidate) >= 0)
            candidate = '5';
        // Remove the comma: 'AllowThousands' NumberStyle does not enforce the GroupSizes.
        if (candidate == ',')
            candidate = '5';
    }
    return new String(candidate, 1);
}
/// <summary>Canonicalizes a decimal input string (no hex interpretation).</summary>
private static String Fix(String input) => Fix(input, false);
/// <summary>Canonicalizes an input string, assuming parsing is expected to succeed.</summary>
private static String Fix(String input, bool isHex) => Fix(input, isHex, true);
/// <summary>
/// Produces the canonical decimal string a successful parse should yield:
/// optionally converts hex to decimal, strips leading zeros, then keeps only
/// '0'-'9'. When failureNotExpected is false the input is returned untouched.
/// </summary>
private static String Fix(String input, bool isHex, bool failureNotExpected)
{
    if (!failureNotExpected)
        return input;
    String output = isHex ? ConvertHexToDecimal(input) : input;
    // Strip leading zeros but keep a lone "0".
    while (output.Length > 1 && output.StartsWith("0"))
        output = output.Substring(1);
    // Drop anything that is not a decimal digit (signs, separators, ...).
    List<Char> kept = new List<Char>();
    foreach (Char c in output)
    {
        if (c >= '0' && c <= '9')
            kept.Add(c);
    }
    return new String(kept.ToArray());
}
/// <summary>
/// Converts a hex digit string to its decimal representation, interpreting a
/// leading nibble above 7 as the sign bit of a two's-complement value
/// (digits are flipped as 15-d and one is added at the end).
/// Non-hex characters contribute 0 but still shift the base.
/// </summary>
private static String ConvertHexToDecimal(string input)
{
    char[] inArr = input.ToCharArray();
    bool isNeg = false;
    if (inArr.Length > 0)
    {
        if (Int32.Parse("0" + inArr[0], NumberStyles.AllowHexSpecifier) > 7)
        {
            // Negative in two's complement: complement every digit; +1 happens below.
            isNeg = true;
            for (int i = 0; i < inArr.Length; i++)
            {
                int digit = Int32.Parse("0" + inArr[i], NumberStyles.AllowHexSpecifier);
                digit = 15 - digit;
                inArr[i] = digit.ToString("x")[0];
            }
        }
    }
    BigInteger x = 0;
    BigInteger baseNum = 1;
    for (int i = inArr.Length - 1; i >= 0; i--)
    {
        try
        {
            BigInteger x2 = (Int32.Parse(new string(new char[] { inArr[i] }), NumberStyles.AllowHexSpecifier) * baseNum);
            x = x + x2;
        }
        catch (FormatException)
        {
            // Deliberately ignored: the char is not a hex digit, treat it as 0.
        }
        baseNum = baseNum * 16;
    }
    if (isNeg)
    {
        x = x + 1;
    }
    // Render |x| in decimal, most significant digit first.
    List<char> number = new List<char>();
    if (x == 0)
    {
        number.Add('0');
    }
    else
    {
        while (x > 0)
        {
            number.Add((x % 10).ToString().ToCharArray()[0]);
            x = x / 10;
        }
        number.Reverse();
    }
    String y2 = new String(number.ToArray());
    if (isNeg)
    {
        // Bug fix: the original concatenated NegativeSign.ToCharArray() (a char[])
        // with a string, which stringifies the array as "System.Char[]" instead of
        // prepending the culture's negative sign.
        y2 = CultureInfo.CurrentCulture.NumberFormat.NegativeSign + y2;
    }
    return y2;
}
/// <summary>
/// Builds a digit string with the culture's group separator inserted at the
/// cumulative offsets given by <paramref name="sizes"/>, working from the most
/// significant group down. The caller decides whether the result should parse
/// (AllowThousands) or fail.
/// </summary>
private static String GenerateGroups(int[] sizes, string seperator, Random random)
{
    // total_sizes[k] = cumulative digit count covered by the last k groups.
    List<int> total_sizes = new List<int>();
    int total;
    // num_digits is at most 99, so precomputing boundaries past 100 is enough.
    int num_digits = random.Next(10, 100);
    string digits = String.Empty;
    total = 0;
    total_sizes.Add(0);
    for (int j = 0; ((j < (sizes.Length - 1)) && (total < 101)); j++)
    {
        total += sizes[j];
        total_sizes.Add(total);
    }
    if (total < 101)
    {
        // A trailing group size of 0 means "no further grouping"; otherwise the
        // last size repeats indefinitely.
        if (sizes[sizes.Length - 1] == 0)
        {
            total_sizes.Add(101);
        }
        else
        {
            while (total < 101)
            {
                total += sizes[sizes.Length - 1];
                total_sizes.Add(total);
            }
        }
    }
    bool first = true;
    for (int j = total_sizes.Count - 1; j > 0; j--)
    {
        // Skip boundaries at or beyond the requested length until the first
        // (most significant, possibly shorter) group fits.
        if ((first) && (total_sizes[j] >= num_digits))
        {
            continue;
        }
        int group_size = num_digits - total_sizes[j - 1];
        if (first)
        {
            digits += GetDigitSequence(group_size, group_size, random);
            first = false;
        }
        else
        {
            //Generate an extra character since the first character of GetDigitSequence is non-zero.
            digits += GetDigitSequence(group_size + 1, group_size + 1, random).Substring(1);
        }
        num_digits -= group_size;
        if (num_digits > 0)
        {
            digits += seperator;
        }
    }
    return digits;
}
/// <summary>
/// Mutates <paramref name="nfi"/> in place with deliberately unusual symbols so
/// format-provider parsing cannot accidentally succeed via culture defaults,
/// and returns the same instance.
/// </summary>
private static NumberFormatInfo MarkUp(NumberFormatInfo nfi)
{
    // Currency: '@' symbol, '#' grouping in pairs, '!' decimal point.
    nfi.CurrencySymbol = "@";
    nfi.CurrencyDecimalDigits = 0;
    nfi.CurrencyDecimalSeparator = "!";
    nfi.CurrencyGroupSeparator = "#";
    nfi.CurrencyGroupSizes = new int[] { 2 };
    nfi.CurrencyNegativePattern = 4;
    nfi.CurrencyPositivePattern = 2;
    // Plain numbers: '&' grouping in fours, '^' decimal point.
    nfi.NumberDecimalDigits = 0;
    nfi.NumberDecimalSeparator = "^";
    nfi.NumberGroupSeparator = "&";
    nfi.NumberGroupSizes = new int[] { 4 };
    nfi.NumberNegativePattern = 4;
    // Percent: '?' symbol, '+' grouping in fives, '*' decimal point, '~' per-mille.
    nfi.PercentSymbol = "?";
    nfi.PercentDecimalDigits = 0;
    nfi.PercentDecimalSeparator = "*";
    nfi.PercentGroupSeparator = "+";
    nfi.PercentGroupSizes = new int[] { 5 };
    nfi.PercentNegativePattern = 2;
    nfi.PercentPositivePattern = 2;
    nfi.PerMilleSymbol = "~";
    // Signs: '<' negative, '>' positive.
    nfi.NegativeSign = "<";
    nfi.PositiveSign = ">";
    return nfi;
}
// We need to account for cultures like fr-FR and uk-UA that use the no-break space
// (NBSP, 0xA0) character as the group separator. Because NBSP cannot be (easily)
// entered by the end user we accept regular spaces (SP, 0x20) as group separators
// for those cultures, which means that trailing SP characters are interpreted as
// group separators rather than whitespace.
//
// See also System.Globalization.FormatProvider+Number.MatchChars(char*, char*)
private static bool FailureNotExpectedForTrailingWhite(NumberStyles ns, bool spaceOnlyTrail)
{
    bool allowsTrailingWhite = (ns & NumberStyles.AllowTrailingWhite) != 0;
    // Only a space-only trailer combined with AllowThousands can hit the
    // NBSP-separator special case.
    if (!spaceOnlyTrail || (ns & NumberStyles.AllowThousands) == 0)
        return allowsTrailingWhite;
    string groupSeparator = (ns & NumberStyles.AllowCurrencySymbol) != 0
        ? CultureInfo.CurrentCulture.NumberFormat.CurrencyGroupSeparator
        : CultureInfo.CurrentCulture.NumberFormat.NumberGroupSeparator;
    return groupSeparator == "\u00A0" || allowsTrailingWhite;
}
/// <summary>
/// Renders |x| in decimal by repeated div/mod (independently of
/// BigInteger.ToString) and asserts it equals <paramref name="expected"/>.
/// The sign of x is intentionally ignored.
/// </summary>
public static void Eval(BigInteger x, String expected)
{
    BigInteger magnitude = (x >= 0) ? x : -x;
    if (magnitude == 0)
    {
        Assert.Equal(expected, "0");
        return;
    }
    // Build the decimal string least-significant digit first, prepending as we go.
    String actual = String.Empty;
    while (magnitude > 0)
    {
        actual = (magnitude % 10).ToString() + actual;
        magnitude = magnitude / 10;
    }
    Assert.Equal(expected, actual);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Microsoft.Xna.Framework;
namespace PokeD.CPGL.Components.Input
{
/// <summary>
/// Base component for input listeners. Event handlers are exposed as
/// <see cref="BaseEventHandler{TEventArgs}"/> objects; subscriptions may carry an
/// owning <see cref="GameComponent"/>, in which case the delegate is only invoked
/// while that component is enabled. Dispose reflects over fields/properties of
/// the handler type to detect leaked subscriptions.
/// </summary>
public abstract class InputListenerComponent : Component
{
    public abstract class BaseEventHandler<TEventArgs> : IDisposable where TEventArgs : EventArgs
    {
        // I just wanted to check if this will works. It worked. I lol'd, I kept it.
        /// <summary>
        /// Copies behaviour of <see cref="BaseEventHandler{T1}.Subscribe(ValueTuple{Component, EventHandler{TEventArgs}})"/>
        /// </summary>
        public static BaseEventHandler<TEventArgs> operator +(BaseEventHandler<TEventArgs> eventHandler, (GameComponent, EventHandler<TEventArgs>) tuple) => eventHandler.Subscribe(tuple);
        /// <summary>
        /// Copies behaviour of <see cref="BaseEventHandler{T1}.Subscribe(EventHandler{TEventArgs})"/>
        /// </summary>
        public static BaseEventHandler<TEventArgs> operator +(BaseEventHandler<TEventArgs> eventHandler, EventHandler<TEventArgs> @delegate) => eventHandler.Subscribe(@delegate);
        /// <summary>
        /// Copies behaviour of <see cref="BaseEventHandler{T1}.Unsubscribe"/>
        /// </summary>
        public static BaseEventHandler<TEventArgs> operator -(BaseEventHandler<TEventArgs> eventHandler, EventHandler<TEventArgs> @delegate) => eventHandler.Unsubscribe(@delegate);

        public abstract BaseEventHandler<TEventArgs> Subscribe(GameComponent component, EventHandler<TEventArgs> @delegate);
        public abstract BaseEventHandler<TEventArgs> Subscribe((GameComponent Component, EventHandler<TEventArgs> Delegate) tuple);
        public abstract BaseEventHandler<TEventArgs> Subscribe(EventHandler<TEventArgs> @delegate);
        public abstract BaseEventHandler<TEventArgs> Unsubscribe(EventHandler<TEventArgs> action);

        protected BaseEventHandler()
        {
            // Only the in-house BaseEventHandlerWithInvoke<> hierarchy may derive from
            // this type. Bug fix: GetGenericTypeDefinition() throws
            // InvalidOperationException for a non-generic runtime type, so it is now
            // only called when applicable — a non-generic subclass gets the intended
            // check instead of an unrelated exception.
            var runtimeType = GetType();
            var definition = runtimeType.GetTypeInfo().IsGenericType ? runtimeType.GetGenericTypeDefinition() : runtimeType;
            if (!IsSubclassOf(definition, typeof(BaseEventHandlerWithInvoke<>)))
                throw new InvalidCastException($"Do not create custom implementations of {nameof(BaseEventHandler<TEventArgs>)}");
        }

        // TODO: Optimize
        // True when 'type' derives (directly or transitively) from 'baseType',
        // comparing open generic definitions when baseType is generic.
        private static bool IsSubclassOf(Type type, Type baseType)
        {
            if (type == null || baseType == null || type == baseType)
                return false;
            var typeTypeInfo = type.GetTypeInfo();
            var baseTypeTypeInfo = baseType.GetTypeInfo();
            if (!baseTypeTypeInfo.IsGenericType)
            {
                // Non-generic target: reflection answers directly unless the probe
                // type itself is generic, in which case fall through to the walk.
                if (!typeTypeInfo.IsGenericType)
                    return typeTypeInfo.IsSubclassOf(baseType);
            }
            else
            {
                // Compare ancestors against the open definition, e.g. BaseEventHandlerWithInvoke<>.
                baseType = baseType.GetGenericTypeDefinition();
            }
            // Walk the inheritance chain. Bug fix: the original dereferenced
            // BaseType before checking it for null.
            var objectType = typeof(object);
            for (type = typeTypeInfo.BaseType; type != null && type != objectType; type = typeTypeInfo.BaseType)
            {
                typeTypeInfo = type.GetTypeInfo();
                var currentType = typeTypeInfo.IsGenericType ? type.GetGenericTypeDefinition() : type;
                if (currentType == baseType)
                    return true;
            }
            return false;
        }

        public abstract void Dispose();
    }

    /// <summary>Adds the internal Invoke used by this component to raise events.</summary>
    protected abstract class BaseEventHandlerWithInvoke<TEventArgs> : BaseEventHandler<TEventArgs> where TEventArgs : EventArgs
    {
        protected internal abstract void Invoke(object sender, TEventArgs e);
    }

    protected sealed class CustomEventHandler<TEventArgs> : BaseEventHandlerWithInvoke<TEventArgs> where TEventArgs : EventArgs
    {
        // I would have used ValueTuple, but the comparison should be done only using EventHandler<TEventArgs> Action.
        private class Storage : IEqualityComparer<Storage>
        {
            public readonly GameComponent Component;   // null for component-less subscriptions
            public readonly EventHandler<TEventArgs> Delegate;

            public Storage(GameComponent component, EventHandler<TEventArgs> @delegate) { Component = component; Delegate = @delegate; }
            public Storage((GameComponent Component, EventHandler<TEventArgs> Delegate) tuple) { Component = tuple.Component; Delegate = tuple.Delegate; }

            // Equality deliberately ignores Component so Unsubscribe(delegate) can
            // remove a subscription regardless of which component registered it.
            public bool Equals(Storage x, Storage y) => ((Delegate) x.Delegate).Equals((Delegate) y.Delegate);
            // Bug fix: hash only the delegate. The original XORed in
            // Component.GetHashCode(), which (a) threw NullReferenceException for the
            // null Component created by Subscribe(EventHandler<TEventArgs>) and
            // (b) was inconsistent with Equals, which ignores Component.
            public int GetHashCode(Storage obj) => ((Delegate) obj.Delegate).GetHashCode();

            public override bool Equals(object obj)
            {
                var storage = obj as Storage;
                return !ReferenceEquals(storage, null) && Equals(this, storage);
            }
            public override int GetHashCode() => GetHashCode(this);
        }

        private List<Storage> Subscribers { get; } = new List<Storage>();

        public override BaseEventHandler<TEventArgs> Subscribe(GameComponent component, EventHandler<TEventArgs> @delegate) { lock (Subscribers) { Subscribers.Add(new Storage(component, @delegate)); return this; } }
        public override BaseEventHandler<TEventArgs> Subscribe((GameComponent Component, EventHandler<TEventArgs> Delegate) tuple) { lock (Subscribers) { Subscribers.Add(new Storage(tuple)); return this; } }
        public override BaseEventHandler<TEventArgs> Subscribe(EventHandler<TEventArgs> @delegate) { lock (Subscribers) { Subscribers.Add(new Storage(null, @delegate)); return this; } }
        public override BaseEventHandler<TEventArgs> Unsubscribe(EventHandler<TEventArgs> @delegate) { lock (Subscribers) { Subscribers.Remove(new Storage(null, @delegate)); return this; } }

        /// <summary>Debug-only leak detector: throws when subscriptions were never removed.</summary>
        public override void Dispose()
        {
            if (Subscribers.Any())
            {
#if DEBUG
                throw new Exception("Leaked events!");
#endif
            }
        }

        protected internal override void Invoke(object sender, TEventArgs e)
        {
            lock (Subscribers)
            {
                // Snapshot so a handler may (un)subscribe while we iterate.
                var tempList = Subscribers.ToList();
                foreach (var subscriber in tempList)
                {
                    if (subscriber != null)
                    {
                        if (subscriber.Component != null)
                        {
                            // Component-bound handlers only fire while their component is enabled.
                            if (subscriber.Component.Enabled)
                                subscriber.Delegate?.Invoke(sender, e);
                        }
                        else
                            subscriber.Delegate?.Invoke(sender, e);
                    }
                }
            }
        }
    }

    protected sealed class CustomEventHandlerOld<TEventArgs> : BaseEventHandlerWithInvoke<TEventArgs> where TEventArgs : EventArgs
    {
        private event EventHandler<TEventArgs> EventHandler;

        public override BaseEventHandler<TEventArgs> Subscribe(GameComponent component, EventHandler<TEventArgs> @delegate) { EventHandler += @delegate; return this; }
        public override BaseEventHandler<TEventArgs> Subscribe((GameComponent Component, EventHandler<TEventArgs> Delegate) tuple) { EventHandler += tuple.Delegate; return this; }
        public override BaseEventHandler<TEventArgs> Subscribe(EventHandler<TEventArgs> @delegate) { EventHandler += @delegate; return this; }
        public override BaseEventHandler<TEventArgs> Unsubscribe(EventHandler<TEventArgs> @delegate) { EventHandler -= @delegate; return this; }

        protected internal override void Invoke(object sender, TEventArgs e) { EventHandler?.Invoke(sender, e); }

        /// <summary>Debug-only leak detector: throws when handlers remain attached.</summary>
        public override void Dispose()
        {
            // Bug fix: EventHandler is null when nothing was ever subscribed; the
            // original dereferenced it unconditionally and threw NullReferenceException.
            if (EventHandler != null && EventHandler.GetInvocationList().Any())
            {
#if DEBUG
                throw new Exception("Leaked events!");
#endif
            }
        }
    }

    //public ViewportAdapter ViewportAdapter { get; set; }

    protected InputListenerComponent(PortableGame game) : base(game) { }

    protected override void Dispose(bool disposing)
    {
        if (!_isDisposed)
        {
            if (disposing)
            {
                // Dispose every event to check if something is leaking.
                var eventFields = GetType().GetTypeInfo().DeclaredFields
                    .Where(fieldInfo =>
                    {
                        var typeInfo = fieldInfo.FieldType.GetTypeInfo();
                        return typeInfo.IsGenericType && typeInfo.GetGenericTypeDefinition() == typeof(BaseEventHandler<>);
                    });
                foreach (var fieldInfo in eventFields)
                    (fieldInfo.GetValue(this) as IDisposable)?.Dispose();
                var eventProperties = GetType().GetTypeInfo().DeclaredProperties
                    .Where(propertyInfo =>
                    {
                        var typeInfo = propertyInfo.PropertyType.GetTypeInfo();
                        return typeInfo.IsGenericType && typeInfo.GetGenericTypeDefinition() == typeof(BaseEventHandler<>);
                    });
                foreach (var propertyInfo in eventProperties)
                    (propertyInfo.GetValue(this) as IDisposable)?.Dispose();
            }
        }
        base.Dispose(disposing);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
** Purpose: The structure for holding all of the data needed
** for object serialization and deserialization.
**
**
===========================================================*/
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Runtime.Remoting;
using System.Globalization;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Security;
using System.Runtime.CompilerServices;
namespace System.Runtime.Serialization
{
public sealed class SerializationInfo
{
private const int defaultSize = 4;
private const string s_mscorlibAssemblySimpleName = System.CoreLib.Name;
private const string s_mscorlibFileName = s_mscorlibAssemblySimpleName + ".dll";
// Even though we have a dictionary, we're still keeping all the arrays around for back-compat.
// Otherwise we may run into potentially breaking behaviors like GetEnumerator() not returning entries in the same order they were added.
internal String[] m_members;
internal Object[] m_data;
internal Type[] m_types;
private Dictionary<string, int> m_nameToIndex;
internal int m_currMember;
internal IFormatterConverter m_converter;
private String m_fullTypeName;
private String m_assemName;
private Type objectType;
private bool isFullTypeNameSetExplicit;
private bool isAssemblyNameSetExplicit;
private bool requireSameTokenInPartialTrust;
/// <summary>
/// Creates a SerializationInfo for <paramref name="type"/> without the
/// partial-trust same-token requirement.
/// </summary>
[CLSCompliant(false)]
public SerializationInfo(Type type, IFormatterConverter converter)
    : this(type, converter, false)
{
}
/// <summary>
/// Initializes the store for <paramref name="type"/>: records its full name and
/// assembly, and allocates the parallel member/data/type arrays (kept for
/// back-compat, see the field comments) plus the name-to-index dictionary at the
/// default capacity of 4.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="type"/> or <paramref name="converter"/> is null.</exception>
[CLSCompliant(false)]
public SerializationInfo(Type type, IFormatterConverter converter, bool requireSameTokenInPartialTrust)
{
    if ((object)type == null)
    {
        throw new ArgumentNullException(nameof(type));
    }
    if (converter == null)
    {
        throw new ArgumentNullException(nameof(converter));
    }
    Contract.EndContractBlock();
    objectType = type;
    m_fullTypeName = type.FullName;
    m_assemName = type.Module.Assembly.FullName;
    m_members = new String[defaultSize];
    m_data = new Object[defaultSize];
    m_types = new Type[defaultSize];
    m_nameToIndex = new Dictionary<string, int>();
    m_converter = converter;
    this.requireSameTokenInPartialTrust = requireSameTokenInPartialTrust;
}
/// <summary>
/// Full name of the type being (de)serialized. Setting it marks the name as
/// explicitly chosen (see <see cref="IsFullTypeNameSetExplicit"/>).
/// </summary>
public String FullTypeName
{
    get
    {
        return m_fullTypeName;
    }
    set
    {
        if (null == value)
        {
            throw new ArgumentNullException(nameof(value));
        }
        Contract.EndContractBlock();
        m_fullTypeName = value;
        isFullTypeNameSetExplicit = true;
    }
}
public String AssemblyName
{
get
{
return m_assemName;
}
set
{
if (null == value)
{
throw new ArgumentNullException(nameof(value));
}
Contract.EndContractBlock();
if (requireSameTokenInPartialTrust)
{
DemandForUnsafeAssemblyNameAssignments(m_assemName, value);
}
m_assemName = value;
isAssemblyNameSetExplicit = true;
}
}
public void SetType(Type type)
{
if ((object)type == null)
{
throw new ArgumentNullException(nameof(type));
}
Contract.EndContractBlock();
if (requireSameTokenInPartialTrust)
{
DemandForUnsafeAssemblyNameAssignments(this.ObjectType.Assembly.FullName, type.Assembly.FullName);
}
if (!Object.ReferenceEquals(objectType, type))
{
objectType = type;
m_fullTypeName = type.FullName;
m_assemName = type.Module.Assembly.FullName;
isFullTypeNameSetExplicit = false;
isAssemblyNameSetExplicit = false;
}
}
internal static void DemandForUnsafeAssemblyNameAssignments(string originalAssemblyName, string newAssemblyName)
{
}
public int MemberCount
{
get
{
return m_currMember;
}
}
public Type ObjectType
{
get
{
return objectType;
}
}
public bool IsFullTypeNameSetExplicit
{
get
{
return isFullTypeNameSetExplicit;
}
}
public bool IsAssemblyNameSetExplicit
{
get
{
return isAssemblyNameSetExplicit;
}
}
public SerializationInfoEnumerator GetEnumerator()
{
return new SerializationInfoEnumerator(m_members, m_data, m_types, m_currMember);
}
private void ExpandArrays()
{
int newSize;
Debug.Assert(m_members.Length == m_currMember, "[SerializationInfo.ExpandArrays]m_members.Length == m_currMember");
newSize = (m_currMember * 2);
//
// In the pathological case, we may wrap
//
if (newSize < m_currMember)
{
if (Int32.MaxValue > m_currMember)
{
newSize = Int32.MaxValue;
}
}
//
// Allocate more space and copy the data
//
String[] newMembers = new String[newSize];
Object[] newData = new Object[newSize];
Type[] newTypes = new Type[newSize];
Array.Copy(m_members, newMembers, m_currMember);
Array.Copy(m_data, newData, m_currMember);
Array.Copy(m_types, newTypes, m_currMember);
//
// Assign the new arrys back to the member vars.
//
m_members = newMembers;
m_data = newData;
m_types = newTypes;
}
public void AddValue(String name, Object value, Type type)
{
if (null == name)
{
throw new ArgumentNullException(nameof(name));
}
if ((object)type == null)
{
throw new ArgumentNullException(nameof(type));
}
Contract.EndContractBlock();
AddValueInternal(name, value, type);
}
public void AddValue(String name, Object value)
{
if (null == value)
{
AddValue(name, value, typeof(Object));
}
else
{
AddValue(name, value, value.GetType());
}
}
public void AddValue(String name, bool value)
{
AddValue(name, (Object)value, typeof(bool));
}
public void AddValue(String name, char value)
{
AddValue(name, (Object)value, typeof(char));
}
[CLSCompliant(false)]
public void AddValue(String name, sbyte value)
{
AddValue(name, (Object)value, typeof(sbyte));
}
public void AddValue(String name, byte value)
{
AddValue(name, (Object)value, typeof(byte));
}
public void AddValue(String name, short value)
{
AddValue(name, (Object)value, typeof(short));
}
[CLSCompliant(false)]
public void AddValue(String name, ushort value)
{
AddValue(name, (Object)value, typeof(ushort));
}
public void AddValue(String name, int value)
{
AddValue(name, (Object)value, typeof(int));
}
[CLSCompliant(false)]
public void AddValue(String name, uint value)
{
AddValue(name, (Object)value, typeof(uint));
}
public void AddValue(String name, long value)
{
AddValue(name, (Object)value, typeof(long));
}
[CLSCompliant(false)]
public void AddValue(String name, ulong value)
{
AddValue(name, (Object)value, typeof(ulong));
}
public void AddValue(String name, float value)
{
AddValue(name, (Object)value, typeof(float));
}
public void AddValue(String name, double value)
{
AddValue(name, (Object)value, typeof(double));
}
public void AddValue(String name, decimal value)
{
AddValue(name, (Object)value, typeof(decimal));
}
public void AddValue(String name, DateTime value)
{
AddValue(name, (Object)value, typeof(DateTime));
}
internal void AddValueInternal(String name, Object value, Type type)
{
if (m_nameToIndex.ContainsKey(name))
{
BCLDebug.Trace("SER", "[SerializationInfo.AddValue]Tried to add ", name, " twice to the SI.");
throw new SerializationException(Environment.GetResourceString("Serialization_SameNameTwice"));
}
m_nameToIndex.Add(name, m_currMember);
//
// If we need to expand the arrays, do so.
//
if (m_currMember >= m_members.Length)
{
ExpandArrays();
}
//
// Add the data and then advance the counter.
//
m_members[m_currMember] = name;
m_data[m_currMember] = value;
m_types[m_currMember] = type;
m_currMember++;
}
/*=================================UpdateValue==================================
**Action: Finds the value if it exists in the current data. If it does, we replace
** the values, if not, we append it to the end. This is useful to the
** ObjectManager when it's performing fixups.
**Returns: void
**Arguments: name -- the name of the data to be updated.
** value -- the new value.
** type -- the type of the data being added.
**Exceptions: None. All error checking is done with asserts. Although public in coreclr,
** it's not exposed in a contract and is only meant to be used by corefx.
==============================================================================*/
// This should not be used by clients: exposing out this functionality would allow children
// to overwrite their parent's values. It is public in order to give corefx access to it for
// its ObjectManager implementation, but it should not be exposed out of a contract.
public void UpdateValue(String name, Object value, Type type)
{
Debug.Assert(null != name, "[SerializationInfo.UpdateValue]name!=null");
Debug.Assert(null != value, "[SerializationInfo.UpdateValue]value!=null");
Debug.Assert(null != (object)type, "[SerializationInfo.UpdateValue]type!=null");
int index = FindElement(name);
if (index < 0)
{
AddValueInternal(name, value, type);
}
else
{
m_data[index] = value;
m_types[index] = type;
}
}
private int FindElement(String name)
{
if (null == name)
{
throw new ArgumentNullException(nameof(name));
}
Contract.EndContractBlock();
BCLDebug.Trace("SER", "[SerializationInfo.FindElement]Looking for ", name, " CurrMember is: ", m_currMember);
int index;
if (m_nameToIndex.TryGetValue(name, out index))
{
return index;
}
return -1;
}
/*==================================GetElement==================================
**Action: Use FindElement to get the location of a particular member and then return
** the value of the element at that location. The type of the member is
** returned in the foundType field.
**Returns: The value of the element at the position associated with name.
**Arguments: name -- the name of the element to find.
** foundType -- the type of the element associated with the given name.
**Exceptions: None. FindElement does null checking and throws for elements not
** found.
==============================================================================*/
private Object GetElement(String name, out Type foundType)
{
int index = FindElement(name);
if (index == -1)
{
throw new SerializationException(Environment.GetResourceString("Serialization_NotFound", name));
}
Debug.Assert(index < m_data.Length, "[SerializationInfo.GetElement]index<m_data.Length");
Debug.Assert(index < m_types.Length, "[SerializationInfo.GetElement]index<m_types.Length");
foundType = m_types[index];
Debug.Assert((object)foundType != null, "[SerializationInfo.GetElement]foundType!=null");
return m_data[index];
}
private Object GetElementNoThrow(String name, out Type foundType)
{
int index = FindElement(name);
if (index == -1)
{
foundType = null;
return null;
}
Debug.Assert(index < m_data.Length, "[SerializationInfo.GetElement]index<m_data.Length");
Debug.Assert(index < m_types.Length, "[SerializationInfo.GetElement]index<m_types.Length");
foundType = m_types[index];
Debug.Assert((object)foundType != null, "[SerializationInfo.GetElement]foundType!=null");
return m_data[index];
}
//
// The user should call one of these getters to get the data back in the
// form requested.
//
public Object GetValue(String name, Type type)
{
if ((object)type == null)
{
throw new ArgumentNullException(nameof(type));
}
Contract.EndContractBlock();
RuntimeType rt = type as RuntimeType;
if (rt == null)
throw new ArgumentException(Environment.GetResourceString("Argument_MustBeRuntimeType"));
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, type) || type.IsAssignableFrom(foundType) || value == null)
{
return value;
}
Debug.Assert(m_converter != null, "[SerializationInfo.GetValue]m_converter!=null");
return m_converter.Convert(value, type);
}
internal Object GetValueNoThrow(String name, Type type)
{
Type foundType;
Object value;
Debug.Assert((object)type != null, "[SerializationInfo.GetValue]type ==null");
Debug.Assert(type is RuntimeType, "[SerializationInfo.GetValue]type is not a runtime type");
value = GetElementNoThrow(name, out foundType);
if (value == null)
return null;
if (Object.ReferenceEquals(foundType, type) || type.IsAssignableFrom(foundType) || value == null)
{
return value;
}
Debug.Assert(m_converter != null, "[SerializationInfo.GetValue]m_converter!=null");
return m_converter.Convert(value, type);
}
public bool GetBoolean(String name)
{
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, typeof(bool)))
{
return (bool)value;
}
return m_converter.ToBoolean(value);
}
public char GetChar(String name)
{
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, typeof(char)))
{
return (char)value;
}
return m_converter.ToChar(value);
}
[CLSCompliant(false)]
public sbyte GetSByte(String name)
{
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, typeof(sbyte)))
{
return (sbyte)value;
}
return m_converter.ToSByte(value);
}
public byte GetByte(String name)
{
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, typeof(byte)))
{
return (byte)value;
}
return m_converter.ToByte(value);
}
public short GetInt16(String name)
{
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, typeof(short)))
{
return (short)value;
}
return m_converter.ToInt16(value);
}
[CLSCompliant(false)]
public ushort GetUInt16(String name)
{
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, typeof(ushort)))
{
return (ushort)value;
}
return m_converter.ToUInt16(value);
}
public int GetInt32(String name)
{
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, typeof(int)))
{
return (int)value;
}
return m_converter.ToInt32(value);
}
[CLSCompliant(false)]
public uint GetUInt32(String name)
{
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, typeof(uint)))
{
return (uint)value;
}
return m_converter.ToUInt32(value);
}
public long GetInt64(String name)
{
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, typeof(long)))
{
return (long)value;
}
return m_converter.ToInt64(value);
}
[CLSCompliant(false)]
public ulong GetUInt64(String name)
{
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, typeof(ulong)))
{
return (ulong)value;
}
return m_converter.ToUInt64(value);
}
public float GetSingle(String name)
{
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, typeof(float)))
{
return (float)value;
}
return m_converter.ToSingle(value);
}
public double GetDouble(String name)
{
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, typeof(double)))
{
return (double)value;
}
return m_converter.ToDouble(value);
}
public decimal GetDecimal(String name)
{
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, typeof(decimal)))
{
return (decimal)value;
}
return m_converter.ToDecimal(value);
}
public DateTime GetDateTime(String name)
{
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, typeof(DateTime)))
{
return (DateTime)value;
}
return m_converter.ToDateTime(value);
}
public String GetString(String name)
{
Type foundType;
Object value;
value = GetElement(name, out foundType);
if (Object.ReferenceEquals(foundType, typeof(String)) || value == null)
{
return (String)value;
}
return m_converter.ToString(value);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Buffers;
using System.Diagnostics;
using System.Runtime.InteropServices;
namespace System.Security.Cryptography.Asn1
{
internal sealed partial class AsnWriter
{
    /// <summary>
    /// Write the provided string using the specified encoding type using the UNIVERSAL
    /// tag corresponding to the encoding type.
    /// </summary>
    /// <param name="encodingType">
    /// The <see cref="UniversalTagNumber"/> corresponding to the encoding to use.
    /// </param>
    /// <param name="str">The string to write.</param>
    /// <exception cref="ArgumentNullException"><paramref name="str"/> is <c>null</c></exception>
    /// <exception cref="ArgumentOutOfRangeException">
    /// <paramref name="encodingType"/> is not a restricted character string encoding type --OR--
    /// <paramref name="encodingType"/> is a restricted character string encoding type that is not
    /// currently supported by this method
    /// </exception>
    /// <exception cref="ObjectDisposedException">The writer has been Disposed.</exception>
    /// <seealso cref="WriteCharacterString(Asn1Tag,UniversalTagNumber,string)"/>
    public void WriteCharacterString(UniversalTagNumber encodingType, string str)
    {
        if (str == null)
            throw new ArgumentNullException(nameof(str));

        WriteCharacterString(encodingType, str.AsSpan());
    }

    /// <summary>
    /// Write the provided string using the specified encoding type using the UNIVERSAL
    /// tag corresponding to the encoding type.
    /// </summary>
    /// <param name="encodingType">
    /// The <see cref="UniversalTagNumber"/> corresponding to the encoding to use.
    /// </param>
    /// <param name="str">The string to write.</param>
    /// <exception cref="ArgumentOutOfRangeException">
    /// <paramref name="encodingType"/> is not a restricted character string encoding type --OR--
    /// <paramref name="encodingType"/> is a restricted character string encoding type that is not
    /// currently supported by this method
    /// </exception>
    /// <exception cref="ObjectDisposedException">The writer has been Disposed.</exception>
    /// <seealso cref="WriteCharacterString(Asn1Tag,UniversalTagNumber,ReadOnlySpan{char})"/>
    public void WriteCharacterString(UniversalTagNumber encodingType, ReadOnlySpan<char> str)
    {
        Text.Encoding encoding = AsnCharacterStringEncodings.GetEncoding(encodingType);
        WriteCharacterStringCore(new Asn1Tag(encodingType), encoding, str);
    }

    /// <summary>
    /// Write the provided string using the specified encoding type using the specified
    /// tag corresponding to the encoding type.
    /// </summary>
    /// <param name="tag">The tag to write.</param>
    /// <param name="encodingType">
    /// The <see cref="UniversalTagNumber"/> corresponding to the encoding to use.
    /// </param>
    /// <param name="str">The string to write.</param>
    /// <exception cref="ArgumentNullException"><paramref name="str"/> is <c>null</c></exception>
    /// <exception cref="ArgumentOutOfRangeException">
    /// <paramref name="encodingType"/> is not a restricted character string encoding type --OR--
    /// <paramref name="encodingType"/> is a restricted character string encoding type that is not
    /// currently supported by this method
    /// </exception>
    /// <exception cref="ArgumentException">
    /// <paramref name="tag"/>.<see cref="Asn1Tag.TagClass"/> is
    /// <see cref="TagClass.Universal"/>, but
    /// <paramref name="tag"/>.<see cref="Asn1Tag.TagValue"/> is not correct for
    /// the method
    /// </exception>
    /// <exception cref="ObjectDisposedException">The writer has been Disposed.</exception>
    public void WriteCharacterString(Asn1Tag tag, UniversalTagNumber encodingType, string str)
    {
        if (str == null)
            throw new ArgumentNullException(nameof(str));

        WriteCharacterString(tag, encodingType, str.AsSpan());
    }

    /// <summary>
    /// Write the provided string using the specified encoding type using the specified
    /// tag corresponding to the encoding type.
    /// </summary>
    /// <param name="tag">The tag to write.</param>
    /// <param name="encodingType">
    /// The <see cref="UniversalTagNumber"/> corresponding to the encoding to use.
    /// </param>
    /// <param name="str">The string to write.</param>
    /// <exception cref="ArgumentOutOfRangeException">
    /// <paramref name="encodingType"/> is not a restricted character string encoding type --OR--
    /// <paramref name="encodingType"/> is a restricted character string encoding type that is not
    /// currently supported by this method
    /// </exception>
    /// <exception cref="ArgumentException">
    /// <paramref name="tag"/>.<see cref="Asn1Tag.TagClass"/> is
    /// <see cref="TagClass.Universal"/>, but
    /// <paramref name="tag"/>.<see cref="Asn1Tag.TagValue"/> is not correct for
    /// the method
    /// </exception>
    /// <exception cref="ObjectDisposedException">The writer has been Disposed.</exception>
    public void WriteCharacterString(Asn1Tag tag, UniversalTagNumber encodingType, ReadOnlySpan<char> str)
    {
        CheckUniversalTag(tag, encodingType);

        Text.Encoding encoding = AsnCharacterStringEncodings.GetEncoding(encodingType);
        WriteCharacterStringCore(tag, encoding, str);
    }

    // Shared implementation for all of the public overloads above.
    // T-REC-X.690-201508 sec 8.23
    private void WriteCharacterStringCore(Asn1Tag tag, Text.Encoding encoding, ReadOnlySpan<char> str)
    {
        int size = -1;

        // T-REC-X.690-201508 sec 9.2
        if (RuleSet == AsnEncodingRules.CER)
        {
            // TODO: Split this for netstandard vs netcoreapp for span?.
            unsafe
            {
                fixed (char* strPtr = &MemoryMarshal.GetReference(str))
                {
                    size = encoding.GetByteCount(strPtr, str.Length);

                    // If it exceeds the primitive segment size, use the constructed encoding.
                    if (size > AsnReader.MaxCERSegmentSize)
                    {
                        WriteConstructedCerCharacterString(tag, encoding, str, size);
                        return;
                    }
                }
            }
        }

        // TODO: Split this for netstandard vs netcoreapp for span?.
        unsafe
        {
            fixed (char* strPtr = &MemoryMarshal.GetReference(str))
            {
                if (size < 0)
                {
                    size = encoding.GetByteCount(strPtr, str.Length);
                }

                // Clear the constructed tag, if present.
                WriteTag(tag.AsPrimitive());
                WriteLength(size);
                Span<byte> dest = _buffer.AsSpan(_offset, size);

                fixed (byte* destPtr = &MemoryMarshal.GetReference(dest))
                {
                    int written = encoding.GetBytes(strPtr, str.Length, destPtr, dest.Length);

                    if (written != size)
                    {
                        Debug.Fail($"Encoding produced different answer for GetByteCount ({size}) and GetBytes ({written})");
                        throw new InvalidOperationException();
                    }
                }

                _offset += size;
            }
        }
    }

    // Encodes the full string into a pooled buffer, then emits it as a
    // constructed CER octet-string sequence of MaxCERSegmentSize chunks.
    private void WriteConstructedCerCharacterString(Asn1Tag tag, Text.Encoding encoding, ReadOnlySpan<char> str, int size)
    {
        Debug.Assert(size > AsnReader.MaxCERSegmentSize);

        // TODO: Split this for netstandard vs netcoreapp for span?.
        var localPool = ArrayPool<byte>.Shared;
        byte[] tmp = localPool.Rent(size);

        try
        {
            unsafe
            {
                fixed (char* strPtr = &MemoryMarshal.GetReference(str))
                fixed (byte* destPtr = tmp)
                {
                    int written = encoding.GetBytes(strPtr, str.Length, destPtr, tmp.Length);

                    if (written != size)
                    {
                        Debug.Fail(
                            $"Encoding produced different answer for GetByteCount ({size}) and GetBytes ({written})");
                        throw new InvalidOperationException();
                    }
                }
            }

            WriteConstructedCerOctetString(tag, tmp.AsSpan(0, size));
        }
        finally
        {
            // Fix: clear and return the rented buffer even when GetBytes or the
            // write throws; previously an exception leaked the buffer back to
            // the pool path uncleared and un-returned, leaving potentially
            // sensitive encoded text reachable.
            Array.Clear(tmp, 0, size);
            localPool.Return(tmp);
        }
    }
}
}
| |
//-----------------------------------------------------------------------
// <copyright file="GraphStages.cs" company="Akka.NET Project">
// Copyright (C) 2015-2016 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
using System;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Akka.Actor;
using Akka.Streams.Dsl;
using Akka.Streams.Implementation.Stages;
using Akka.Streams.Stage;
using Akka.Util;
namespace Akka.Streams.Implementation.Fusing
{
/// <summary>
/// Factory helpers for a handful of commonly used internal graph stages.
/// </summary>
public static class GraphStages
{
    /// <summary>
    /// The shared identity (pass-through) stage for elements of type <typeparamref name="T"/>.
    /// </summary>
    /// <typeparam name="T">Type of the elements flowing through.</typeparam>
    /// <returns>The singleton identity stage.</returns>
    public static SimpleLinearGraphStage<T> Identity<T>() => Implementation.Fusing.Identity<T>.Instance;

    /// <summary>
    /// The shared termination-watcher stage, which materializes a <see cref="Task"/>
    /// tracking stream completion.
    /// </summary>
    /// <typeparam name="T">Type of the elements flowing through.</typeparam>
    /// <returns>The singleton termination-watcher stage.</returns>
    internal static GraphStageWithMaterializedValue<FlowShape<T, T>, Task> TerminationWatcher<T>()
        => Implementation.Fusing.TerminationWatcher<T>.Instance;

    /// <summary>
    /// Fusing graphs that have cycles involving FanIn stages might lead to deadlocks if
    /// demand is not carefully managed.
    ///
    /// This means that FanIn stages need to early pull every relevant input on startup.
    /// This can either be implemented inside the stage itself, or this method can be used,
    /// which adds a detacher stage to every input.
    /// </summary>
    /// <typeparam name="T">Type of the elements flowing through.</typeparam>
    /// <param name="stage">The fan-in stage whose inputs should be detached.</param>
    /// <returns>A graph with the same fan-in shape, but with a detacher in front of every input.</returns>
    internal static IGraph<UniformFanInShape<T, T>, NotUsed> WithDetachedInputs<T>(GraphStage<UniformFanInShape<T, T>> stage)
    {
        return GraphDsl.Create(b =>
        {
            var fanIn = b.Add(stage);

            // Put a Detacher in front of each fan-in input and expose the
            // detachers' inlets as the new shape's inputs.
            var detachedInlets = fanIn.Ins.Select(input =>
            {
                var detacher = b.Add(new Detacher<T>());
                b.From(detacher).To(input);
                return detacher.Inlet;
            }).ToArray();

            return new UniformFanInShape<T, T>(fanIn.Out, detachedInlets);
        });
    }
}
/// <summary>
/// INTERNAL API: an atomic module wrapping a single graph stage.
/// </summary>
public class GraphStageModule : AtomicModule
{
    /// <summary>
    /// The graph stage wrapped by this module.
    /// </summary>
    public readonly IGraphStageWithMaterializedValue<Shape, object> Stage;

    /// <summary>
    /// Creates a module wrapping <paramref name="stage"/>.
    /// </summary>
    /// <param name="shape">Shape exposed by the module.</param>
    /// <param name="attributes">Attributes attached to the module.</param>
    /// <param name="stage">The stage being wrapped.</param>
    public GraphStageModule(Shape shape, Attributes attributes, IGraphStageWithMaterializedValue<Shape, object> stage)
    {
        Shape = shape;
        Attributes = attributes;
        Stage = stage;
    }

    /// <summary>
    /// Shape exposed by this module.
    /// </summary>
    public override Shape Shape { get; }

    /// <summary>
    /// Returns a copy of this module with the given shape.
    /// </summary>
    /// <param name="shape">The new shape.</param>
    /// <returns>A <see cref="CopiedModule"/> wrapping this module.</returns>
    public override IModule ReplaceShape(Shape shape) => new CopiedModule(shape, Attributes.None, this);

    /// <summary>
    /// Returns a deep copy of this module (fresh shape, same stage).
    /// </summary>
    /// <returns>The copied module.</returns>
    public override IModule CarbonCopy() => ReplaceShape(Shape.DeepCopy());

    /// <summary>
    /// Attributes attached to this module.
    /// </summary>
    public override Attributes Attributes { get; }

    /// <summary>
    /// Returns a copy of this module carrying the given attributes.
    /// </summary>
    /// <param name="attributes">The new attributes.</param>
    /// <returns>The re-attributed module.</returns>
    public override IModule WithAttributes(Attributes attributes) => new GraphStageModule(Shape, attributes, Stage);

    /// <summary>
    /// Renders as "GraphStage(stage) [hash]" with the hash in 8-digit hex.
    /// </summary>
    /// <returns>A diagnostic string for this module.</returns>
    // Fix: the hash code is now actually formatted as 8-digit hex; the previous
    // interpolation emitted the Scala format specifier "%08x" as literal text.
    public override string ToString() => $"GraphStage({Stage}) [{GetHashCode():x8}]";
}
/// <summary>
/// INTERNAL API: base class for one-in/one-out graph stages, wiring up a
/// <see cref="FlowShape{TIn,TOut}"/> whose port names derive from the concrete type name.
/// </summary>
/// <typeparam name="T">Type of the elements flowing through.</typeparam>
public abstract class SimpleLinearGraphStage<T> : GraphStage<FlowShape<T, T>>
{
    /// <summary>
    /// The single input port, named "&lt;TypeName&gt;.in".
    /// </summary>
    public readonly Inlet<T> Inlet;

    /// <summary>
    /// The single output port, named "&lt;TypeName&gt;.out".
    /// </summary>
    public readonly Outlet<T> Outlet;

    /// <summary>
    /// Builds the ports and shape from the runtime type name of the subclass.
    /// </summary>
    protected SimpleLinearGraphStage()
    {
        var stageName = GetType().Name;
        Inlet = new Inlet<T>(stageName + ".in");
        Outlet = new Outlet<T>(stageName + ".out");
        Shape = new FlowShape<T, T>(Inlet, Outlet);
    }

    /// <summary>
    /// The flow shape: one inlet, one outlet.
    /// </summary>
    public override FlowShape<T, T> Shape { get; }
}
/// <summary>
/// A pass-through stage: every pushed element is forwarded unchanged, and every
/// downstream pull is forwarded upstream.
/// </summary>
/// <typeparam name="T">Type of the elements flowing through.</typeparam>
public sealed class Identity<T> : SimpleLinearGraphStage<T>
{
    #region internal classes
    private sealed class Logic : InAndOutGraphStageLogic
    {
        private readonly Identity<T> _identity;

        public Logic(Identity<T> stage) : base(stage.Shape)
        {
            _identity = stage;
            SetHandler(stage.Inlet, this);
            SetHandler(stage.Outlet, this);
        }

        public override void OnPush()
        {
            Push(_identity.Outlet, Grab(_identity.Inlet));
        }

        public override void OnPull()
        {
            Pull(_identity.Inlet);
        }
    }
    #endregion

    /// <summary>
    /// Shared singleton instance; the stage is stateless so one suffices.
    /// </summary>
    public static readonly Identity<T> Instance = new Identity<T>();

    private Identity()
    {
    }

    /// <summary>
    /// Default attributes naming this stage "identityOp".
    /// </summary>
    protected override Attributes InitialAttributes { get; } = Attributes.CreateName("identityOp");

    /// <summary>
    /// Creates the per-materialization logic.
    /// </summary>
    /// <param name="inheritedAttributes">Attributes inherited from the enclosing graph (unused).</param>
    /// <returns>A fresh <see cref="Logic"/>.</returns>
    protected override GraphStageLogic CreateLogic(Attributes inheritedAttributes) => new Logic(this);
}
/// <summary>
/// INTERNAL API: eagerly pulls its input on start and holds at most one element,
/// decoupling upstream production from downstream demand.
/// </summary>
/// <typeparam name="T">Type of the elements flowing through.</typeparam>
public sealed class Detacher<T> : GraphStage<FlowShape<T, T>>
{
    #region internal classes
    private sealed class Logic : InAndOutGraphStageLogic
    {
        private readonly Detacher<T> _detacher;

        public Logic(Detacher<T> stage) : base(stage.Shape)
        {
            _detacher = stage;
            SetHandler(stage.Inlet, this);
            SetHandler(stage.Outlet, this);
        }

        // Request the first element before any downstream demand arrives.
        public override void PreStart() => TryPull(_detacher.Inlet);

        public override void OnPush()
        {
            // If downstream has no demand yet, leave the element parked on the
            // inlet; OnPull will collect it.
            if (!IsAvailable(_detacher.Outlet))
                return;
            Push(_detacher.Outlet, Grab(_detacher.Inlet));
            TryPull(_detacher.Inlet);
        }

        public override void OnUpstreamFinish()
        {
            // Stay alive while a buffered element still waits on the inlet.
            if (!IsAvailable(_detacher.Inlet))
                CompleteStage();
        }

        public override void OnPull()
        {
            if (!IsAvailable(_detacher.Inlet))
                return;
            Push(_detacher.Outlet, Grab(_detacher.Inlet));
            if (IsClosed(_detacher.Inlet))
                CompleteStage();
            else
                Pull(_detacher.Inlet);
        }
    }
    #endregion

    /// <summary>
    /// The input port, named "in".
    /// </summary>
    public readonly Inlet<T> Inlet;

    /// <summary>
    /// The output port, named "out".
    /// </summary>
    public readonly Outlet<T> Outlet;

    /// <summary>
    /// Builds the ports, shape and the "Detacher" name attribute.
    /// </summary>
    public Detacher()
    {
        InitialAttributes = Attributes.CreateName("Detacher");
        Inlet = new Inlet<T>("in");
        Outlet = new Outlet<T>("out");
        Shape = new FlowShape<T, T>(Inlet, Outlet);
    }

    /// <summary>
    /// Default attributes naming this stage "Detacher".
    /// </summary>
    protected override Attributes InitialAttributes { get; }

    /// <summary>
    /// The flow shape: one inlet, one outlet.
    /// </summary>
    public override FlowShape<T, T> Shape { get; }

    /// <summary>
    /// Creates the per-materialization logic.
    /// </summary>
    /// <param name="inheritedAttributes">Attributes inherited from the enclosing graph (unused).</param>
    /// <returns>A fresh <see cref="Logic"/>.</returns>
    protected override GraphStageLogic CreateLogic(Attributes inheritedAttributes) => new Logic(this);

    /// <summary>
    /// Diagnostic name of this stage.
    /// </summary>
    /// <returns>"Detacher"</returns>
    public override string ToString() => "Detacher";
}
/// <summary>
/// A pass-through stage that materializes a <see cref="Task"/> which completes
/// when the stream finishes (upstream/downstream completion) or faults on failure.
/// </summary>
/// <typeparam name="T">Type of the elements flowing through.</typeparam>
internal sealed class TerminationWatcher<T> : GraphStageWithMaterializedValue<FlowShape<T, T>, Task>
{
    /// <summary>
    /// Shared singleton instance; all per-run state lives in the logic.
    /// </summary>
    public static readonly TerminationWatcher<T> Instance = new TerminationWatcher<T>();

    #region internal classes
    private sealed class Logic : InAndOutGraphStageLogic
    {
        private readonly TerminationWatcher<T> _watcher;
        private readonly TaskCompletionSource<NotUsed> _completion;

        public Logic(TerminationWatcher<T> stage, TaskCompletionSource<NotUsed> finishPromise) : base(stage.Shape)
        {
            _watcher = stage;
            _completion = finishPromise;
            SetHandler(stage._inlet, this);
            SetHandler(stage._outlet, this);
        }

        public override void OnPush() => Push(_watcher._outlet, Grab(_watcher._inlet));

        public override void OnPull() => Pull(_watcher._inlet);

        public override void OnUpstreamFinish()
        {
            _completion.TrySetResult(NotUsed.Instance);
            CompleteStage();
        }

        public override void OnUpstreamFailure(Exception e)
        {
            _completion.TrySetException(e);
            FailStage(e);
        }

        public override void OnDownstreamFinish()
        {
            _completion.TrySetResult(NotUsed.Instance);
            CompleteStage();
        }
    }
    #endregion

    private readonly Inlet<T> _inlet = new Inlet<T>("terminationWatcher.in");
    private readonly Outlet<T> _outlet = new Outlet<T>("terminationWatcher.out");

    private TerminationWatcher()
    {
        Shape = new FlowShape<T, T>(_inlet, _outlet);
    }

    /// <summary>
    /// Default attributes for this stage.
    /// </summary>
    protected override Attributes InitialAttributes { get; } = DefaultAttributes.TerminationWatcher;

    /// <summary>
    /// The flow shape: one inlet, one outlet.
    /// </summary>
    public override FlowShape<T, T> Shape { get; }

    /// <summary>
    /// Creates the logic together with the termination task it drives.
    /// </summary>
    /// <param name="inheritedAttributes">Attributes inherited from the enclosing graph (unused).</param>
    /// <returns>The logic and the task tracking stream termination.</returns>
    public override ILogicAndMaterializedValue<Task> CreateLogicAndMaterializedValue(Attributes inheritedAttributes)
    {
        var promise = new TaskCompletionSource<NotUsed>();
        return new LogicAndMaterializedValue<Task>(new Logic(this, promise), promise.Task);
    }

    /// <summary>
    /// Diagnostic name of this stage.
    /// </summary>
    /// <returns>"TerminationWatcher"</returns>
    public override string ToString() => "TerminationWatcher";
}
// TODO: fix typo (class name should be FlowMonitorImpl; renaming is a breaking
// change for existing references, so it is kept as-is for now)
/// <summary>
/// Atomic cell backing <see cref="MonitorFlow{T}"/>: holds either the last
/// element seen (as a raw <typeparamref name="T"/>) or a stream-state marker.
/// </summary>
/// <typeparam name="T">Type of the elements flowing through.</typeparam>
internal sealed class FLowMonitorImpl<T> : AtomicReference<object>, IFlowMonitor
{
    /// <summary>
    /// Starts in the <see cref="FlowMonitor.Initialized"/> state.
    /// </summary>
    public FLowMonitorImpl() : base(FlowMonitor.Initialized.Instance)
    {
    }

    /// <summary>
    /// Current stream state: a raw element is reported as
    /// <see cref="FlowMonitor.Received{T}"/>, anything else as the stored state marker.
    /// </summary>
    public FlowMonitor.IStreamState State
    {
        get
        {
            // Read the atomic reference exactly once for a consistent snapshot.
            var snapshot = Value;
            if (snapshot is T)
                return new FlowMonitor.Received<T>((T)snapshot);
            return snapshot as FlowMonitor.IStreamState;
        }
    }
}
/// <summary>
/// A pass-through stage that materializes an <see cref="IFlowMonitor"/> exposing
/// the most recent element and the current stream state.
/// </summary>
/// <typeparam name="T">Type of the elements flowing through.</typeparam>
internal sealed class MonitorFlow<T> : GraphStageWithMaterializedValue<FlowShape<T, T>, IFlowMonitor>
{
    #region Logic
    private sealed class Logic : InAndOutGraphStageLogic
    {
        private readonly MonitorFlow<T> _flow;
        private readonly FLowMonitorImpl<T> _monitor;

        public Logic(MonitorFlow<T> stage, FLowMonitorImpl<T> monitor) : base(stage.Shape)
        {
            _flow = stage;
            _monitor = monitor;
            SetHandler(stage.In, this);
            SetHandler(stage.Out, this);
        }

        public override void OnPush()
        {
            var element = Grab(_flow.In);
            Push(_flow.Out, element);
            // Elements that themselves implement IStreamState are wrapped so the
            // monitor can tell them apart from genuine state markers.
            if (element is FlowMonitor.IStreamState)
                _monitor.Value = new FlowMonitor.Received<T>(element);
            else
                _monitor.Value = element;
        }

        public override void OnPull() => Pull(_flow.In);

        public override void OnUpstreamFinish()
        {
            CompleteStage();
            _monitor.Value = FlowMonitor.Finished.Instance;
        }

        public override void OnUpstreamFailure(Exception e)
        {
            FailStage(e);
            _monitor.Value = new FlowMonitor.Failed(e);
        }

        public override void OnDownstreamFinish()
        {
            CompleteStage();
            _monitor.Value = FlowMonitor.Finished.Instance;
        }

        public override string ToString() => "MonitorFlowLogic";
    }
    #endregion

    /// <summary>
    /// Creates the stage and wires up its shape.
    /// </summary>
    public MonitorFlow()
    {
        Shape = new FlowShape<T, T>(In, Out);
    }

    /// <summary>
    /// The input port, named "MonitorFlow.in".
    /// </summary>
    public Inlet<T> In { get; } = new Inlet<T>("MonitorFlow.in");

    /// <summary>
    /// The output port, named "MonitorFlow.out".
    /// </summary>
    public Outlet<T> Out { get; } = new Outlet<T>("MonitorFlow.out");

    /// <summary>
    /// The flow shape: one inlet, one outlet.
    /// </summary>
    public override FlowShape<T, T> Shape { get; }

    /// <summary>
    /// Creates the logic together with the monitor it feeds.
    /// </summary>
    /// <param name="inheritedAttributes">Attributes inherited from the enclosing graph (unused).</param>
    /// <returns>The logic and the materialized <see cref="IFlowMonitor"/>.</returns>
    public override ILogicAndMaterializedValue<IFlowMonitor> CreateLogicAndMaterializedValue(Attributes inheritedAttributes)
    {
        var monitorImpl = new FLowMonitorImpl<T>();
        return new LogicAndMaterializedValue<IFlowMonitor>(new Logic(this, monitorImpl), monitorImpl);
    }

    /// <summary>
    /// Diagnostic name of this stage.
    /// </summary>
    /// <returns>"MonitorFlow"</returns>
    public override string ToString() => "MonitorFlow";
}
/// <summary>
/// A source that, after an initial delay, repeatedly emits a fixed element
/// ("tick") at a given interval. A tick is dropped when there is no downstream
/// demand at the moment it fires. Materializes an <see cref="ICancelable"/>
/// that stops the ticking.
/// </summary>
/// <typeparam name="T">Type of the tick element.</typeparam>
public sealed class TickSource<T> : GraphStageWithMaterializedValue<SourceShape<T>, ICancelable>
{
#region internal classes
[SuppressMessage("ReSharper", "MethodSupportsCancellation")]
private sealed class Logic : TimerGraphStageLogic, ICancelable
{
private readonly TickSource<T> _stage;
// Set exactly once by Cancel(); checked on every timer tick.
private readonly AtomicBoolean _cancelled = new AtomicBoolean();
// Async callback used to complete the stage from a non-stage thread; installed in PreStart.
private readonly AtomicReference<Action<NotUsed>> _cancelCallback =
new AtomicReference<Action<NotUsed>>(null);
public Logic(TickSource<T> stage) : base(stage.Shape)
{
_stage = stage;
SetHandler(_stage.Out, EagerTerminateOutput);
}
public override void PreStart()
{
_cancelCallback.Value = GetAsyncCallback<NotUsed>(_ => CompleteStage());
// Cancel() may have run before the callback was installed; honor it here.
if(_cancelled)
CompleteStage();
else
ScheduleRepeatedly("TickTimer", _stage._initialDelay, _stage._interval);
}
protected internal override void OnTimer(object timerKey)
{
// Ticks are emitted only when downstream has demand; otherwise silently dropped.
if (IsAvailable(_stage.Out) && !_cancelled)
Push(_stage.Out, _stage._tick);
}
public void Cancel()
{
// First caller wins; completes the stage via the async callback if installed.
if(!_cancelled.GetAndSet(true))
_cancelCallback.Value?.Invoke(NotUsed.Instance);
}
public bool IsCancellationRequested => _cancelled;
// NOTE(review): always the default token — it is never linked to Cancel(); confirm intended.
public CancellationToken Token { get; }
public void CancelAfter(TimeSpan delay) => Task.Delay(delay).ContinueWith(_ => Cancel());
public void CancelAfter(int millisecondsDelay) => Task.Delay(millisecondsDelay).ContinueWith(_ => Cancel());
public void Cancel(bool throwOnFirstException) => Cancel();
public override string ToString() => "TickSourceLogic";
}
#endregion
private readonly TimeSpan _initialDelay;
private readonly TimeSpan _interval;
private readonly T _tick;
/// <summary>
/// Creates a tick source.
/// </summary>
/// <param name="initialDelay">Delay before the first tick is emitted.</param>
/// <param name="interval">Interval between subsequent ticks.</param>
/// <param name="tick">The element emitted on every tick.</param>
public TickSource(TimeSpan initialDelay, TimeSpan interval, T tick)
{
_initialDelay = initialDelay;
_interval = interval;
_tick = tick;
Shape = new SourceShape<T>(Out);
}
/// <summary>
/// Default attributes (name) for this stage.
/// </summary>
protected override Attributes InitialAttributes { get; } = DefaultAttributes.TickSource;
/// <summary>
/// The single outlet through which ticks are emitted.
/// </summary>
public Outlet<T> Out { get; } = new Outlet<T>("TimerSource.out");
/// <summary>
/// Shape of this stage: a source of <typeparamref name="T"/>.
/// </summary>
public override SourceShape<T> Shape { get; }
/// <summary>
/// Creates the stage logic; the logic itself is the materialized
/// <see cref="ICancelable"/> used to stop the ticks.
/// </summary>
/// <param name="inheritedAttributes">Attributes inherited from the enclosing graph.</param>
/// <returns>The logic/cancelable pair for this materialization.</returns>
public override ILogicAndMaterializedValue<ICancelable> CreateLogicAndMaterializedValue(Attributes inheritedAttributes)
{
var logic = new Logic(this);
return new LogicAndMaterializedValue<ICancelable>(logic, logic);
}
/// <summary>
/// Returns a diagnostic description including delay, interval and tick value.
/// </summary>
/// <returns>A human-readable description of this source.</returns>
public override string ToString() => $"TickSource({_initialDelay}, {_interval}, {_tick})";
}
/// <summary>
/// Untyped view of <see cref="MaterializedValueSource{T}"/>, used by the
/// materializer to feed a computed materialized value into the stream.
/// </summary>
public interface IMaterializedValueSource
{
/// <summary>
/// Module backing this source.
/// </summary>
IModule Module { get; }
/// <summary>
/// Creates a fresh copy of this source; the source itself is single-use.
/// </summary>
/// <returns>A new, unused copy of this source.</returns>
IMaterializedValueSource CopySource();
/// <summary>
/// Outlet through which the materialized value is emitted.
/// </summary>
Outlet Outlet { get; }
/// <summary>
/// The materialized-value computation node this source publishes.
/// </summary>
StreamLayout.IMaterializedValueNode Computation { get; }
/// <summary>
/// Supplies the computed materialized value to be emitted.
/// </summary>
/// <param name="result">The materialized value; must be assignable to the source's element type.</param>
void SetValue(object result);
}
/// <summary>
/// INTERNAL API
///
/// This source is not reusable, it is only created internally.
/// It emits exactly one element — the materialized value supplied via
/// <see cref="SetValue"/> — and then completes.
/// </summary>
/// <typeparam name="T">Type of the materialized value emitted.</typeparam>
public sealed class MaterializedValueSource<T> : GraphStage<SourceShape<T>>, IMaterializedValueSource
{
#region internal classes
private sealed class Logic : GraphStageLogic
{
private readonly MaterializedValueSource<T> _source;
public Logic(MaterializedValueSource<T> source) : base(source.Shape)
{
_source = source;
SetHandler(source.Outlet, EagerTerminateOutput);
}
public override void PreStart()
{
// When the promise completes, emit its value downstream and finish the stage.
var cb = GetAsyncCallback<T>(element => Emit(_source.Outlet, element, CompleteStage));
// NOTE(review): task.Result is read unconditionally — a faulted or cancelled
// promise would throw inside this continuation; confirm SetValue is the only completer.
_source._promise.Task.ContinueWith(task => cb(task.Result), TaskContinuationOptions.ExecuteSynchronously);
}
}
#endregion
private static readonly Attributes Name = Attributes.CreateName("matValueSource");
/// <summary>
/// The materialized-value computation node this source publishes.
/// </summary>
public StreamLayout.IMaterializedValueNode Computation { get; }
Outlet IMaterializedValueSource.Outlet => Outlet;
/// <summary>
/// Outlet through which the materialized value is emitted.
/// </summary>
public readonly Outlet<T> Outlet;
// Completed exactly once by SetValue with the materialized value.
private readonly TaskCompletionSource<T> _promise = new TaskCompletionSource<T>();
/// <summary>
/// Creates a source bound to the given computation node and outlet.
/// </summary>
/// <param name="computation">Computation node whose result this source emits.</param>
/// <param name="outlet">Outlet to emit through.</param>
public MaterializedValueSource(StreamLayout.IMaterializedValueNode computation, Outlet<T> outlet)
{
Computation = computation;
Outlet = outlet;
Shape = new SourceShape<T>(Outlet);
}
/// <summary>
/// Creates a source bound to the given computation node with a fresh outlet.
/// </summary>
/// <param name="computation">Computation node whose result this source emits.</param>
public MaterializedValueSource(StreamLayout.IMaterializedValueNode computation) : this(computation, new Outlet<T>("matValue")) { }
/// <summary>
/// Default attributes (name "matValueSource") for this stage.
/// </summary>
protected override Attributes InitialAttributes => Name;
/// <summary>
/// Shape of this stage: a source of <typeparamref name="T"/>.
/// </summary>
public override SourceShape<T> Shape { get; }
/// <summary>
/// Supplies the materialized value; triggers emission and completion of the stage.
/// </summary>
/// <param name="value">The materialized value to emit.</param>
public void SetValue(T value) => _promise.SetResult(value);
void IMaterializedValueSource.SetValue(object result) => SetValue((T)result);
/// <summary>
/// Creates a fresh copy of this source; the source itself is single-use.
/// </summary>
/// <returns>A new, unused copy sharing the same computation and outlet.</returns>
public MaterializedValueSource<T> CopySource() => new MaterializedValueSource<T>(Computation, Outlet);
IMaterializedValueSource IMaterializedValueSource.CopySource() => CopySource();
/// <summary>
/// Creates the stage logic for one materialization.
/// </summary>
/// <param name="inheritedAttributes">Attributes inherited from the enclosing graph.</param>
/// <returns>The stage logic.</returns>
protected override GraphStageLogic CreateLogic(Attributes inheritedAttributes) => new Logic(this);
/// <summary>
/// Returns a diagnostic description including the computation node.
/// </summary>
/// <returns>A human-readable description of this source.</returns>
public override string ToString() => $"MaterializedValueSource({Computation})";
}
/// <summary>
/// A source stage that emits exactly one given element and then completes.
/// </summary>
/// <typeparam name="T">Type of the element emitted.</typeparam>
public sealed class SingleSource<T> : GraphStage<SourceShape<T>>
{
#region Internal classes
private sealed class Logic : OutGraphStageLogic
{
private readonly SingleSource<T> _source;
public Logic(SingleSource<T> source) : base(source.Shape)
{
_source = source;
SetHandler(source.Outlet, this);
}
// On the first (and only) demand: emit the element, then complete the stage.
public override void OnPull()
{
Push(_source.Outlet, _source._element);
CompleteStage();
}
}
#endregion
// The single element this source produces; validated non-null in the constructor.
private readonly T _element;
/// <summary>
/// The outlet through which the single element is emitted.
/// </summary>
public readonly Outlet<T> Outlet = new Outlet<T>("single.out");
/// <summary>
/// Creates a source emitting exactly <paramref name="element"/>.
/// </summary>
/// <param name="element">The element to emit; must not be null.</param>
public SingleSource(T element)
{
ReactiveStreamsCompliance.RequireNonNullElement(element);
_element = element;
Shape = new SourceShape<T>(Outlet);
}
/// <summary>
/// Shape of this stage: a source of <typeparamref name="T"/>.
/// </summary>
public override SourceShape<T> Shape { get; }
/// <summary>
/// Creates the stage logic for one materialization.
/// </summary>
/// <param name="inheritedAttributes">Attributes inherited from the enclosing graph.</param>
/// <returns>The stage logic.</returns>
protected override GraphStageLogic CreateLogic(Attributes inheritedAttributes) => new Logic(this);
}
/// <summary>
/// A source stage that emits the result of a task once it completes
/// successfully, then completes; a faulted or cancelled task fails the stage.
/// </summary>
/// <typeparam name="T">Type of the task result.</typeparam>
public sealed class TaskSource<T> : GraphStage<SourceShape<T>>
{
#region Internal classes
private sealed class Logic : OutGraphStageLogic
{
private readonly TaskSource<T> _stage;
public Logic(TaskSource<T> stage) : base(stage.Shape)
{
_stage = stage;
SetHandler(stage.Outlet, this);
}
public override void OnPull()
{
// Bridge the task's completion back onto the stage via an async callback.
var callback = GetAsyncCallback<Task<T>>(t =>
{
if (!t.IsCanceled && !t.IsFaulted)
Emit(_stage.Outlet, t.Result, CompleteStage);
else
FailStage(t.IsFaulted
? Flatten(t.Exception)
: new TaskCanceledException("Task was cancelled."));
});
_stage._task.ContinueWith(t => callback(t), TaskContinuationOptions.ExecuteSynchronously);
SetHandler(_stage.Outlet, EagerTerminateOutput); // After first pull we won't produce anything more
}
// Unwrap a single-inner-exception AggregateException for a cleaner failure cause.
private Exception Flatten(AggregateException exception)
=> exception.InnerExceptions.Count == 1 ? exception.InnerExceptions[0] : exception;
}
#endregion
private readonly Task<T> _task;
/// <summary>
/// The outlet through which the task result is emitted.
/// </summary>
public readonly Outlet<T> Outlet = new Outlet<T>("task.out");
/// <summary>
/// Creates a source emitting the result of <paramref name="task"/>.
/// </summary>
/// <param name="task">The task whose result is emitted; must not be null.</param>
public TaskSource(Task<T> task)
{
ReactiveStreamsCompliance.RequireNonNullElement(task);
_task = task;
Shape = new SourceShape<T>(Outlet);
}
/// <summary>
/// Shape of this stage: a source of <typeparamref name="T"/>.
/// </summary>
public override SourceShape<T> Shape { get; }
/// <summary>
/// Creates the stage logic for one materialization.
/// </summary>
/// <param name="inheritedAttributes">Attributes inherited from the enclosing graph.</param>
/// <returns>The stage logic.</returns>
protected override GraphStageLogic CreateLogic(Attributes inheritedAttributes) => new Logic(this);
/// <summary>
/// Returns a short diagnostic name for this stage.
/// </summary>
/// <returns>The string "TaskSource".</returns>
public override string ToString() => "TaskSource";
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using Mono.Addins;
using Nini.Config;
using OpenSim.Framework;
using OpenSim.Framework.Console;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using System;
using System.Collections.Generic;
namespace OpenSim.Region.OptionalModules.Avatar.Attachments
{
/// <summary>
/// Region module exposing "debug scene get/set" console commands for
/// inspecting and toggling per-scene debug options.
/// </summary>
[Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "SceneCommandsModule")]
public class SceneCommandsModule : ISceneCommandsModule, INonSharedRegionModule
{
// private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
// The scene this non-shared module instance is attached to; set in AddRegion.
private Scene m_scene;
public string Name { get { return "Scene Commands Module"; } }
// Non-shared module with no replaceable interface.
public Type ReplaceableInterface { get { return null; } }
public void Initialise(IConfigSource source)
{
// Nothing to configure.
}
public void PostInitialise()
{
// Nothing to do after initialisation.
}
public void Close()
{
// Nothing to release.
}
public void AddRegion(Scene scene)
{
m_scene = scene;
m_scene.RegisterModuleInterface<ISceneCommandsModule>(this);
}
public void RemoveRegion(Scene scene)
{
// Nothing to unregister; the scene is discarded with the module instance.
}
public void RegionLoaded(Scene scene)
{
// Register the console commands once the region is fully loaded.
scene.AddCommand(
"Debug", this, "debug scene get",
"debug scene get",
"List current scene options.",
"If active is false then main scene update and maintenance loops are suspended.\n"
+ "If animations is true then extra animations debug information is logged.\n"
+ "If collisions is false then collisions with other objects are turned off.\n"
+ "If pbackup is false then periodic scene backup is turned off.\n"
+ "If physics is false then all physics objects are non-physical.\n"
+ "If scripting is false then no scripting operations happen.\n"
+ "If teleport is true then some extra teleport debug information is logged.\n"
+ "If updates is true then any frame which exceeds double the maximum desired frame time is logged.",
HandleDebugSceneGetCommand);
scene.AddCommand(
"Debug", this, "debug scene set",
"debug scene set active|collisions|pbackup|physics|scripting|teleport|updates true|false",
"Turn on scene debugging options.",
"If active is false then main scene update and maintenance loops are suspended.\n"
+ "If animations is true then extra animations debug information is logged.\n"
+ "If collisions is false then collisions with other objects are turned off.\n"
+ "If pbackup is false then periodic scene backup is turned off.\n"
+ "If physics is false then all physics objects are non-physical.\n"
+ "If scripting is false then no scripting operations happen.\n"
+ "If teleport is true then some extra teleport debug information is logged.\n"
+ "If updates is true then any frame which exceeds double the maximum desired frame time is logged.",
HandleDebugSceneSetCommand);
}
// Console handler for "debug scene get": prints the current options for this
// scene, but only when this scene is the console's current (or global) scope.
private void HandleDebugSceneGetCommand(string module, string[] args)
{
if (args.Length == 3)
{
if (MainConsole.Instance.ConsoleScene != m_scene && MainConsole.Instance.ConsoleScene != null)
return;
OutputSceneDebugOptions();
}
else
{
MainConsole.Instance.Output("Usage: debug scene get");
}
}
// Renders the current debug option values as a console table.
private void OutputSceneDebugOptions()
{
ConsoleDisplayList cdl = new ConsoleDisplayList();
cdl.AddRow("active", m_scene.Active);
cdl.AddRow("animations", m_scene.DebugAnimations);
cdl.AddRow("pbackup", m_scene.PeriodicBackup);
cdl.AddRow("physics", m_scene.PhysicsEnabled);
cdl.AddRow("scripting", m_scene.ScriptsEnabled);
cdl.AddRow("teleport", m_scene.DebugTeleporting);
cdl.AddRow("updates", m_scene.DebugUpdates);
MainConsole.Instance.OutputFormat("Scene {0} options:", m_scene.Name);
MainConsole.Instance.Output(cdl.ToString());
}
// Console handler for "debug scene set <option> <true|false>": applies a
// single option, but only when this scene is the console's current (or global) scope.
private void HandleDebugSceneSetCommand(string module, string[] args)
{
if (args.Length == 5)
{
if (MainConsole.Instance.ConsoleScene != m_scene && MainConsole.Instance.ConsoleScene != null)
return;
string key = args[3];
string value = args[4];
SetSceneDebugOptions(new Dictionary<string, string>() { { key, value } });
MainConsole.Instance.OutputFormat("Set {0} debug scene {1} = {2}", m_scene.Name, key, value);
}
else
{
MainConsole.Instance.Output(
"Usage: debug scene set active|collisions|pbackup|physics|scripting|teleport|updates true|false");
}
}
/// <summary>
/// Applies the given debug options to the scene. Unknown keys and
/// unparseable values are silently ignored.
/// </summary>
/// <param name="options">Map of option name to "true"/"false" string.</param>
public void SetSceneDebugOptions(Dictionary<string, string> options)
{
ApplyBoolOption(options, "active", v => m_scene.Active = v);
ApplyBoolOption(options, "animations", v => m_scene.DebugAnimations = v);
ApplyBoolOption(options, "pbackup", v => m_scene.PeriodicBackup = v);
ApplyBoolOption(options, "scripting", v => m_scene.ScriptsEnabled = v);
ApplyBoolOption(options, "physics", v => m_scene.PhysicsEnabled = v);
// "collisions" is accepted on the command line but not yet implemented.
// TODO: If false, should stop objects colliding, though possibly should still
// allow the avatar themselves to collide with the ground.
ApplyBoolOption(options, "teleport", v => m_scene.DebugTeleporting = v);
ApplyBoolOption(options, "updates", v =>
{
m_scene.DebugUpdates = v;
GcNotify.Enabled = v;
});
}
// Parses options[key] as a bool and applies it via setter. A missing key or
// an unparseable value is ignored, matching the previous per-option behavior.
// Uses TryGetValue to avoid the former ContainsKey + indexer double lookup.
private static void ApplyBoolOption(Dictionary<string, string> options, string key, Action<bool> setter)
{
string rawValue;
if (!options.TryGetValue(key, out rawValue))
return;
bool value;
if (bool.TryParse(rawValue, out value))
setter(value);
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.MirrorSequences
{
using System;
using System.Linq;
using System.Collections.Generic;
using System.Diagnostics;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Serialization;
using Newtonsoft.Json;
using Models;
/// <summary>
/// A sample API that uses a petstore as an example to demonstrate
/// features in the swagger-2.0 specification
/// </summary>
// NOTE: AutoRest-generated client (see file header). Prefer regenerating from
// the swagger spec over hand-editing; manual changes will be lost.
public partial class SequenceRequestResponseTest : ServiceClient<SequenceRequestResponseTest>, ISequenceRequestResponseTest
{
/// <summary>
/// The base URI of the service.
/// </summary>
public Uri BaseUri { get; set; }
/// <summary>
/// Gets or sets json serialization settings.
/// </summary>
public JsonSerializerSettings SerializationSettings { get; private set; }
/// <summary>
/// Gets or sets json deserialization settings.
/// </summary>
public JsonSerializerSettings DeserializationSettings { get; private set; }
/// <summary>
/// Initializes a new instance of the SequenceRequestResponseTest class.
/// </summary>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public SequenceRequestResponseTest(params DelegatingHandler[] handlers) : base(handlers)
{
this.Initialize();
}
/// <summary>
/// Initializes a new instance of the SequenceRequestResponseTest class.
/// </summary>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public SequenceRequestResponseTest(HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : base(rootHandler, handlers)
{
this.Initialize();
}
/// <summary>
/// Initializes a new instance of the SequenceRequestResponseTest class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public SequenceRequestResponseTest(Uri baseUri, params DelegatingHandler[] handlers) : this(handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
this.BaseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the SequenceRequestResponseTest class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
public SequenceRequestResponseTest(Uri baseUri, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
this.BaseUri = baseUri;
}
/// <summary>
/// An optional partial-method to perform custom initialization.
///</summary>
partial void CustomInitialize();
/// <summary>
/// Initializes client properties.
/// </summary>
private void Initialize()
{
// Default endpoint; overwritten afterwards when a baseUri is supplied to the constructor.
this.BaseUri = new Uri("http://petstore.swagger.wordnik.com/api");
SerializationSettings = new JsonSerializerSettings
{
Formatting = Formatting.Indented,
DateFormatHandling = DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = DateTimeZoneHandling.Utc,
NullValueHandling = NullValueHandling.Ignore,
ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
ContractResolver = new ReadOnlyJsonContractResolver(),
Converters = new List<JsonConverter>
{
new Iso8601TimeSpanConverter()
}
};
DeserializationSettings = new JsonSerializerSettings
{
DateFormatHandling = DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = DateTimeZoneHandling.Utc,
NullValueHandling = NullValueHandling.Ignore,
ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
ContractResolver = new ReadOnlyJsonContractResolver(),
Converters = new List<JsonConverter>
{
new Iso8601TimeSpanConverter()
}
};
CustomInitialize();
}
/// <summary>
/// Creates a new pet in the store. Duplicates are allowed
/// </summary>
/// <param name='pets'>
/// Pets to add to the store
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <returns>
/// A response object containing the response body and response headers.
/// </returns>
public async Task<HttpOperationResponse<IList<Pet>>> AddPetWithHttpMessagesAsync(IList<Pet> pets, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (pets == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "pets");
}
// Validate each element of the request body before sending.
if (pets != null)
{
foreach (var element in pets)
{
if (element != null)
{
element.Validate();
}
}
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("pets", pets);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "AddPet", tracingParameters);
}
// Construct URL
var _baseUrl = this.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "pets").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("POST");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
if(pets != null)
{
_requestContent = SafeJsonConvert.SerializeObject(pets, this.SerializationSettings);
_httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8);
_httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// Any non-200 status is surfaced as an ErrorModelException carrying the parsed error body.
if ((int)_statusCode != 200)
{
var ex = new ErrorModelException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
ErrorModel _errorBody = SafeJsonConvert.DeserializeObject<ErrorModel>(_responseContent, this.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<IList<Pet>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = SafeJsonConvert.DeserializeObject<IList<Pet>>(_responseContent, this.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Adds new pet styles in the store. Duplicates are allowed
/// </summary>
/// <param name='petStyle'>
/// Pet style to add to the store
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <returns>
/// A response object containing the response body and response headers.
/// </returns>
public async Task<HttpOperationResponse<IList<int?>>> AddPetStylesWithHttpMessagesAsync(IList<int?> petStyle, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (petStyle == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "petStyle");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("petStyle", petStyle);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "AddPetStyles", tracingParameters);
}
// Construct URL
var _baseUrl = this.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "primitives").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("POST");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
if(petStyle != null)
{
_requestContent = SafeJsonConvert.SerializeObject(petStyle, this.SerializationSettings);
_httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8);
_httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// Any non-200 status is surfaced as an HttpOperationException carrying the parsed error body.
if ((int)_statusCode != 200)
{
var ex = new HttpOperationException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
IList<ErrorModel> _errorBody = SafeJsonConvert.DeserializeObject<IList<ErrorModel>>(_responseContent, this.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<IList<int?>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = SafeJsonConvert.DeserializeObject<IList<int?>>(_responseContent, this.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Updates new pet styles in the store. Duplicates are allowed
/// </summary>
/// <param name='petStyle'>
/// Pet style to add to the store
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <returns>
/// A response object containing the response body and response headers.
/// </returns>
public async Task<HttpOperationResponse<IList<int?>>> UpdatePetStylesWithHttpMessagesAsync(IList<int?> petStyle, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (petStyle == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "petStyle");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("petStyle", petStyle);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "UpdatePetStyles", tracingParameters);
}
// Construct URL
var _baseUrl = this.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "primitives").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("PUT");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
if(petStyle != null)
{
_requestContent = SafeJsonConvert.SerializeObject(petStyle, this.SerializationSettings);
_httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8);
_httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// Any non-200 status is surfaced as an HttpOperationException carrying the parsed error body.
if ((int)_statusCode != 200)
{
var ex = new HttpOperationException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
IList<ErrorModel> _errorBody = SafeJsonConvert.DeserializeObject<IList<ErrorModel>>(_responseContent, this.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<IList<int?>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = SafeJsonConvert.DeserializeObject<IList<int?>>(_responseContent, this.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
}
}
| |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Linq;
using QuantConnect.Orders.Fees;
using QuantConnect.Securities.Positions;
using QuantConnect.Securities.Option.StrategyMatcher;
namespace QuantConnect.Securities.Option
{
    /// <summary>
    /// Option strategy buying power model
    /// </summary>
    /// <remarks>
    /// Reference used https://www.interactivebrokers.com/en/index.php?f=26660
    /// </remarks>
    public class OptionStrategyPositionGroupBuyingPowerModel : PositionGroupBuyingPowerModel
    {
        // The strategy definition this model prices margin for; every public method below
        // dispatches on its name against the known OptionStrategyDefinitions.
        private readonly OptionStrategy _optionStrategy;
        /// <summary>
        /// Creates a new instance for a target option strategy
        /// </summary>
        /// <param name="optionStrategy">The option strategy to model</param>
        public OptionStrategyPositionGroupBuyingPowerModel(OptionStrategy optionStrategy)
        {
            _optionStrategy = optionStrategy;
        }
        /// <summary>
        /// Gets the margin currently allocated to the specified holding
        /// </summary>
        /// <param name="parameters">An object containing the security</param>
        /// <returns>The maintenance margin required for the position group</returns>
        /// <exception cref="NotImplementedException">Thrown when the strategy is not one of the modeled strategies</exception>
        public override MaintenanceMargin GetMaintenanceMargin(PositionGroupMaintenanceMarginParameters parameters)
        {
            if (_optionStrategy.Name == OptionStrategyDefinitions.CoveredCall.Name)
            {
                // MAX[In-the-money amount + Margin(long stock evaluated at min(mark price, strike(short call))), min(stock value, max(call value, long stock margin))]
                var optionPosition = parameters.PositionGroup.Positions.FirstOrDefault(position => position.Symbol.SecurityType.IsOption());
                var underlyingPosition = parameters.PositionGroup.Positions.FirstOrDefault(position => !position.Symbol.SecurityType.IsOption());
                var optionSecurity = (Option)parameters.Portfolio.Securities[optionPosition.Symbol];
                var underlyingSecurity = parameters.Portfolio.Securities[underlyingPosition.Symbol];
                // In-the-money amount scaled by the contract unit and the number of contracts in the group
                var intrinsicValue = optionSecurity.GetIntrinsicValue(underlyingSecurity.Price);
                var inTheMoneyAmount = intrinsicValue * optionSecurity.ContractUnitOfTrade * Math.Abs(optionPosition.Quantity);
                var underlyingValue = underlyingSecurity.Holdings.GetQuantityValue(underlyingPosition.Quantity);
                var optionValue = optionSecurity.Holdings.GetQuantityValue(optionPosition.Quantity);
                // mark price, strike price
                var underlyingPriceToEvaluate = Math.Min(optionSecurity.Price, optionSecurity.StrikePrice);
                var underlyingHypotheticalValue = underlyingSecurity.Holdings.GetQuantityValue(underlyingPosition.Quantity, underlyingPriceToEvaluate);
                // Margin of the long stock evaluated at the hypothetical (lower) price...
                var hypotheticalMarginRequired = underlyingSecurity.BuyingPowerModel.GetMaintenanceMargin(
                    new MaintenanceMarginParameters(underlyingSecurity, underlyingPosition.Quantity, 0, underlyingHypotheticalValue));
                // ...and at its current market value
                var marginRequired = underlyingSecurity.BuyingPowerModel.GetMaintenanceMargin(
                    new MaintenanceMarginParameters(underlyingSecurity, underlyingPosition.Quantity, 0, underlyingValue));
                var secondOperand = Math.Min(underlyingValue, Math.Max(optionValue, marginRequired));
                var result = Math.Max(inTheMoneyAmount + hypotheticalMarginRequired, secondOperand);
                var inAccountCurrency = parameters.Portfolio.CashBook.ConvertToAccountCurrency(result, optionSecurity.QuoteCurrency.Symbol);
                return new MaintenanceMargin(inAccountCurrency);
            }
            else if (_optionStrategy.Name == OptionStrategyDefinitions.CoveredPut.Name)
            {
                // Initial Stock Margin Requirement + In the Money Amount
                var optionPosition = parameters.PositionGroup.Positions.FirstOrDefault(position => position.Symbol.SecurityType.IsOption());
                var underlyingPosition = parameters.PositionGroup.Positions.FirstOrDefault(position => !position.Symbol.SecurityType.IsOption());
                var optionSecurity = (Option)parameters.Portfolio.Securities[optionPosition.Symbol];
                var underlyingSecurity = parameters.Portfolio.Securities[underlyingPosition.Symbol];
                var intrinsicValue = optionSecurity.GetIntrinsicValue(underlyingSecurity.Price);
                var inTheMoneyAmount = intrinsicValue * optionSecurity.ContractUnitOfTrade * Math.Abs(optionPosition.Quantity);
                var initialMarginRequirement = underlyingSecurity.BuyingPowerModel.GetInitialMarginRequirement(underlyingSecurity, underlyingPosition.Quantity);
                var result = Math.Abs(initialMarginRequirement) + inTheMoneyAmount;
                var inAccountCurrency = parameters.Portfolio.CashBook.ConvertToAccountCurrency(result, optionSecurity.QuoteCurrency.Symbol);
                return new MaintenanceMargin(inAccountCurrency);
            }
            else if (_optionStrategy.Name == OptionStrategyDefinitions.BearCallSpread.Name
                || _optionStrategy.Name == OptionStrategyDefinitions.BullCallSpread.Name
                || _optionStrategy.Name == OptionStrategyDefinitions.CallCalendarSpread.Name)
            {
                // Call spreads are margined by the long/short call strike difference
                var result = GetLongCallShortCallStrikeDifferenceMargin(parameters.PositionGroup, parameters.Portfolio);
                return new MaintenanceMargin(result);
            }
            else if (_optionStrategy.Name == OptionStrategyDefinitions.BearPutSpread.Name
                || _optionStrategy.Name == OptionStrategyDefinitions.BullPutSpread.Name
                || _optionStrategy.Name == OptionStrategyDefinitions.PutCalendarSpread.Name)
            {
                // Put spreads are margined by the short/long put strike difference
                var result = GetShortPutLongPutStrikeDifferenceMargin(parameters.PositionGroup, parameters.Portfolio);
                return new MaintenanceMargin(result);
            }
            else if (_optionStrategy.Name == OptionStrategyDefinitions.Straddle.Name || _optionStrategy.Name == OptionStrategyDefinitions.Strangle.Name)
            {
                // Margined as two long options.
                var callOption = parameters.PositionGroup.Positions.Single(position => position.Symbol.ID.OptionRight == OptionRight.Call);
                var callSecurity = (Option)parameters.Portfolio.Securities[callOption.Symbol];
                var callMargin = callSecurity.BuyingPowerModel.GetMaintenanceMargin(MaintenanceMarginParameters.ForQuantityAtCurrentPrice(
                    callSecurity, callOption.Quantity));
                var putOption = parameters.PositionGroup.Positions.Single(position => position.Symbol.ID.OptionRight == OptionRight.Put);
                var putSecurity = (Option)parameters.Portfolio.Securities[putOption.Symbol];
                var putMargin = putSecurity.BuyingPowerModel.GetMaintenanceMargin(MaintenanceMarginParameters.ForQuantityAtCurrentPrice(
                    putSecurity, putOption.Quantity));
                var result = callMargin.Value + putMargin.Value;
                return new MaintenanceMargin(result);
            }
            else if (_optionStrategy.Name == OptionStrategyDefinitions.ButterflyCall.Name || _optionStrategy.Name == OptionStrategyDefinitions.ButterflyPut.Name)
            {
                // Long butterflies are fully paid up front: no maintenance margin is required
                return new MaintenanceMargin(0);
            }
            else if (_optionStrategy.Name == OptionStrategyDefinitions.ShortButterflyPut.Name || _optionStrategy.Name == OptionStrategyDefinitions.ShortButterflyCall.Name)
            {
                // Short butterflies are margined by the middle/lowest strike difference
                var result = GetMiddleAndLowStrikeDifference(parameters.PositionGroup, parameters.Portfolio);
                return new MaintenanceMargin(result);
            }
            else if (_optionStrategy.Name == OptionStrategyDefinitions.IronCondor.Name)
            {
                // Margined on the put wing: short put strike minus long put strike
                var result = GetShortPutLongPutStrikeDifferenceMargin(parameters.PositionGroup, parameters.Portfolio);
                return new MaintenanceMargin(result);
            }
            throw new NotImplementedException($"Option strategy {_optionStrategy.Name} margin modeling has yet to be implemented");
        }
        /// <summary>
        /// The margin that must be held in order to increase the position by the provided quantity
        /// </summary>
        /// <param name="parameters">An object containing the security and quantity</param>
        /// <exception cref="NotImplementedException">Thrown when the strategy is not one of the modeled strategies</exception>
        public override InitialMargin GetInitialMarginRequirement(PositionGroupInitialMarginParameters parameters)
        {
            if (_optionStrategy.Name == OptionStrategyDefinitions.CoveredCall.Name)
            {
                // Max(Call Value, Long Stock Initial Margin)
                var optionPosition = parameters.PositionGroup.Positions.FirstOrDefault(position => position.Symbol.SecurityType.IsOption());
                var underlyingPosition = parameters.PositionGroup.Positions.FirstOrDefault(position => !position.Symbol.SecurityType.IsOption());
                var optionSecurity = (Option)parameters.Portfolio.Securities[optionPosition.Symbol];
                var underlyingSecurity = parameters.Portfolio.Securities[underlyingPosition.Symbol];
                var optionValue = optionSecurity.Holdings.GetQuantityValue(optionPosition.Quantity);
                var marginRequired = underlyingSecurity.BuyingPowerModel.GetInitialMarginRequirement(underlyingSecurity, underlyingPosition.Quantity);
                var result = Math.Max(optionValue, marginRequired);
                var inAccountCurrency = parameters.Portfolio.CashBook.ConvertToAccountCurrency(result, optionSecurity.QuoteCurrency.Symbol);
                return new InitialMargin(inAccountCurrency);
            }
            else if (_optionStrategy.Name == OptionStrategyDefinitions.CoveredPut.Name)
            {
                // Initial Stock Margin Requirement + In the Money Amount
                // Same formula as maintenance for this strategy, so delegate to it
                var margin = GetMaintenanceMargin(new PositionGroupMaintenanceMarginParameters(parameters.Portfolio, parameters.PositionGroup));
                return new InitialMargin(margin.Value);
            }
            else if (_optionStrategy.Name == OptionStrategyDefinitions.BearCallSpread.Name
                || _optionStrategy.Name == OptionStrategyDefinitions.BullCallSpread.Name
                || _optionStrategy.Name == OptionStrategyDefinitions.CallCalendarSpread.Name)
            {
                var result = GetLongCallShortCallStrikeDifferenceMargin(parameters.PositionGroup, parameters.Portfolio);
                return new InitialMargin(result);
            }
            else if (_optionStrategy.Name == OptionStrategyDefinitions.BearPutSpread.Name
                || _optionStrategy.Name == OptionStrategyDefinitions.BullPutSpread.Name
                || _optionStrategy.Name == OptionStrategyDefinitions.PutCalendarSpread.Name)
            {
                var result = GetShortPutLongPutStrikeDifferenceMargin(parameters.PositionGroup, parameters.Portfolio);
                return new InitialMargin(result);
            }
            else if (_optionStrategy.Name == OptionStrategyDefinitions.Straddle.Name || _optionStrategy.Name == OptionStrategyDefinitions.Strangle.Name)
            {
                // Margined as two long options.
                var callOption = parameters.PositionGroup.Positions.Single(position => position.Symbol.ID.OptionRight == OptionRight.Call);
                var callSecurity = (Option)parameters.Portfolio.Securities[callOption.Symbol];
                var callMargin = callSecurity.BuyingPowerModel.GetInitialMarginRequirement(callSecurity, callOption.Quantity);
                var putOption = parameters.PositionGroup.Positions.Single(position => position.Symbol.ID.OptionRight == OptionRight.Put);
                var putSecurity = (Option)parameters.Portfolio.Securities[putOption.Symbol];
                var putMargin = putSecurity.BuyingPowerModel.GetInitialMarginRequirement(putSecurity, putOption.Quantity);
                var result = callMargin + putMargin;
                return new InitialMargin(result);
            }
            else if (_optionStrategy.Name == OptionStrategyDefinitions.ButterflyCall.Name || _optionStrategy.Name == OptionStrategyDefinitions.ButterflyPut.Name)
            {
                // Long butterflies require no margin beyond the premium paid
                return new InitialMargin(0);
            }
            else if (_optionStrategy.Name == OptionStrategyDefinitions.ShortButterflyPut.Name || _optionStrategy.Name == OptionStrategyDefinitions.ShortButterflyCall.Name)
            {
                var result = GetMiddleAndLowStrikeDifference(parameters.PositionGroup, parameters.Portfolio);
                return new InitialMargin(result);
            }
            else if (_optionStrategy.Name == OptionStrategyDefinitions.IronCondor.Name)
            {
                var result = GetShortPutLongPutStrikeDifferenceMargin(parameters.PositionGroup, parameters.Portfolio);
                return new InitialMargin(result);
            }
            throw new NotImplementedException($"Option strategy {_optionStrategy.Name} margin modeling has yet to be implemented");
        }
        /// <summary>
        /// Gets the total margin required to execute the specified order in units of the account currency including fees
        /// </summary>
        /// <param name="parameters">An object containing the portfolio, the security and the order</param>
        /// <returns>The total margin in terms of the currency quoted in the order</returns>
        public override InitialMargin GetInitialMarginRequiredForOrder(PositionGroupInitialMarginForOrderParameters parameters)
        {
            var security = parameters.Portfolio.Securities[parameters.Order.Symbol];
            var fees = security.FeeModel.GetOrderFee(new OrderFeeParameters(security, parameters.Order));
            var feesInAccountCurrency = parameters.Portfolio.CashBook.ConvertToAccountCurrency(fees.Value);
            var initialMarginRequired = GetInitialMarginRequirement(new PositionGroupInitialMarginParameters(parameters.Portfolio, parameters.PositionGroup));
            // Fees take the sign of the margin requirement so they always increase its magnitude
            var feesWithSign = Math.Sign(initialMarginRequired) * feesInAccountCurrency.Amount;
            return new InitialMargin(feesWithSign + initialMarginRequired);
        }
        /// <summary>
        /// Returns a string that represents the current object.
        /// </summary>
        /// <returns>A string that represents the current object.</returns>
        public override string ToString()
        {
            return _optionStrategy.Name;
        }
        /// <summary>
        /// Returns the Maximum (Short Put Strike - Long Put Strike, 0)
        /// </summary>
        /// <param name="positionGroup">Group expected to contain exactly one long put and one short put</param>
        /// <param name="portfolio">Portfolio used to resolve securities and currency conversion</param>
        private static decimal GetShortPutLongPutStrikeDifferenceMargin(IPositionGroup positionGroup, SecurityPortfolioManager portfolio)
        {
            var longOption = positionGroup.Positions.Single(position => position.Symbol.ID.OptionRight == OptionRight.Put && position.Quantity > 0);
            var shortOption = positionGroup.Positions.Single(position => position.Symbol.ID.OptionRight == OptionRight.Put && position.Quantity < 0);
            var optionSecurity = (Option)portfolio.Securities[longOption.Symbol];
            // Maximum (Short Put Strike - Long Put Strike, 0)
            var strikeDifference = shortOption.Symbol.ID.StrikePrice - longOption.Symbol.ID.StrikePrice;
            var result = Math.Max(strikeDifference * optionSecurity.ContractUnitOfTrade * Math.Abs(positionGroup.Quantity), 0);
            // convert into account currency
            return portfolio.CashBook.ConvertToAccountCurrency(result, optionSecurity.QuoteCurrency.Symbol);
        }
        /// <summary>
        /// Returns the Maximum (Strike Long Call - Strike Short Call, 0)
        /// </summary>
        /// <param name="positionGroup">Group expected to contain exactly one long call and one short call</param>
        /// <param name="portfolio">Portfolio used to resolve securities and currency conversion</param>
        private static decimal GetLongCallShortCallStrikeDifferenceMargin(IPositionGroup positionGroup, SecurityPortfolioManager portfolio)
        {
            var longOption = positionGroup.Positions.Single(position => position.Symbol.ID.OptionRight == OptionRight.Call && position.Quantity > 0);
            var shortOption = positionGroup.Positions.Single(position => position.Symbol.ID.OptionRight == OptionRight.Call && position.Quantity < 0);
            var optionSecurity = (Option)portfolio.Securities[longOption.Symbol];
            var strikeDifference = longOption.Symbol.ID.StrikePrice - shortOption.Symbol.ID.StrikePrice;
            var result = Math.Max(strikeDifference * optionSecurity.ContractUnitOfTrade * Math.Abs(positionGroup.Quantity), 0);
            // convert into account currency
            return portfolio.CashBook.ConvertToAccountCurrency(result, optionSecurity.QuoteCurrency.Symbol);
        }
        /// <summary>
        /// Returns the Maximum (Middle Strike - Lowest Strike, 0)
        /// </summary>
        /// <param name="positionGroup">Group whose legs are ordered by strike to find the lowest and middle strikes</param>
        /// <param name="portfolio">Portfolio used to resolve securities and currency conversion</param>
        private static decimal GetMiddleAndLowStrikeDifference(IPositionGroup positionGroup, SecurityPortfolioManager portfolio)
        {
            // Sort the legs by strike so index 0 is the lowest strike and index 1 the middle one
            var options = positionGroup.Positions.OrderBy(position => position.Symbol.ID.StrikePrice).ToList();
            var lowestCallStrike = options[0].Symbol.ID.StrikePrice;
            var middleCallStrike = options[1].Symbol.ID.StrikePrice;
            var optionSecurity = (Option)portfolio.Securities[options[0].Symbol];
            var strikeDifference = Math.Max((middleCallStrike - lowestCallStrike) * optionSecurity.ContractUnitOfTrade * Math.Abs(positionGroup.Quantity), 0);
            // convert into account currency
            return portfolio.CashBook.ConvertToAccountCurrency(strikeDifference, optionSecurity.QuoteCurrency.Symbol);
        }
    }
}
| |
/*
* Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the elasticbeanstalk-2010-12-01.normal.json service model.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
namespace Amazon.ElasticBeanstalk.Model
{
/// <summary>
/// Describes the settings for a configuration set.
/// </summary>
public partial class ConfigurationSettingsDescription
{
private string _applicationName;
private DateTime? _dateCreated;
private DateTime? _dateUpdated;
private ConfigurationDeploymentStatus _deploymentStatus;
private string _description;
private string _environmentName;
private List<ConfigurationOptionSetting> _optionSettings = new List<ConfigurationOptionSetting>();
private string _solutionStackName;
private string _templateName;
/// <summary>
/// Gets and sets the property ApplicationName.
/// <para>
/// The name of the application associated with this configuration set.
/// </para>
/// </summary>
public string ApplicationName
{
get { return this._applicationName; }
set { this._applicationName = value; }
}
// Check to see if ApplicationName property is set
internal bool IsSetApplicationName()
{
return this._applicationName != null;
}
/// <summary>
/// Gets and sets the property DateCreated.
/// <para>
/// The date (in UTC time) when this configuration set was created.
/// </para>
/// </summary>
public DateTime DateCreated
{
get { return this._dateCreated.GetValueOrDefault(); }
set { this._dateCreated = value; }
}
// Check to see if DateCreated property is set
internal bool IsSetDateCreated()
{
return this._dateCreated.HasValue;
}
/// <summary>
/// Gets and sets the property DateUpdated.
/// <para>
/// The date (in UTC time) when this configuration set was last modified.
/// </para>
/// </summary>
public DateTime DateUpdated
{
get { return this._dateUpdated.GetValueOrDefault(); }
set { this._dateUpdated = value; }
}
// Check to see if DateUpdated property is set
internal bool IsSetDateUpdated()
{
return this._dateUpdated.HasValue;
}
/// <summary>
/// Gets and sets the property DeploymentStatus.
/// <para>
/// If this configuration set is associated with an environment, the <code>DeploymentStatus</code>
/// parameter indicates the deployment status of this configuration set:
/// </para>
/// <enumValues> <value name="null">
/// <para>
/// <code>null</code>: This configuration is not associated with a running environment.
///
/// </para>
/// </value> <value name="pending">
/// <para>
/// <code>pending</code>: This is a draft configuration that is not deployed to the associated
/// environment but is in the process of deploying.
/// </para>
/// </value> <value name="deployed">
/// <para>
/// <code>deployed</code>: This is the configuration that is currently deployed to the
/// associated running environment.
/// </para>
/// </value> <value name="failed">
/// <para>
/// <code>failed</code>: This is a draft configuration, that failed to successfully deploy.
///
/// </para>
/// </value> </enumValues> <ul> <li> <code>null</code>: This configuration is not associated
/// with a running environment. </li> <li> <code>pending</code>: This is a draft configuration
/// that is not deployed to the associated environment but is in the process of deploying.
/// </li> <li> <code>deployed</code>: This is the configuration that is currently deployed
/// to the associated running environment. </li> <li> <code>failed</code>: This is a draft
/// configuration that failed to successfully deploy. </li> </ul>
/// </summary>
public ConfigurationDeploymentStatus DeploymentStatus
{
get { return this._deploymentStatus; }
set { this._deploymentStatus = value; }
}
// Check to see if DeploymentStatus property is set
internal bool IsSetDeploymentStatus()
{
return this._deploymentStatus != null;
}
/// <summary>
/// Gets and sets the property Description.
/// <para>
/// Describes this configuration set.
/// </para>
/// </summary>
public string Description
{
get { return this._description; }
set { this._description = value; }
}
// Check to see if Description property is set
internal bool IsSetDescription()
{
return this._description != null;
}
/// <summary>
/// Gets and sets the property EnvironmentName.
/// <para>
/// If not <code>null</code>, the name of the environment for this configuration set.
///
/// </para>
/// </summary>
public string EnvironmentName
{
get { return this._environmentName; }
set { this._environmentName = value; }
}
// Check to see if EnvironmentName property is set
internal bool IsSetEnvironmentName()
{
return this._environmentName != null;
}
/// <summary>
/// Gets and sets the property OptionSettings.
/// <para>
/// A list of the configuration options and their values in this configuration set.
/// </para>
/// </summary>
public List<ConfigurationOptionSetting> OptionSettings
{
get { return this._optionSettings; }
set { this._optionSettings = value; }
}
// Check to see if OptionSettings property is set
internal bool IsSetOptionSettings()
{
return this._optionSettings != null && this._optionSettings.Count > 0;
}
/// <summary>
/// Gets and sets the property SolutionStackName.
/// <para>
/// The name of the solution stack this configuration set uses.
/// </para>
/// </summary>
public string SolutionStackName
{
get { return this._solutionStackName; }
set { this._solutionStackName = value; }
}
// Check to see if SolutionStackName property is set
internal bool IsSetSolutionStackName()
{
return this._solutionStackName != null;
}
/// <summary>
/// Gets and sets the property TemplateName.
/// <para>
/// If not <code>null</code>, the name of the configuration template for this configuration
/// set.
/// </para>
/// </summary>
public string TemplateName
{
get { return this._templateName; }
set { this._templateName = value; }
}
// Check to see if TemplateName property is set
internal bool IsSetTemplateName()
{
return this._templateName != null;
}
}
}
| |
using System;
using System.Text;
using System.Web.Security;
using System.Collections.Specialized;
using System.Security.Cryptography;
using CsDO.Lib;
using System.Web.Configuration;
using System.Configuration.Provider;
using System.Configuration;
using System.Web.Hosting;
using System.Runtime.Remoting;
using System.Reflection;
using System.Collections;
namespace CsDO.Membership
{
public sealed class CsDOMembershipProvider : MembershipProvider
{
        #region Private Fields
        // Presumably the length of auto-generated passwords (ResetPassword) — the field is
        // not used in the visible portion of this file; TODO confirm.
        private int newPasswordLength = 8;
        // <machineKey> section from configuration; its ValidationKey keys the HMACSHA1
        // password hash in EncodePassword.
        private MachineKeySection machineKey;
        // Concrete user data-object type resolved at runtime by SetupParameters.
        private Type _userObjectType;
        // Reflection handles to the attribute-marked properties of the user data object.
        // They are located once by SetupParameters and reused for every read/write of a
        // membership field on the data object.
        internal PropertyInfo _User_Name;
        internal PropertyInfo _User_Email;
        internal PropertyInfo _User_PasswordQuestion;
        internal PropertyInfo _User_Comment;
        internal PropertyInfo _User_IsApproved;
        internal PropertyInfo _User_IsLockedOut;
        internal PropertyInfo _User_CreationDate;
        internal PropertyInfo _User_LastLoginDate;
        internal PropertyInfo _User_LastActivityDate;
        internal PropertyInfo _User_LastPasswordChangedDate;
        internal PropertyInfo _User_LastLockedOutDate;
        internal PropertyInfo _User_Password;
        internal PropertyInfo _User_PasswordAnswer;
        internal PropertyInfo _User_IsOnLine;
        internal PropertyInfo _User_FailedPasswordAttemptCount;
        internal PropertyInfo _User_FailedPasswordAttemptWindowStart;
        internal PropertyInfo _User_FailedPasswordAnswerAttemptCount;
        internal PropertyInfo _User_FailedPasswordAnswerAttemptWindowStart;
        //TODO: Not used yet
        //private string assemblyName;
        //private string roleType;
        //private string usersInRolesType;
        #endregion
#region Helper functions
private string GetConfigValue(string configValue, string defaultValue)
{
if (String.IsNullOrEmpty(configValue))
return defaultValue;
return configValue;
}
internal DataObject GetUser(string username)
{
DataObject data = NewUser();
_User_Name.SetValue(data, username, null);
data.find();
if (data.fetch())
return data;
else
return null;
}
internal DataObject NewUser()
{
return (DataObject) Activator.CreateInstance(_userObjectType);
}
private PropertyInfo GetProperty(Type customAttribute)
{
foreach (PropertyInfo pi in _userObjectType.GetProperties())
if (pi.GetType().GetCustomAttributes(customAttribute, true).Length > 0)
return pi;
return null;
}
        /// <summary>
        /// Records a failed password or password-answer attempt for the user, and locks the
        /// account out once the stored failure count reaches MaxInvalidPasswordAttempts
        /// within the PasswordAttemptWindow.
        /// </summary>
        /// <param name="username">User whose failure counters are updated.</param>
        /// <param name="failureType">Either "password" or "passwordAnswer".</param>
        /// <exception cref="ProviderException">Thrown when the user row cannot be fetched.</exception>
        private void UpdateFailureCount(string username, string failureType)
        {
            DataObject data = NewUser();
            _User_Name.SetValue(data, username, null);
            data.find();
            if (data.fetch())
            {
                // Default-constructed DateTime is DateTime.MinValue, i.e. "no window started"
                DateTime windowStart = new DateTime();
                int failureCount = 0;
                // Read the counter/window pair matching the kind of failure being recorded
                if (failureType == "password")
                {
                    failureCount = (int)_User_FailedPasswordAttemptCount.GetValue(data, null);
                    windowStart = (DateTime)_User_FailedPasswordAttemptWindowStart.GetValue(data, null);
                }
                if (failureType == "passwordAnswer")
                {
                    failureCount = (int)_User_FailedPasswordAnswerAttemptCount.GetValue(data, null);
                    windowStart = (DateTime)_User_FailedPasswordAnswerAttemptWindowStart.GetValue(data, null);
                }
                DateTime windowEnd = windowStart.AddMinutes(PasswordAttemptWindow);
                if (failureCount == 0 || DateTime.Now > windowEnd)
                {
                    // First password failure or outside of PasswordAttemptWindow.
                    // Start a new password failure count from 1 and a new window starting now.
                    if (failureType == "password")
                    {
                        _User_FailedPasswordAttemptCount.SetValue(data, 1, null);
                        _User_FailedPasswordAttemptWindowStart.SetValue(data, DateTime.Now, null);
                    }
                    if (failureType == "passwordAnswer")
                    {
                        _User_FailedPasswordAnswerAttemptCount.SetValue(data, 1, null);
                        _User_FailedPasswordAnswerAttemptWindowStart.SetValue(data, DateTime.Now, null);
                    }
                    data.update();
                }
                else
                {
                    // Post-increment: the comparison uses the count already stored, so the
                    // lockout triggers on the attempt where the stored count has reached
                    // MaxInvalidPasswordAttempts.
                    if (failureCount++ >= MaxInvalidPasswordAttempts)
                    {
                        // Password attempts have exceeded the failure threshold. Lock out
                        // the user.
                        _User_IsLockedOut.SetValue(data, true, null);
                        _User_LastLockedOutDate.SetValue(data, DateTime.Now, null);
                        data.update();
                    }
                    else
                    {
                        // Password attempts have not exceeded the failure threshold. Update
                        // the failure counts. Leave the window the same.
                        if (failureType == "password")
                            _User_FailedPasswordAttemptCount.SetValue(data, failureCount, null);
                        if (failureType == "passwordAnswer")
                            _User_FailedPasswordAnswerAttemptCount.SetValue(data, failureCount, null);
                        data.update();
                    }
                }
            }
            else
            {
                throw new ProviderException("Unable to update failure count and window start.");
            }
        }
private bool CheckPassword(string password, string dbpassword)
{
string pass1 = password;
string pass2 = dbpassword;
switch (PasswordFormat)
{
case MembershipPasswordFormat.Encrypted:
pass2 = UnEncodePassword(dbpassword);
break;
case MembershipPasswordFormat.Hashed:
pass1 = EncodePassword(password);
break;
default:
break;
}
if (pass1 == pass2)
{
return true;
}
return false;
}
private string EncodePassword(string password)
{
string encodedPassword = password;
switch (PasswordFormat)
{
case MembershipPasswordFormat.Clear:
break;
case MembershipPasswordFormat.Encrypted:
encodedPassword =
Convert.ToBase64String(EncryptPassword(Encoding.Unicode.GetBytes(password)));
break;
case MembershipPasswordFormat.Hashed:
HMACSHA1 hash = new HMACSHA1();
hash.Key = HexToByte(machineKey.ValidationKey);
encodedPassword =
Convert.ToBase64String(hash.ComputeHash(Encoding.Unicode.GetBytes(password)));
break;
default:
throw new ProviderException("Unsupported password format.");
}
return encodedPassword;
}
private string UnEncodePassword(string encodedPassword)
{
string password = encodedPassword;
switch (PasswordFormat)
{
case MembershipPasswordFormat.Clear:
break;
case MembershipPasswordFormat.Encrypted:
password =
Encoding.Unicode.GetString(DecryptPassword(Convert.FromBase64String(password)));
break;
case MembershipPasswordFormat.Hashed:
throw new ProviderException("Cannot unencode a hashed password.");
default:
throw new ProviderException("Unsupported password format.");
}
return password;
}
private byte[] HexToByte(string hexString)
{
byte[] returnBytes = new byte[hexString.Length / 2];
for (int i = 0; i < returnBytes.Length; i++)
returnBytes[i] = Convert.ToByte(hexString.Substring(i * 2, 2), 16);
return returnBytes;
}
private MembershipUser GetUserFromReader(DataObject data)
{
object providerUserKey = data.getPrimaryKey();
string username = (string) _User_Name.GetValue(data, null);
string email = (string)_User_Email.GetValue(data, null);
string passwordQuestion = (string)_User_PasswordQuestion.GetValue(data, null);
string comment = (string)_User_Comment.GetValue(data, null);
bool isApproved = (bool)_User_IsApproved.GetValue(data, null);
bool isLockedOut = (bool)_User_IsLockedOut.GetValue(data, null);
DateTime creationDate = (DateTime)_User_CreationDate.GetValue(data, null);
DateTime lastLoginDate = (DateTime)_User_LastLoginDate.GetValue(data, null);
DateTime lastActivityDate = (DateTime)_User_LastActivityDate.GetValue(data, null);
DateTime lastPasswordChangedDate = (DateTime)_User_LastPasswordChangedDate.GetValue(data, null);
DateTime lastLockedOutDate = (DateTime)_User_LastLockedOutDate.GetValue(data, null);
MembershipUser u = new MembershipUser(this.Name,
username,
providerUserKey,
email,
passwordQuestion,
comment,
isApproved,
isLockedOut,
creationDate,
lastLoginDate,
lastActivityDate,
lastPasswordChangedDate,
lastLockedOutDate);
return u;
}
internal void SetupParameters(string assemblyName, string userObjectType)
{
ObjectHandle obj = Activator.CreateInstance(assemblyName, userObjectType);
_userObjectType = obj.Unwrap().GetType();
if (!_userObjectType.IsSubclassOf(typeof(DataObject)) && _userObjectType != typeof(DataObject))
throw new ProviderException("User Data Object should be a subtype of CsDO.Lib.DataObject .");
_User_Comment = GetProperty(typeof(User_Comment));
if (_User_Comment == null)
throw new ProviderException("Must have Comment field.");
_User_CreationDate = GetProperty(typeof(User_CreationDate));
if (_User_CreationDate == null)
throw new ProviderException("Must have Creation Date field.");
_User_Email = GetProperty(typeof(User_Email));
if (_User_Email == null)
throw new ProviderException("Must have User Email field.");
_User_IsApproved = GetProperty(typeof(User_IsApproved));
if (_User_IsApproved == null)
throw new ProviderException("Must have Approved field.");
_User_IsLockedOut = GetProperty(typeof(User_IsLockedOut));
if (_User_IsLockedOut == null)
throw new ProviderException("Must have Locked Out field.");
_User_LastActivityDate = GetProperty(typeof(User_LastActivityDate));
if (_User_LastActivityDate == null)
throw new ProviderException("Must have Last Activity Date field.");
_User_LastLockedOutDate = GetProperty(typeof(User_LastLockedOutDate));
if (_User_LastLockedOutDate == null)
throw new ProviderException("Must have Last Locked Out Date field.");
_User_LastLoginDate = GetProperty(typeof(User_LastLoginDate));
if (_User_LastLoginDate == null)
throw new ProviderException("Must have last Login Date field.");
_User_LastPasswordChangedDate = GetProperty(typeof(User_LastPasswordChangedDate));
if (_User_LastPasswordChangedDate == null)
throw new ProviderException("Must have last Password Changed Date field.");
_User_Name = GetProperty(typeof(User_Name));
if (_User_Name == null)
throw new ProviderException("Must have User Name field.");
_User_LastLoginDate = GetProperty(typeof(User_LastLoginDate));
if (_User_LastLoginDate == null)
throw new ProviderException("Must have last Login Date field.");
_User_LastActivityDate = GetProperty(typeof(User_LastActivityDate));
if (_User_LastActivityDate == null)
throw new ProviderException("Must have Last Activity Date field.");
_User_LastPasswordChangedDate = GetProperty(typeof(User_LastPasswordChangedDate));
if (_User_LastPasswordChangedDate == null)
throw new ProviderException("Must have Last Password Changed Date field.");
_User_LastLockedOutDate = GetProperty(typeof(User_LastLockedOutDate));
if (_User_LastLockedOutDate == null)
throw new ProviderException("Must have Last Locked Out Date field.");
_User_Password = GetProperty(typeof(User_Password));
if (_User_Password == null)
throw new ProviderException("Must have User Password field.");
_User_PasswordQuestion = GetProperty(typeof(User_PasswordQuestion));
if (_User_PasswordQuestion == null)
throw new ProviderException("Must have Password Question field.");
_User_PasswordAnswer = GetProperty(typeof(User_PasswordAnswer));
if (_User_PasswordAnswer == null)
throw new ProviderException("Must have Password Answer field.");
_User_IsOnLine = GetProperty(typeof(User_IsOnLine));
if (_User_IsOnLine == null)
throw new ProviderException("Must have Online field.");
_User_FailedPasswordAttemptCount = GetProperty(typeof(User_FailedPasswordAttemptCount));
if (_User_FailedPasswordAttemptCount == null)
throw new ProviderException("Must have Failed Password Attempt Count field.");
_User_FailedPasswordAttemptWindowStart = GetProperty(typeof(User_FailedPasswordAttemptWindowStart));
if (_User_FailedPasswordAttemptWindowStart == null)
throw new ProviderException("Must have Failed Password Attempt Window Start field.");
_User_FailedPasswordAnswerAttemptCount = GetProperty(typeof(User_FailedPasswordAnswerAttemptCount));
if (_User_FailedPasswordAnswerAttemptCount == null)
throw new ProviderException("Must have Failed Password Answer Attempt Count field.");
_User_FailedPasswordAnswerAttemptWindowStart = GetProperty(typeof(User_FailedPasswordAnswerAttemptWindowStart));
if (_User_FailedPasswordAnswerAttemptWindowStart == null)
throw new ProviderException("Must have Failed Password Answer Attempt Window Start field.");
}
#endregion
#region MembershipProvider Properties
// Backing fields for the MembershipProvider configuration values.
// All of them are populated exactly once in Initialize() from the
// provider's web.config attributes and are read-only thereafter
// (ApplicationName being the one exception the Membership API allows
// callers to change).
private string _ApplicationName;
private bool _EnablePasswordReset;
private bool _EnablePasswordRetrieval;
private bool _RequiresQuestionAndAnswer;
private bool _RequiresUniqueEmail;
private int _MaxInvalidPasswordAttempts;
private int _PasswordAttemptWindow;
private MembershipPasswordFormat _PasswordFormat;
private int _MinRequiredNonAlphanumericCharacters;
private int _MinRequiredPasswordLength;
private string _PasswordStrengthRegularExpression;

// Name of the application using this provider; scopes user records.
public override string ApplicationName
{
    get { return _ApplicationName; }
    set { _ApplicationName = value; }
}
// True when ResetPassword is permitted (config: enablePasswordReset).
public override bool EnablePasswordReset
{
    get { return _EnablePasswordReset; }
}
// True when GetPassword is permitted (config: enablePasswordRetrieval).
public override bool EnablePasswordRetrieval
{
    get { return _EnablePasswordRetrieval; }
}
// True when password retrieval/reset requires the security answer.
public override bool RequiresQuestionAndAnswer
{
    get { return _RequiresQuestionAndAnswer; }
}
// True when each user must register with a unique e-mail address.
public override bool RequiresUniqueEmail
{
    get { return _RequiresUniqueEmail; }
}
// Failed attempts allowed before the account is locked out.
public override int MaxInvalidPasswordAttempts
{
    get { return _MaxInvalidPasswordAttempts; }
}
// Minutes within which failed attempts are accumulated toward lockout.
public override int PasswordAttemptWindow
{
    get { return _PasswordAttemptWindow; }
}
// Storage format of passwords: Clear, Encrypted or Hashed.
public override MembershipPasswordFormat PasswordFormat
{
    get { return _PasswordFormat; }
}
// Minimum number of special (non-alphanumeric) characters required.
public override int MinRequiredNonAlphanumericCharacters
{
    get { return _MinRequiredNonAlphanumericCharacters; }
}
// Minimum total password length.
public override int MinRequiredPasswordLength
{
    get { return _MinRequiredPasswordLength; }
}
// Optional regular expression a new password must match (empty = none).
public override string PasswordStrengthRegularExpression
{
    get { return _PasswordStrengthRegularExpression; }
}
#endregion
#region MembershipProvider methods
/// <summary>
/// Initializes the provider from its web.config configuration section.
/// Reads all standard Membership settings (with sensible defaults),
/// resolves the password storage format, wires up the reflection-based
/// user-object mapping, and captures the machineKey section used for
/// password encryption/hashing.
/// </summary>
/// <param name="name">Provider instance name; defaults to "CsDOMembershipProvider" when empty.</param>
/// <param name="config">Attribute collection from the provider's config element.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="config"/> is null.</exception>
/// <exception cref="ProviderException">Thrown for an unrecognized passwordFormat value.</exception>
public override void Initialize(string name, NameValueCollection config)
{
    if (config == null)
        throw new ArgumentNullException("config");
    if (name == null || name.Length == 0)
        name = "CsDOMembershipProvider";
    // Supply a default description; base.Initialize would otherwise keep an empty one.
    if (String.IsNullOrEmpty(config["description"]))
    {
        config.Remove("description");
        config.Add("description", "CsDO Membership Provider");
    }
    // Initialize the abstract base class.
    // NOTE: must run before reading our own settings so the base sees the
    // adjusted description.
    base.Initialize(name, config);
    _ApplicationName = GetConfigValue(config["applicationName"],
        System.Web.Hosting.HostingEnvironment.ApplicationVirtualPath);
    _MaxInvalidPasswordAttempts = Convert.ToInt32(
        GetConfigValue(config["maxInvalidPasswordAttempts"], "5"));
    _PasswordAttemptWindow = Convert.ToInt32(
        GetConfigValue(config["passwordAttemptWindow"], "10"));
    _MinRequiredNonAlphanumericCharacters = Convert.ToInt32(
        GetConfigValue(config["minRequiredNonAlphanumericCharacters"], "1"));
    _MinRequiredPasswordLength = Convert.ToInt32(
        GetConfigValue(config["minRequiredPasswordLength"], "7"));
    _PasswordStrengthRegularExpression = Convert.ToString(
        GetConfigValue(config["passwordStrengthRegularExpression"], ""));
    _EnablePasswordReset = Convert.ToBoolean(
        GetConfigValue(config["enablePasswordReset"], "true"));
    _EnablePasswordRetrieval = Convert.ToBoolean(
        GetConfigValue(config["enablePasswordRetrieval"], "true"));
    _RequiresQuestionAndAnswer = Convert.ToBoolean(
        GetConfigValue(config["requiresQuestionAndAnswer"], "false"));
    _RequiresUniqueEmail = Convert.ToBoolean(
        GetConfigValue(config["requiresUniqueEmail"], "true"));
    // Password format defaults to the safest option (Hashed) when unspecified.
    string temp_format = config["passwordFormat"];
    if (temp_format == null)
    {
        temp_format = "Hashed";
    }
    switch (temp_format)
    {
        case "Hashed":
            _PasswordFormat = MembershipPasswordFormat.Hashed;
            break;
        case "Encrypted":
            _PasswordFormat = MembershipPasswordFormat.Encrypted;
            break;
        case "Clear":
            _PasswordFormat = MembershipPasswordFormat.Clear;
            break;
        default:
            throw new ProviderException("Password format not supported.");
    }
    //TODO: not used yet
    //assemblyName = config["assemblyName"];
    //roleType = config["roleObjectType"];
    //usersInRolesType = config["usersInRolesObjectType"];
    // Binds the reflection PropertyInfo fields (_User_Name, _User_Password, ...)
    // to the configured user DataObject type.
    SetupParameters(config["assemblyName"], config["userObjectType"]);
    // machineKey drives EncodePassword/UnEncodePassword for the
    // Encrypted/Hashed formats.
    Configuration cfg =
        WebConfigurationManager.OpenWebConfiguration(HostingEnvironment.ApplicationVirtualPath);
    machineKey = (MachineKeySection)cfg.GetSection("system.web/machineKey");
}
/// <summary>
/// Changes a user's password after verifying the old one and letting any
/// registered password validators veto the new one.
/// </summary>
/// <returns>True when the password was updated; false when the old
/// password is wrong or the user record cannot be fetched.</returns>
public override bool ChangePassword(string username, string oldPwd, string newPwd)
{
    // The caller must prove knowledge of the current password first.
    if (!ValidateUser(username, oldPwd))
        return false;

    // Give subscribers of ValidatingPassword a chance to reject the new password.
    ValidatePasswordEventArgs args = new ValidatePasswordEventArgs(username, newPwd, true);
    OnValidatingPassword(args);
    if (args.Cancel)
    {
        if (args.FailureInformation != null)
            throw args.FailureInformation;
        throw new MembershipPasswordException("Change password canceled due to new password validation failure.");
    }

    DataObject data = NewUser();
    _User_Name.SetValue(data, username, null);
    data.find();
    if (!data.fetch())
        return false;

    // Persist the encoded password and stamp the change date.
    _User_Password.SetValue(data, EncodePassword(newPwd), null);
    _User_LastPasswordChangedDate.SetValue(data, DateTime.Now, null);
    return data.update();
}
/// <summary>
/// Updates a user's password question and answer. The question is stored
/// in clear text; the answer is stored encoded, the same way passwords are.
/// </summary>
/// <returns>True on success; false when credentials are invalid or the
/// user record cannot be fetched.</returns>
public override bool ChangePasswordQuestionAndAnswer(string username,
                                                     string password,
                                                     string newPwdQuestion,
                                                     string newPwdAnswer)
{
    // Reject the request unless the supplied credentials are valid.
    if (!ValidateUser(username, password))
        return false;

    DataObject data = NewUser();
    _User_Name.SetValue(data, username, null);
    data.find();
    if (!data.fetch())
        return false;

    _User_PasswordQuestion.SetValue(data, newPwdQuestion, null);
    _User_PasswordAnswer.SetValue(data, EncodePassword(newPwdAnswer), null);
    return data.update();
}
/// <summary>
/// Creates a new membership user. On failure returns null and reports the
/// reason through <paramref name="status"/> (invalid password, duplicate
/// e-mail/user name, or a providerUserKey that is not a Guid).
/// </summary>
/// <returns>The freshly-stored user re-read from the database, or null.</returns>
public override MembershipUser CreateUser(string username,
                                          string password,
                                          string email,
                                          string passwordQuestion,
                                          string passwordAnswer,
                                          bool isApproved,
                                          object providerUserKey,
                                          out MembershipCreateStatus status)
{
    // Let registered validators veto the initial password.
    ValidatePasswordEventArgs args =
        new ValidatePasswordEventArgs(username, password, true);
    OnValidatingPassword(args);
    if (args.Cancel)
    {
        status = MembershipCreateStatus.InvalidPassword;
        return null;
    }
    if (RequiresUniqueEmail && GetUserNameByEmail(email) != "")
    {
        status = MembershipCreateStatus.DuplicateEmail;
        return null;
    }
    MembershipUser u = GetUser(username, false);
    if (u == null)
    {
        DateTime createDate = DateTime.Now;
        if (providerUserKey == null)
        {
            providerUserKey = Guid.NewGuid();
        }
        else
        {
            if (!(providerUserKey is Guid))
            {
                status = MembershipCreateStatus.InvalidProviderUserKey;
                return null;
            }
        }
        DataObject data = NewUser();
        //TODO: implement it on DataObject
        //data.setPrimaryKey(providerUserKey);
        _User_Name.SetValue(data, username, null);
        _User_Password.SetValue(data, EncodePassword(password), null);
        _User_Email.SetValue(data, email, null);
        _User_PasswordQuestion.SetValue(data, passwordQuestion, null);
        _User_PasswordAnswer.SetValue(data, EncodePassword(passwordAnswer), null);
        _User_IsApproved.SetValue(data, isApproved, null);
        _User_Comment.SetValue(data, "", null);
        // All tracking dates start at the creation timestamp.
        _User_CreationDate.SetValue(data, createDate, null);
        _User_LastPasswordChangedDate.SetValue(data, createDate, null);
        _User_LastActivityDate.SetValue(data, createDate, null);
        _User_IsLockedOut.SetValue(data, false, null);
        _User_LastLockedOutDate.SetValue(data, createDate, null);
        _User_FailedPasswordAttemptCount.SetValue(data, 0, null);
        _User_FailedPasswordAttemptWindowStart.SetValue(data, createDate, null);
        _User_FailedPasswordAnswerAttemptCount.SetValue(data, 0, null);
        // BUG FIX: this is a date field (cf. the password-attempt window two
        // lines above); the old code stored the integer 0 here.
        _User_FailedPasswordAnswerAttemptWindowStart.SetValue(data, createDate, null);
        if (data.insert())
        {
            status = MembershipCreateStatus.Success;
        }
        else
        {
            status = MembershipCreateStatus.UserRejected;
        }
        // Re-read so the caller gets the record as stored.
        return GetUser(username, false);
    }
    else
    {
        status = MembershipCreateStatus.DuplicateUserName;
    }
    return null;
}
/// <summary>
/// Deletes the user record matching <paramref name="username"/>.
/// </summary>
/// <returns>True when the record was deleted; false when no record matched.</returns>
public override bool DeleteUser(string username, bool deleteAllRelatedData)
{
    DataObject data = NewUser();
    _User_Name.SetValue(data, username, null);
    data.find();
    if (!data.fetch())
        return false;

    if (deleteAllRelatedData)
    {
        // Process commands to delete all data for the user in the database.
    }
    return data.delete();
}
/// <summary>
/// Returns one page of all membership users.
/// </summary>
/// <param name="pageIndex">Zero-based page number.</param>
/// <param name="pageSize">Number of users per page.</param>
/// <param name="totalRecords">Receives the total user count.</param>
public override MembershipUserCollection GetAllUsers(int pageIndex,
                                                     int pageSize,
                                                     out int totalRecords)
{
    DataObject data = NewUser();
    MembershipUserCollection users = new MembershipUserCollection();
    IList list = data.ToArray(true);
    totalRecords = list.Count;
    if (totalRecords <= 0) { return users; }
    // BUG FIX: the old loop used endIndex = start + pageSize - 1 with a
    // strict '<', returning pageSize - 1 items per page, and never clamped
    // to the list size, which threw on the last (partial) page.
    int startIndex = pageSize * pageIndex;
    int endIndex = Math.Min(startIndex + pageSize, totalRecords);
    for (int i = startIndex; i < endIndex; i++)
    {
        MembershipUser u = GetUserFromReader((DataObject) list[i]);
        users.Add(u);
    }
    return users;
}
/// <summary>
/// Counts users whose last activity falls inside the configured
/// Membership "user is online" time window.
/// </summary>
public override int GetNumberOfUsersOnline()
{
    TimeSpan onlineSpan = new TimeSpan(0, System.Web.Security.Membership.UserIsOnlineTimeWindow, 0);
    DateTime compareTime = DateTime.Now.Subtract(onlineSpan);

    IList list = NewUser().ToArray(true);
    int numOnline = 0;
    for (int i = 0; i < list.Count; i++)
    {
        DateTime lastActivity =
            (DateTime)_User_LastActivityDate.GetValue((DataObject)list[i], null);
        if (lastActivity > compareTime)
            numOnline++;
    }
    return numOnline;
}
/// <summary>
/// Retrieves a user's password (Clear or Encrypted formats only).
/// </summary>
/// <exception cref="ProviderException">Retrieval disabled, or format is Hashed.</exception>
/// <exception cref="MembershipPasswordException">Unknown user, locked-out
/// user, or (when required) an incorrect password answer.</exception>
public override string GetPassword(string username, string answer)
{
    if (!EnablePasswordRetrieval)
    {
        throw new ProviderException("Password Retrieval Not Enabled.");
    }
    // Hashing is one-way; the original password cannot be recovered.
    if (PasswordFormat == MembershipPasswordFormat.Hashed)
    {
        throw new ProviderException("Cannot retrieve Hashed passwords.");
    }

    DataObject data = NewUser();
    _User_Name.SetValue(data, username, null);
    data.find();
    if (!data.fetch())
        throw new MembershipPasswordException("The supplied user name is not found.");
    if ((bool)_User_IsLockedOut.GetValue(data, null))
        throw new MembershipPasswordException("The supplied user is locked out.");

    string password = (string)_User_Password.GetValue(data, null);
    string passwordAnswer = (string)_User_PasswordAnswer.GetValue(data, null);

    if (RequiresQuestionAndAnswer && !CheckPassword(answer, passwordAnswer))
    {
        // Count the miss toward answer-based lockout.
        UpdateFailureCount(username, "passwordAnswer");
        throw new MembershipPasswordException("Incorrect password answer.");
    }

    return PasswordFormat == MembershipPasswordFormat.Encrypted
        ? UnEncodePassword(password)
        : password;
}
/// <summary>
/// Gets the membership user with the given name, optionally stamping the
/// last-activity date (online tracking).
/// </summary>
/// <returns>The user, or null when no such user exists.</returns>
public override MembershipUser GetUser(string username, bool userIsOnline)
{
    DataObject data = NewUser();
    _User_Name.SetValue(data, username, null);
    data.find();
    if (!data.fetch())
    {
        // BUG FIX: the MembershipProvider contract returns null for an
        // unknown user. The old code threw a ProviderException here, which
        // broke CreateUser — it calls GetUser(username, false) and relies on
        // null to detect that the user name is free.
        return null;
    }

    MembershipUser u = GetUserFromReader(data);
    if (userIsOnline)
    {
        // Touch the activity stamp so GetNumberOfUsersOnline counts this user.
        _User_LastActivityDate.SetValue(data, DateTime.Now, null);
        data.update();
    }
    return u;
}
/// <summary>
/// Gets the membership user with the given provider key, optionally
/// stamping the last-activity date (online tracking).
/// </summary>
/// <returns>The user, or null when no record matches the key.</returns>
public override MembershipUser GetUser(object providerUserKey, bool userIsOnline)
{
    DataObject data = NewUser();
    if (!data.Get(providerUserKey))
    {
        // BUG FIX: return null for a missing user per the MembershipProvider
        // contract instead of throwing (consistent with GetUser(string)).
        return null;
    }

    MembershipUser u = GetUserFromReader(data);
    if (userIsOnline)
    {
        _User_LastActivityDate.SetValue(data, DateTime.Now, null);
        data.update();
    }
    return u;
}
/// <summary>
/// Clears the lockout flag for a user and records when the lock state
/// last changed.
/// </summary>
/// <returns>True when the record was updated; false when no user matched.</returns>
public override bool UnlockUser(string username)
{
    DataObject data = NewUser();
    _User_Name.SetValue(data, username, null);
    data.find();
    if (!data.fetch())
        return false;

    _User_IsLockedOut.SetValue(data, false, null);
    _User_LastLockedOutDate.SetValue(data, DateTime.Now, null);
    return data.update();
}
/// <summary>
/// Looks up the user name registered under an e-mail address.
/// </summary>
/// <returns>The user name, or "" when no user (or no stored name) matches —
/// callers such as CreateUser's duplicate-e-mail check depend on "" rather
/// than null.</returns>
public override string GetUserNameByEmail(string email)
{
    DataObject data = NewUser();
    _User_Email.SetValue(data, email, null);
    data.find();

    string username = data.fetch()
        ? (string)_User_Name.GetValue(data, null)
        : "";
    return username == null ? "" : username;
}
/// <summary>
/// Resets a user's password to a newly generated one and returns it.
/// </summary>
/// <exception cref="NotSupportedException">Password reset disabled.</exception>
/// <exception cref="ProviderException">Answer required but not supplied.</exception>
/// <exception cref="MembershipPasswordException">Validation veto, unknown
/// user, locked-out user, wrong answer, or failed update.</exception>
public override string ResetPassword(string username, string answer)
{
    if (!EnablePasswordReset)
    {
        throw new NotSupportedException("Password reset is not enabled.");
    }
    if (answer == null && RequiresQuestionAndAnswer)
    {
        UpdateFailureCount(username, "passwordAnswer");
        throw new ProviderException("Password answer required for password reset.");
    }
    string newPassword =
        System.Web.Security.Membership.GeneratePassword(newPasswordLength, MinRequiredNonAlphanumericCharacters);
    // The generated password is still subject to custom validation.
    ValidatePasswordEventArgs args =
        new ValidatePasswordEventArgs(username, newPassword, true);
    OnValidatingPassword(args);
    if (args.Cancel)
    {
        if (args.FailureInformation != null)
            throw args.FailureInformation;
        throw new MembershipPasswordException("Reset password canceled due to password validation failure.");
    }
    DataObject data = NewUser();
    _User_Name.SetValue(data, username, null);
    data.find();
    if (data.fetch())
    {
        if ((bool) _User_IsLockedOut.GetValue(data, null))
            throw new MembershipPasswordException("The supplied user is locked out.");
        // BUG FIX: read the stored answer from the PasswordAnswer field; the
        // old code read the Password field, so whenever RequiresQuestionAndAnswer
        // was on, the answer was compared against the wrong stored value
        // (cf. the correct read in GetPassword).
        string passwordAnswer = (string)_User_PasswordAnswer.GetValue(data, null);
        if (RequiresQuestionAndAnswer && !CheckPassword(answer, passwordAnswer))
        {
            UpdateFailureCount(username, "passwordAnswer");
            throw new MembershipPasswordException("Incorrect password answer.");
        }
        _User_Password.SetValue(data, EncodePassword(newPassword), null);
        _User_LastPasswordChangedDate.SetValue(data, DateTime.Now, null);
        if(data.update())
        {
            return newPassword;
        }
    }
    else
    {
        throw new MembershipPasswordException("The supplied user name is not found.");
    }
    throw new MembershipPasswordException("The supplied password could not be applied.");
}
/// <summary>
/// Persists the updatable fields of a MembershipUser: e-mail, comment and
/// approval flag. Silently does nothing when the user record is not found.
/// </summary>
public override void UpdateUser(MembershipUser user)
{
    DataObject data = NewUser();
    _User_Name.SetValue(data, user.UserName, null);
    data.find();
    if (!data.fetch())
        return;

    _User_Email.SetValue(data, user.Email, null);
    _User_Comment.SetValue(data, user.Comment, null);
    _User_IsApproved.SetValue(data, user.IsApproved, null);
    data.update();
}
/// <summary>
/// Verifies a user's credentials. On success stamps the last-login date;
/// on a wrong password records the failure for lockout accounting.
/// An unapproved account never validates (and does not count as a failure).
/// </summary>
public override bool ValidateUser(string username, string password)
{
    DataObject data = NewUser();
    _User_Name.SetValue(data, username, null);
    data.find();
    if (!data.fetch())
        return false;

    string storedPwd = (string)_User_Password.GetValue(data, null);
    bool approved = (bool)_User_IsApproved.GetValue(data, null);

    if (!CheckPassword(password, storedPwd))
    {
        // Wrong password: track the miss toward lockout.
        UpdateFailureCount(username, "password");
        return false;
    }

    if (!approved)
        return false;

    _User_LastLoginDate.SetValue(data, DateTime.Now, null);
    data.update();
    return true;
}
/// <summary>
/// Returns one page of users whose name matches
/// <paramref name="usernameToMatch"/> (matching semantics are those of the
/// underlying DataObject.find()).
/// </summary>
/// <param name="totalRecords">Receives the total number of matches.</param>
public override MembershipUserCollection FindUsersByName(string usernameToMatch,
                                                         int pageIndex,
                                                         int pageSize,
                                                         out int totalRecords)
{
    DataObject data = NewUser();
    _User_Name.SetValue(data, usernameToMatch, null);
    data.find();
    IList list = data.ToArray();
    totalRecords = list.Count;
    MembershipUserCollection users = new MembershipUserCollection();
    if (totalRecords > 0)
    {
        // BUG FIX: removed the unreachable 'totalRecords <= 0' check that sat
        // inside this branch, and corrected the pagination: the old loop
        // returned pageSize - 1 items per page and could index past the end
        // of the list on the final page.
        int startIndex = pageSize * pageIndex;
        int endIndex = Math.Min(startIndex + pageSize, totalRecords);
        for (int i = startIndex; i < endIndex; i++)
        {
            MembershipUser u = GetUserFromReader((DataObject) list[i]);
            users.Add(u);
        }
    }
    return users;
}
/// <summary>
/// Returns one page of users whose e-mail matches
/// <paramref name="emailToMatch"/> (matching semantics are those of the
/// underlying DataObject.find()).
/// </summary>
/// <param name="totalRecords">Receives the total number of matches.</param>
public override MembershipUserCollection FindUsersByEmail(string emailToMatch,
                                                          int pageIndex,
                                                          int pageSize,
                                                          out int totalRecords)
{
    DataObject data = NewUser();
    _User_Email.SetValue(data, emailToMatch, null);
    data.find();
    MembershipUserCollection users = new MembershipUserCollection();
    IList list = data.ToArray();
    totalRecords = list.Count;
    if (totalRecords > 0)
    {
        // BUG FIX: the old loop returned pageSize - 1 items per page
        // (endIndex off by one) and never clamped to the list size, which
        // threw on the last partial page.
        int startIndex = pageSize * pageIndex;
        int endIndex = Math.Min(startIndex + pageSize, totalRecords);
        for (int i = startIndex; i < endIndex; i++)
        {
            MembershipUser u = GetUserFromReader((DataObject)list[i]);
            users.Add(u);
        }
    }
    return users;
}
#endregion
}
}
| |
// Copyright (c) 2015, Outercurve Foundation.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// - Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// - Neither the name of the Outercurve Foundation nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Web.Services;
using WebsitePanel.EnterpriseServer.Code.SharePoint;
using WebsitePanel.Providers.SharePoint;
using Microsoft.Web.Services3;
namespace WebsitePanel.EnterpriseServer
{
/// <summary>
/// Enterprise-server web service for hosted SharePoint enterprise site
/// collections: paging/listing, CRUD, backup/restore (including chunked
/// file transfer), quota and disk-space operations. Every method is a thin
/// delegate to <see cref="HostedSharePointServerEntController"/>.
/// </summary>
[WebService(Namespace = "http://smbsaas/websitepanel/enterpriseserver")]
[WebServiceBinding(ConformsTo = WsiProfiles.BasicProfile1_1)]
[Policy("ServerPolicy")]
[ToolboxItem(false)]
public class esHostedSharePointServersEnt : WebService
{
    /// <summary>
    /// Gets site collections in raw form.
    /// </summary>
    /// <param name="packageId">Package to which desired site collections belong.</param>
    /// <param name="organizationId">Organization to which desired site collections belong.</param>
    /// <param name="filterColumn">Filter column name.</param>
    /// <param name="filterValue">Filter value.</param>
    /// <param name="sortColumn">Sort column name.</param>
    /// <param name="startRow">Row index to start from.</param>
    /// <param name="maximumRows">Maximum number of rows to retrieve.</param>
    /// <returns>Site collections in raw format.</returns>
    [WebMethod]
    public SharePointEnterpriseSiteCollectionListPaged Enterprise_GetSiteCollectionsPaged(int packageId, int organizationId,
        string filterColumn, string filterValue, string sortColumn, int startRow, int maximumRows)
    {
        return HostedSharePointServerEntController.GetSiteCollectionsPaged(packageId, organizationId, filterColumn, filterValue,
            sortColumn, startRow, maximumRows);
    }
    /// <summary>
    /// Gets list of supported languages by this installation of SharePoint.
    /// </summary>
    /// <param name="packageId">Package whose SharePoint installation is queried.</param>
    /// <returns>List of supported languages</returns>
    [WebMethod]
    public int[] Enterprise_GetSupportedLanguages(int packageId)
    {
        return HostedSharePointServerEntController.GetSupportedLanguages(packageId);
    }
    /// <summary>
    /// Gets list of SharePoint site collections that belong to the package.
    /// </summary>
    /// <param name="packageId">Package that owns site collections.</param>
    /// <param name="recursive">A value which shows whether nested spaces must be searched as well.</param>
    /// <returns>List of found site collections.</returns>
    [WebMethod]
    public List<SharePointEnterpriseSiteCollection> Enterprise_GetSiteCollections(int packageId, bool recursive)
    {
        return HostedSharePointServerEntController.GetSiteCollections(packageId, recursive);
    }
    /// <summary>
    /// Sets storage quota settings on the organization's SharePoint item,
    /// optionally propagating them to its site collections.
    /// </summary>
    /// <param name="itemId">Item id within metabase.</param>
    /// <param name="maxStorage">Maximum storage quota.</param>
    /// <param name="warningStorage">Warning-level storage quota.</param>
    /// <param name="applyToSiteCollections">Apply the quota to existing site collections as well.</param>
    /// <returns>Result/error code from the controller.</returns>
    [WebMethod]
    public int Enterprise_SetStorageSettings(int itemId, int maxStorage, int warningStorage, bool applyToSiteCollections)
    {
        return HostedSharePointServerEntController.SetStorageSettings(itemId, maxStorage, warningStorage, applyToSiteCollections );
    }
    /// <summary>
    /// Gets SharePoint site collection with given id.
    /// </summary>
    /// <param name="itemId">Site collection id within metabase.</param>
    /// <returns>Site collection.</returns>
    [WebMethod]
    public SharePointEnterpriseSiteCollection Enterprise_GetSiteCollection(int itemId)
    {
        return HostedSharePointServerEntController.GetSiteCollection(itemId);
    }
    /// <summary>
    /// Gets SharePoint site collection under organization with given domain.
    /// The owning package is resolved from the domain itself; matching
    /// compares each collection's URL (scheme://domain) against the domain.
    /// </summary>
    /// <param name="organizationId">Organization id.</param>
    /// <param name="domain">Domain name.</param>
    /// <returns>SharePoint site collection or null.</returns>
    [WebMethod]
    public SharePointEnterpriseSiteCollection Enterprise_GetSiteCollectionByDomain(int organizationId, string domain)
    {
        DomainInfo domainInfo = ServerController.GetDomain(domain);
        // "%{domain}" filter narrows the candidates; the loop below makes the match exact.
        SharePointEnterpriseSiteCollectionListPaged existentSiteCollections = this.Enterprise_GetSiteCollectionsPaged(domainInfo.PackageId, organizationId, "ItemName", String.Format("%{0}", domain), String.Empty, 0, Int32.MaxValue);
        foreach (SharePointEnterpriseSiteCollection existentSiteCollection in existentSiteCollections.SiteCollections)
        {
            Uri existentSiteCollectionUri = new Uri(existentSiteCollection.Name);
            if (existentSiteCollection.Name == String.Format("{0}://{1}", existentSiteCollectionUri.Scheme, domain))
            {
                return existentSiteCollection;
            }
        }
        return null;
    }
    /// <summary>
    /// Adds SharePoint site collection.
    /// </summary>
    /// <param name="item">Site collection description.</param>
    /// <returns>Created site collection id within metabase.</returns>
    [WebMethod]
    public int Enterprise_AddSiteCollection(SharePointEnterpriseSiteCollection item)
    {
        return HostedSharePointServerEntController.AddSiteCollection(item);
    }
    /// <summary>
    /// Deletes SharePoint site collection with given id.
    /// </summary>
    /// <param name="itemId">Site collection id within metabase.</param>
    /// <returns>Result/error code from the controller.</returns>
    [WebMethod]
    public int Enterprise_DeleteSiteCollection(int itemId)
    {
        return HostedSharePointServerEntController.DeleteSiteCollection(itemId);
    }
    /// <summary>
    /// Deletes SharePoint site collections which belong to organization.
    /// </summary>
    /// <param name="organizationId">Organization id within metabase.</param>
    /// <returns>Always 0 (controller result is discarded).</returns>
    [WebMethod]
    public int Enterprise_DeleteSiteCollections(int organizationId)
    {
        HostedSharePointServerEntController.DeleteSiteCollections(organizationId);
        return 0;
    }
    /// <summary>
    /// Backups SharePoint site collection.
    /// </summary>
    /// <param name="itemId">Site collection id within metabase.</param>
    /// <param name="fileName">Backed up site collection file name.</param>
    /// <param name="zipBackup">A value which shows whether back up must be archived.</param>
    /// <param name="download">A value which shows whether created back up must be downloaded.</param>
    /// <param name="folderName">Local folder to store downloaded backup.</param>
    /// <returns>Created backup file name. </returns>
    [WebMethod]
    public string Enterprise_BackupSiteCollection(int itemId, string fileName, bool zipBackup, bool download, string folderName)
    {
        return HostedSharePointServerEntController.BackupSiteCollection(itemId, fileName, zipBackup, download, folderName);
    }
    /// <summary>
    /// Restores SharePoint site collection from a backup file.
    /// </summary>
    /// <param name="itemId">Site collection id within metabase.</param>
    /// <param name="uploadedFile">Uploaded backup file to restore from.</param>
    /// <param name="packageFile">Package-local backup file to restore from.</param>
    /// <returns>Result/error code from the controller.</returns>
    [WebMethod]
    public int Enterprise_RestoreSiteCollection(int itemId, string uploadedFile, string packageFile)
    {
        return HostedSharePointServerEntController.RestoreSiteCollection(itemId, uploadedFile, packageFile);
    }
    /// <summary>
    /// Gets binary data chunk of specified size from specified offset.
    /// Used together with Enterprise_AppendBackupBinaryChunk to move backup
    /// files across servers piece by piece.
    /// </summary>
    /// <param name="itemId">Item id to obtain related service id.</param>
    /// <param name="path">Path to file to get binary data chunk from.</param>
    /// <param name="offset">Offset from which to start data reading.</param>
    /// <param name="length">Binary data chunk length.</param>
    /// <returns>Binary data chunk read from file.</returns>
    [WebMethod]
    public byte[] Enterprise_GetBackupBinaryChunk(int itemId, string path, int offset, int length)
    {
        return HostedSharePointServerEntController.GetBackupBinaryChunk(itemId, path, offset, length);
    }
    /// <summary>
    /// Appends supplied binary data chunk to file.
    /// </summary>
    /// <param name="itemId">Item id to obtain related service id.</param>
    /// <param name="fileName">Non existent file name to append to.</param>
    /// <param name="path">Full path to existent file to append to.</param>
    /// <param name="chunk">Binary data chunk to append to.</param>
    /// <returns>Path to file that was appended with chunk.</returns>
    [WebMethod]
    public string Enterprise_AppendBackupBinaryChunk(int itemId, string fileName, string path, byte[] chunk)
    {
        return HostedSharePointServerEntController.AppendBackupBinaryChunk(itemId, fileName, path, chunk);
    }
    /// <summary>
    /// Calculates disk space used by the organization's SharePoint sites.
    /// </summary>
    /// <param name="itemId">Item id within metabase.</param>
    /// <param name="errorCode">Receives the controller error code.</param>
    /// <returns>Per-site disk space usage.</returns>
    [WebMethod]
    public SharePointSiteDiskSpace[] Enterprise_CalculateSharePointSitesDiskSpace(int itemId, out int errorCode)
    {
        return HostedSharePointServerEntController.CalculateSharePointSitesDiskSpace(itemId, out errorCode);
    }
    /// <summary>
    /// Updates the storage quota of a single site collection.
    /// </summary>
    /// <param name="itemId">Item id within metabase.</param>
    /// <param name="siteCollectionId">Site collection id within metabase.</param>
    /// <param name="maxSize">Maximum storage quota.</param>
    /// <param name="warningSize">Warning-level storage quota.</param>
    [WebMethod]
    public void Enterprise_UpdateQuota(int itemId, int siteCollectionId, int maxSize, int warningSize)
    {
        HostedSharePointServerEntController.UpdateQuota(itemId, siteCollectionId, maxSize, warningSize);
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
using Microsoft.Azure.Management.Compute;
using Microsoft.Azure.Management.Compute.Models;
using Microsoft.Azure.Management.ResourceManager;
using Microsoft.Rest.ClientRuntime.Azure.TestFramework;
using System;
using System.Collections.Generic;
using System.Linq;
using Xunit;
namespace Compute.Tests
{
public class VMScaleSetExtensionTests : VMScaleSetTestsBase
{
/// <summary>
/// Covers following Operations:
/// Create RG
/// Create Storage Account
/// Create Network Resources
/// Create VMScaleSet
/// Create VMSS Extension
/// Update VMSS Extension
/// Get VMSS Extension
/// List VMSS Extensions
/// Delete VMSS Extension
/// Delete RG
/// </summary>
[Fact]
public void TestVMScaleSetExtensions()
{
using (MockContext context = MockContext.Start(this.GetType()))
{
TestVMScaleSetExtensionsImpl(context);
}
}
[Fact]
public void TestVMScaleSetExtensionSequencing()
{
using (MockContext context = MockContext.Start(this.GetType()))
{
// Create resource group
string rgName = TestUtilities.GenerateName(TestPrefix) + 1;
var vmssName = TestUtilities.GenerateName("vmss");
VirtualMachineScaleSet inputVMScaleSet;
try
{
EnsureClientsInitialized(context);
ImageReference imageRef = GetPlatformVMImage(useWindowsImage: false);
VirtualMachineScaleSetExtensionProfile vmssExtProfile = GetTestVmssExtensionProfile();
// Set extension sequencing (ext2 is provisioned after ext1)
vmssExtProfile.Extensions[1].ProvisionAfterExtensions = new List<string> { vmssExtProfile.Extensions[0].Name };
VirtualMachineScaleSet vmScaleSet = CreateVMScaleSet_NoAsyncTracking(
rgName,
vmssName,
null,
imageRef,
out inputVMScaleSet,
extensionProfile: vmssExtProfile,
createWithManagedDisks: true);
Assert.Equal("PT1H20M", vmScaleSet.VirtualMachineProfile.ExtensionProfile.ExtensionsTimeBudget);
// Perform a Get operation on each extension
VirtualMachineScaleSetExtension getVmssExtResponse = null;
for (int i = 0; i < vmssExtProfile.Extensions.Count; i++)
{
getVmssExtResponse = m_CrpClient.VirtualMachineScaleSetExtensions.Get(rgName, vmssName, vmssExtProfile.Extensions[i].Name);
ValidateVmssExtension(vmssExtProfile.Extensions[i], getVmssExtResponse);
}
// Add a new extension to the VMSS (ext3 is provisioned after ext2)
VirtualMachineScaleSetExtension vmssExtension = GetTestVMSSVMExtension(name: "3", publisher: "Microsoft.CPlat.Core", type: "NullLinux", version: "4.0");
vmssExtension.ProvisionAfterExtensions = new List<string> { vmssExtProfile.Extensions[1].Name };
var response = m_CrpClient.VirtualMachineScaleSetExtensions.CreateOrUpdate(rgName, vmssName, vmssExtension.Name, vmssExtension);
ValidateVmssExtension(vmssExtension, response);
// Perform a Get operation on the extension
getVmssExtResponse = m_CrpClient.VirtualMachineScaleSetExtensions.Get(rgName, vmssName, vmssExtension.Name);
ValidateVmssExtension(vmssExtension, getVmssExtResponse);
// Clear the sequencing in ext3
vmssExtension.ProvisionAfterExtensions.Clear();
var patchVmssExtsResponse = m_CrpClient.VirtualMachineScaleSetExtensions.CreateOrUpdate(rgName, vmssName, vmssExtension.Name, vmssExtension);
ValidateVmssExtension(vmssExtension, patchVmssExtsResponse);
// Perform a List operation on vmss extensions
var listVmssExtsResponse = m_CrpClient.VirtualMachineScaleSetExtensions.List(rgName, vmssName);
int installedExtensionsCount = listVmssExtsResponse.Count();
Assert.Equal(3, installedExtensionsCount);
VirtualMachineScaleSetExtension expectedVmssExt = null;
for (int i = 0; i < installedExtensionsCount; i++)
{
if (i < installedExtensionsCount - 1)
{
expectedVmssExt = vmssExtProfile.Extensions[i];
}
else
{
expectedVmssExt = vmssExtension;
}
ValidateVmssExtension(expectedVmssExt, listVmssExtsResponse.ElementAt(i));
}
}
finally
{
// Cleanup the created resources. But don't wait since it takes too long, and it's not the purpose
// of the test to cover deletion. CSM does persistent retrying over all RG resources.
m_ResourcesClient.ResourceGroups.Delete(rgName);
}
}
}
private VirtualMachineScaleSetExtensionProfile GetTestVmssExtensionProfile()
{
return new VirtualMachineScaleSetExtensionProfile
{
Extensions = new List<VirtualMachineScaleSetExtension>()
{
GetTestVMSSVMExtension(name: "1", publisher: "Microsoft.CPlat.Core", type: "NullSeqA", version: "2.0"),
GetTestVMSSVMExtension(name: "2", publisher: "Microsoft.CPlat.Core", type: "NullSeqB", version: "2.0")
},
ExtensionsTimeBudget = "PT1H20M"
};
}
private void TestVMScaleSetExtensionsImpl(MockContext context)
{
// Create resource group
string rgName = TestUtilities.GenerateName(TestPrefix) + 1;
var vmssName = TestUtilities.GenerateName("vmss");
string storageAccountName = TestUtilities.GenerateName(TestPrefix);
VirtualMachineScaleSet inputVMScaleSet;
bool passed = false;
try
{
EnsureClientsInitialized(context);
ImageReference imageRef = GetPlatformVMImage(useWindowsImage: true);
var storageAccountOutput = CreateStorageAccount(rgName, storageAccountName);
VirtualMachineScaleSet vmScaleSet = CreateVMScaleSet_NoAsyncTracking(
rgName,
vmssName,
storageAccountOutput,
imageRef,
out inputVMScaleSet);
// Add an extension to the VMSS
VirtualMachineScaleSetExtension vmssExtension = GetTestVMSSVMExtension(autoUpdateMinorVersion:false, enableAutomaticUpgrade: false);
vmssExtension.ForceUpdateTag = "RerunExtension";
var response = m_CrpClient.VirtualMachineScaleSetExtensions.CreateOrUpdate(rgName, vmssName, vmssExtension.Name, vmssExtension);
ValidateVmssExtension(vmssExtension, response);
// Perform a Get operation on the extension
var getVmssExtResponse = m_CrpClient.VirtualMachineScaleSetExtensions.Get(rgName, vmssName, vmssExtension.Name);
ValidateVmssExtension(vmssExtension, getVmssExtResponse);
// Validate the extension instance view in the VMSS instance-view
var getVmssWithInstanceViewResponse = m_CrpClient.VirtualMachineScaleSets.GetInstanceView(rgName, vmssName);
ValidateVmssExtensionInstanceView(getVmssWithInstanceViewResponse.Extensions.FirstOrDefault());
// Update an extension in the VMSS
vmssExtension.Settings = string.Empty;
var patchVmssExtsResponse = m_CrpClient.VirtualMachineScaleSetExtensions.CreateOrUpdate(rgName, vmssName, vmssExtension.Name, vmssExtension);
ValidateVmssExtension(vmssExtension, patchVmssExtsResponse);
// Perform a List operation on vmss extensions
var listVmssExtsResponse = m_CrpClient.VirtualMachineScaleSetExtensions.List(rgName, vmssName);
ValidateVmssExtension(vmssExtension, listVmssExtsResponse.FirstOrDefault(c => c.ForceUpdateTag == "RerunExtension"));
// Validate the extension delete API
m_CrpClient.VirtualMachineScaleSetExtensions.Delete(rgName, vmssName, vmssExtension.Name);
passed = true;
}
finally
{
// Cleanup the created resources. But don't wait since it takes too long, and it's not the purpose
// of the test to cover deletion. CSM does persistent retrying over all RG resources.
m_ResourcesClient.ResourceGroups.Delete(rgName);
}
Assert.True(passed);
}
/// <summary>
/// Asserts that an extension returned by the service matches the extension that was sent.
/// </summary>
/// <param name="vmssExtension">The extension definition that was submitted (expected values).</param>
/// <param name="vmssExtensionOut">The extension returned by the service (actual values).</param>
protected void ValidateVmssExtension(VirtualMachineScaleSetExtension vmssExtension, VirtualMachineScaleSetExtension vmssExtensionOut)
{
    Assert.NotNull(vmssExtensionOut);
    Assert.False(string.IsNullOrEmpty(vmssExtensionOut.ProvisioningState));
    // Assert.Equal gives expected/actual detail on failure, unlike Assert.True(a == b).
    Assert.Equal(vmssExtension.Publisher, vmssExtensionOut.Publisher);
    Assert.Equal(vmssExtension.Type1, vmssExtensionOut.Type1);
    Assert.Equal(vmssExtension.AutoUpgradeMinorVersion, vmssExtensionOut.AutoUpgradeMinorVersion);
    Assert.Equal(vmssExtension.TypeHandlerVersion, vmssExtensionOut.TypeHandlerVersion);
    // Null-conditional avoids an NRE (instead of an assertion failure) when Settings is unset.
    Assert.Equal(vmssExtension.Settings?.ToString(), vmssExtensionOut.Settings?.ToString());
    Assert.Equal(vmssExtension.ForceUpdateTag, vmssExtensionOut.ForceUpdateTag);
    Assert.Equal(vmssExtension.EnableAutomaticUpgrade, vmssExtensionOut.EnableAutomaticUpgrade);
    if (vmssExtension.ProvisionAfterExtensions != null)
    {
        // Assert before dereferencing so a missing list fails cleanly rather than with an NRE.
        Assert.NotNull(vmssExtensionOut.ProvisionAfterExtensions);
        Assert.Equal(vmssExtension.ProvisionAfterExtensions.Count, vmssExtensionOut.ProvisionAfterExtensions.Count);
        for (int i = 0; i < vmssExtension.ProvisionAfterExtensions.Count; i++)
        {
            Assert.Equal(vmssExtension.ProvisionAfterExtensions[i], vmssExtensionOut.ProvisionAfterExtensions[i]);
        }
    }
}
/// <summary>
/// Asserts that a VMSS extension instance-view summary is present and carries a name
/// plus a first status entry with both a code and a count.
/// </summary>
/// <param name="vmssExtSummary">The extension summary taken from the VMSS instance view.</param>
protected void ValidateVmssExtensionInstanceView(VirtualMachineScaleSetVMExtensionsSummary vmssExtSummary)
{
    Assert.NotNull(vmssExtSummary);
    Assert.NotNull(vmssExtSummary.Name);

    var firstStatus = vmssExtSummary.StatusesSummary[0];
    Assert.NotNull(firstStatus.Code);
    Assert.NotNull(firstStatus.Count);
}
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Threading.Tasks;
using Microsoft.Azure.WebJobs.Host;
using Microsoft.Azure.WebJobs.Script.Config;
using Microsoft.Azure.WebJobs.Script.ServiceFabricHost.Properties;
using Microsoft.Extensions.Logging;
namespace Microsoft.Azure.WebJobs.Script.ServiceFabricHost
{
/// <summary>
/// Manages host and function secrets: loads them from an <see cref="ISecretsRepository"/>,
/// generates them on demand, caches them in memory, and persists updates (re-encrypting
/// stale keys when the persisted state is out of date).
/// </summary>
public class SecretManager : IDisposable, ISecretManager
{
    // Cache of per-function secrets, keyed by lower-cased function name.
    private readonly ConcurrentDictionary<string, Dictionary<string, string>> _secretsMap = new ConcurrentDictionary<string, Dictionary<string, string>>();
    private readonly IKeyValueConverterFactory _keyValueConverterFactory;
    private readonly TraceWriter _traceWriter;
    private readonly ILogger _logger;
    private readonly ISecretsRepository _repository;
    // Cached host secrets; invalidated by OnSecretsChanged.
    private HostSecretsInfo _hostSecrets;

    // for testing
    public SecretManager()
    {
    }

    public SecretManager(ScriptSettingsManager settingsManager, ISecretsRepository repository, TraceWriter traceWriter, ILoggerFactory loggerFactory, bool createHostSecretsIfMissing = false)
        : this(repository, new DefaultKeyValueConverterFactory(settingsManager), traceWriter, loggerFactory, createHostSecretsIfMissing)
    {
    }

    public SecretManager(ISecretsRepository repository, IKeyValueConverterFactory keyValueConverterFactory, TraceWriter traceWriter, ILoggerFactory loggerFactory, bool createHostSecretsIfMissing = false)
    {
        _repository = repository;
        _keyValueConverterFactory = keyValueConverterFactory;
        _traceWriter = traceWriter;
        _repository.SecretsChanged += OnSecretsChanged;
        _logger = loggerFactory?.CreateLogger(ScriptConstants.LogCategoryHostGeneral);

        if (createHostSecretsIfMissing)
        {
            // The SecretManager implementation of GetHostSecrets will
            // create a host secret if one is not present.
            GetHostSecretsAsync().GetAwaiter().GetResult();
        }
    }

    public void Dispose()
    {
        Dispose(true);
        GC.SuppressFinalize(this);
    }

    protected virtual void Dispose(bool disposing)
    {
        if (disposing)
        {
            // Unsubscribe before disposing so no change notification can fire
            // against a disposed instance. _repository is null when the
            // parameterless test constructor was used.
            if (_repository != null)
            {
                _repository.SecretsChanged -= OnSecretsChanged;
            }
            (_repository as IDisposable)?.Dispose();
        }
    }

    /// <summary>
    /// Gets the host secrets, generating and persisting them if none exist yet.
    /// Results are cached until the repository reports a host secrets change.
    /// </summary>
    public virtual async Task<HostSecretsInfo> GetHostSecretsAsync()
    {
        if (_hostSecrets == null)
        {
            HostSecrets hostSecrets = await LoadSecretsAsync<HostSecrets>();
            if (hostSecrets == null)
            {
                _traceWriter.Verbose(Resources.TraceHostSecretGeneration);
                _logger?.LogDebug(Resources.TraceHostSecretGeneration);
                hostSecrets = GenerateHostSecrets();
                await PersistSecretsAsync(hostSecrets);
            }

            // Host secrets will be in the original persisted state at this point (e.g. encrypted),
            // so we read the secrets running them through the appropriate readers
            hostSecrets = ReadHostSecrets(hostSecrets);

            // If the persistence state of any of our secrets is stale (e.g. the encryption key has been rotated), update
            // the state and persist the secrets
            if (hostSecrets.HasStaleKeys)
            {
                _traceWriter.Verbose(Resources.TraceStaleHostSecretRefresh);
                _logger?.LogDebug(Resources.TraceStaleHostSecretRefresh);
                await RefreshSecretsAsync(hostSecrets);
            }

            _hostSecrets = new HostSecretsInfo
            {
                MasterKey = hostSecrets.MasterKey.Value,
                FunctionKeys = hostSecrets.FunctionKeys.ToDictionary(s => s.Name, s => s.Value),
                SystemKeys = hostSecrets.SystemKeys.ToDictionary(s => s.Name, s => s.Value)
            };
        }

        return _hostSecrets;
    }

    /// <summary>
    /// Gets the secrets for the named function, generating a default key if none exist.
    /// </summary>
    /// <param name="functionName">The function name (case-insensitive; must be non-empty).</param>
    /// <param name="merged">When true, host-level function keys are merged in, with
    /// function-specific keys taking precedence on name collisions.</param>
    public virtual async Task<IDictionary<string, string>> GetFunctionSecretsAsync(string functionName, bool merged = false)
    {
        if (string.IsNullOrEmpty(functionName))
        {
            throw new ArgumentNullException(nameof(functionName));
        }

        functionName = functionName.ToLowerInvariant();
        Dictionary<string, string> functionSecrets;
        _secretsMap.TryGetValue(functionName, out functionSecrets);

        if (functionSecrets == null)
        {
            FunctionSecrets secrets = await LoadFunctionSecretsAsync(functionName);
            if (secrets == null)
            {
                string message = string.Format(Resources.TraceFunctionSecretGeneration, functionName);
                _traceWriter.Verbose(message);
                _logger?.LogDebug(message);
                secrets = new FunctionSecrets
                {
                    Keys = new List<Key>
                    {
                        GenerateKey(ScriptConstants.DefaultFunctionKeyName)
                    }
                };

                await PersistSecretsAsync(secrets, functionName);
            }

            // Read all secrets, which will run the keys through the appropriate readers
            secrets.Keys = secrets.Keys.Select(k => _keyValueConverterFactory.ReadKey(k)).ToList();

            if (secrets.HasStaleKeys)
            {
                string message = string.Format(Resources.TraceStaleFunctionSecretRefresh, functionName);
                _traceWriter.Verbose(message);
                _logger?.LogDebug(message);
                await RefreshSecretsAsync(secrets, functionName);
            }

            Dictionary<string, string> result = secrets.Keys.ToDictionary(s => s.Name, s => s.Value);
            functionSecrets = _secretsMap.AddOrUpdate(functionName, result, (n, r) => result);
        }

        if (merged)
        {
            // If merged is true, we combine function specific keys with host level function keys,
            // prioritizing function specific keys
            HostSecretsInfo hostSecrets = await GetHostSecretsAsync();
            Dictionary<string, string> hostFunctionSecrets = hostSecrets.FunctionKeys;

            functionSecrets = functionSecrets.Union(hostFunctionSecrets.Where(s => !functionSecrets.ContainsKey(s.Key)))
                .ToDictionary(kv => kv.Key, kv => kv.Value);
        }

        return functionSecrets;
    }

    /// <summary>
    /// Adds a new secret or updates an existing one within the given scope.
    /// </summary>
    /// <exception cref="NotSupportedException">Thrown when <paramref name="secretsType"/> is not Function or Host.</exception>
    public async Task<KeyOperationResult> AddOrUpdateFunctionSecretAsync(string secretName, string secret, string keyScope, ScriptSecretsType secretsType)
    {
        Func<ScriptSecrets> secretsFactory = null;

        if (secretsType == ScriptSecretsType.Function)
        {
            secretsFactory = () => new FunctionSecrets(new List<Key>());
        }
        else if (secretsType == ScriptSecretsType.Host)
        {
            secretsFactory = GenerateHostSecrets;
        }
        else
        {
            throw new NotSupportedException($"Secrets type {secretsType.ToString("G")} not supported.");
        }

        KeyOperationResult result = await AddOrUpdateSecretAsync(secretsType, keyScope, secretName, secret, secretsFactory);

        string message = string.Format(Resources.TraceAddOrUpdateFunctionSecret, secretsType, secretName, keyScope ?? "host", result.Result);
        _traceWriter.Info(message);
        _logger?.LogInformation(message);

        return result;
    }

    /// <summary>
    /// Sets the master key, generating a new secret when <paramref name="value"/> is null.
    /// </summary>
    public async Task<KeyOperationResult> SetMasterKeyAsync(string value = null)
    {
        HostSecrets secrets = await LoadSecretsAsync<HostSecrets>();

        if (secrets == null)
        {
            secrets = GenerateHostSecrets();
        }

        OperationResult result;
        string masterKey;
        if (value == null)
        {
            // Generate a new secret (clear)
            masterKey = GenerateSecret();
            result = OperationResult.Created;
        }
        else
        {
            // Use the provided secret
            masterKey = value;
            result = OperationResult.Updated;
        }

        // Creates a key with the new master key (which will be encrypted, if required)
        secrets.MasterKey = CreateKey(ScriptConstants.DefaultMasterKeyName, masterKey);
        await PersistSecretsAsync(secrets);

        string message = string.Format(Resources.TraceMasterKeyCreatedOrUpdated, result);
        _traceWriter.Info(message);
        _logger?.LogInformation(message);

        return new KeyOperationResult(masterKey, result);
    }

    /// <summary>
    /// Deletes the named secret from the given scope.
    /// </summary>
    /// <returns>True if the secret existed and was removed; otherwise false.</returns>
    public async Task<bool> DeleteSecretAsync(string secretName, string keyScope, ScriptSecretsType secretsType)
    {
        bool deleted = await ModifyFunctionSecretAsync(secretsType, keyScope, secretName, (secrets, key) =>
        {
            secrets?.RemoveKey(key, keyScope);
            return secrets;
        });

        if (deleted)
        {
            string target = secretsType == ScriptSecretsType.Function
                ? $"Function ('{keyScope}')"
                : $"Host (scope: '{keyScope}')";

            string message = string.Format(Resources.TraceSecretDeleted, target, secretName);
            _traceWriter.Info(message);
            _logger?.LogInformation(message);
        }

        return deleted;
    }

    private async Task<KeyOperationResult> AddOrUpdateSecretAsync(ScriptSecretsType secretsType, string keyScope,
        string secretName, string secret, Func<ScriptSecrets> secretsFactory)
    {
        OperationResult result = OperationResult.NotFound;

        secret = secret ?? GenerateSecret();

        await ModifyFunctionSecretsAsync(secretsType, keyScope, secrets =>
        {
            Key key = secrets.GetFunctionKey(secretName, keyScope);

            var createAndUpdateKey = new Action<OperationResult>((o) =>
            {
                var newKey = CreateKey(secretName, secret);
                secrets.AddKey(newKey, keyScope);
                result = o;
            });

            if (key == null)
            {
                createAndUpdateKey(OperationResult.Created);
            }
            else if (secrets.RemoveKey(key, keyScope))
            {
                createAndUpdateKey(OperationResult.Updated);
            }

            return secrets;
        }, secretsFactory);

        return new KeyOperationResult(secret, result);
    }

    private async Task<bool> ModifyFunctionSecretAsync(ScriptSecretsType secretsType, string keyScope, string secretName, Func<ScriptSecrets, Key, ScriptSecrets> keyChangeHandler, Func<ScriptSecrets> secretFactory = null)
    {
        bool secretFound = false;

        await ModifyFunctionSecretsAsync(secretsType, keyScope, secrets =>
        {
            Key key = secrets?.GetFunctionKey(secretName, keyScope);

            if (key != null)
            {
                secretFound = true;

                secrets = keyChangeHandler(secrets, key);
            }

            return secrets;
        }, secretFactory);

        return secretFound;
    }

    // Loads current secrets, applies the change handler, and persists the result
    // (only when the handler returns a non-null secrets instance).
    private async Task ModifyFunctionSecretsAsync(ScriptSecretsType secretsType, string keyScope, Func<ScriptSecrets, ScriptSecrets> changeHandler, Func<ScriptSecrets> secretFactory)
    {
        ScriptSecrets currentSecrets = await LoadSecretsAsync(secretsType, keyScope);

        if (currentSecrets == null)
        {
            currentSecrets = secretFactory?.Invoke();
        }

        var newSecrets = changeHandler(currentSecrets);

        if (newSecrets != null)
        {
            await PersistSecretsAsync(newSecrets, keyScope);
        }
    }

    private Task<FunctionSecrets> LoadFunctionSecretsAsync(string functionName)
        => LoadSecretsAsync<FunctionSecrets>(functionName);

    private Task<ScriptSecrets> LoadSecretsAsync(ScriptSecretsType secretsType, string keyScope)
        => LoadSecretsAsync(secretsType, keyScope, s => ScriptSecretSerializer.DeserializeSecrets(secretsType, s));

    private async Task<T> LoadSecretsAsync<T>(string keyScope = null) where T : ScriptSecrets
    {
        ScriptSecretsType type = GetSecretsType<T>();

        var result = await LoadSecretsAsync(type, keyScope, ScriptSecretSerializer.DeserializeSecrets<T>);

        return result as T;
    }

    private async Task<ScriptSecrets> LoadSecretsAsync(ScriptSecretsType type, string keyScope, Func<string, ScriptSecrets> deserializationHandler)
    {
        string secretsJson = await _repository.ReadAsync(type, keyScope).ConfigureAwait(false);
        if (!string.IsNullOrEmpty(secretsJson))
        {
            return deserializationHandler(secretsJson);
        }

        return null;
    }

    private static ScriptSecretsType GetSecretsType<T>() where T : ScriptSecrets
    {
        return typeof(HostSecrets).IsAssignableFrom(typeof(T))
            ? ScriptSecretsType.Host
            : ScriptSecretsType.Function;
    }

    private HostSecrets GenerateHostSecrets()
    {
        return new HostSecrets
        {
            MasterKey = GenerateKey(ScriptConstants.DefaultMasterKeyName),
            FunctionKeys = new List<Key>
            {
                GenerateKey(ScriptConstants.DefaultFunctionKeyName)
            },
            SystemKeys = new List<Key>()
        };
    }

    private Task RefreshSecretsAsync<T>(T secrets, string keyScope = null) where T : ScriptSecrets
    {
        var refreshedSecrets = secrets.Refresh(_keyValueConverterFactory);

        return PersistSecretsAsync(refreshedSecrets, keyScope);
    }

    private Task PersistSecretsAsync<T>(T secrets, string keyScope = null) where T : ScriptSecrets
    {
        string secretsContent = ScriptSecretSerializer.SerializeSecrets<T>(secrets);

        return _repository.WriteAsync(secrets.SecretsType, keyScope, secretsContent);
    }

    private HostSecrets ReadHostSecrets(HostSecrets hostSecrets)
    {
        return new HostSecrets
        {
            MasterKey = _keyValueConverterFactory.ReadKey(hostSecrets.MasterKey),
            FunctionKeys = hostSecrets.FunctionKeys.Select(k => _keyValueConverterFactory.ReadKey(k)).ToList(),
            SystemKeys = hostSecrets.SystemKeys?.Select(k => _keyValueConverterFactory.ReadKey(k)).ToList() ?? new List<Key>()
        };
    }

    private Key GenerateKey(string name = null)
    {
        string secret = GenerateSecret();

        return CreateKey(name, secret);
    }

    private Key CreateKey(string name, string secret)
    {
        var key = new Key(name, secret);

        return _keyValueConverterFactory.WriteKey(key);
    }

    private static string GenerateSecret()
    {
        // Cryptographically strong random secret, base64-encoded.
        using (var rng = RandomNumberGenerator.Create())
        {
            byte[] data = new byte[40];
            rng.GetBytes(data);
            string secret = Convert.ToBase64String(data);

            // Replace pluses as they are problematic as URL values
            return secret.Replace('+', 'a');
        }
    }

    private void OnSecretsChanged(object sender, SecretsChangedEventArgs e)
    {
        // clear the cached secrets if they exist
        // they'll be reloaded on demand next time
        if (e.SecretsType == ScriptSecretsType.Host)
        {
            _hostSecrets = null;
        }
        else
        {
            Dictionary<string, string> secrets;
            _secretsMap.TryRemove(e.Name, out secrets);
        }
    }

    /// <summary>
    /// Deletes persisted secrets for functions that no longer have a folder under
    /// <paramref name="rootScriptPath"/>.
    /// </summary>
    public async Task PurgeOldSecretsAsync(string rootScriptPath, TraceWriter traceWriter, ILogger logger)
    {
        if (!Directory.Exists(rootScriptPath))
        {
            return;
        }

        // Create a lookup of all potential functions (whether they're valid or not)
        // It is important that we determine functions based on the presence of a folder,
        // not whether we've identified a valid function from that folder. This ensures
        // that we don't delete logs/secrets for functions that transition into/out of
        // invalid unparsable states.
        var currentFunctions = Directory.EnumerateDirectories(rootScriptPath).Select(p => Path.GetFileName(p)).ToList();

        await _repository.PurgeOldSecretsAsync(currentFunctions, traceWriter, logger);
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Linq;
using System.Globalization;
using System.Collections.Generic;
namespace System.Buffers.Text.Tests
{
//
// General purpose raw test data.
//
/// <summary>
/// General purpose raw test data for the System.Buffers.Text formatting/parsing tests.
/// </summary>
internal static partial class TestData
{
    public static readonly IEnumerable<byte> s_precisions = new byte[] { StandardFormat.NoPrecision, 0, 1, 3, 10, StandardFormat.MaxPrecision };

    public static IEnumerable<object[]> IntegerTypesTheoryData => IntegerTypes.Select(t => new object[] { t });

    public static IEnumerable<Type> IntegerTypes
    {
        get
        {
            yield return typeof(sbyte);
            yield return typeof(byte);
            yield return typeof(short);
            yield return typeof(ushort);
            yield return typeof(int);
            yield return typeof(uint);
            yield return typeof(long);
            yield return typeof(ulong);
        }
    }

    public static IEnumerable<bool> BooleanTestData
    {
        get
        {
            yield return true;
            yield return false;
        }
    }

    // Each fixed-width integer data set filters Int64TestData down to the values
    // representable in that type.
    public static IEnumerable<sbyte> SByteTestData
    {
        get
        {
            foreach (long l in Int64TestData)
            {
                if (l >= sbyte.MinValue && l <= sbyte.MaxValue)
                {
                    yield return (sbyte)l;
                }
            }
        }
    }

    public static IEnumerable<byte> ByteTestData
    {
        get
        {
            foreach (long l in Int64TestData)
            {
                if (l >= byte.MinValue && l <= byte.MaxValue)
                {
                    yield return (byte)l;
                }
            }
        }
    }

    public static IEnumerable<short> Int16TestData
    {
        get
        {
            foreach (long l in Int64TestData)
            {
                if (l >= short.MinValue && l <= short.MaxValue)
                {
                    yield return (short)l;
                }
            }
        }
    }

    public static IEnumerable<ushort> UInt16TestData
    {
        get
        {
            foreach (long l in Int64TestData)
            {
                if (l >= ushort.MinValue && l <= ushort.MaxValue)
                {
                    yield return (ushort)l;
                }
            }
        }
    }

    public static IEnumerable<int> Int32TestData
    {
        get
        {
            foreach (long l in Int64TestData)
            {
                if (l >= int.MinValue && l <= int.MaxValue)
                {
                    yield return (int)l;
                }
            }
        }
    }

    public static IEnumerable<uint> UInt32TestData
    {
        get
        {
            foreach (long l in Int64TestData)
            {
                if (l >= uint.MinValue && l <= uint.MaxValue)
                {
                    yield return (uint)l;
                }
            }
        }
    }

    public static IEnumerable<long> Int64TestData
    {
        get
        {
            yield return 0L;
            yield return 1L;
            yield return 123L;
            yield return -123L;
            yield return 1234L;
            yield return -1234L;
            yield return 12345L;
            yield return -12345L;
            yield return 4294967294999999999L; // uint.MaxValue * Billion - 1
            yield return 4294967295000000000L; // uint.MaxValue * Billion
            yield return 4294967295000000001L; // uint.MaxValue * Billion + 1
            yield return -4294967294999999999L; // -(uint.MaxValue * Billion - 1)
            yield return -4294967295000000000L; // -(uint.MaxValue * Billion)
            yield return -4294967295000000001L; // -(uint.MaxValue * Billion + 1)
            yield return 4294967296000000000L; // (uint.MaxValue + 1) * Billion
            yield return -4294967296000000000L; // -(uint.MaxValue + 1) * Billion

            // Powers of 10 (and their neighbors) up to 10^22, which overflows long
            // and deliberately exercises wraparound values.
            long powerOf10 = 1L;
            for (int i = 0; i < 21; i++)
            {
                powerOf10 *= 10L;
                yield return powerOf10 - 1;
                yield return powerOf10;
                yield return -(powerOf10 - 1);
                yield return -powerOf10;
            }

            // Boundaries (and off-by-one neighbors) of every smaller integer type.
            yield return sbyte.MinValue;
            yield return sbyte.MinValue - 1;
            yield return sbyte.MinValue + 1;
            yield return sbyte.MaxValue;
            yield return sbyte.MaxValue - 1;
            yield return sbyte.MaxValue + 1;
            yield return short.MinValue;
            yield return short.MinValue - 1;
            yield return short.MinValue + 1;
            yield return short.MaxValue;
            yield return short.MaxValue - 1;
            yield return short.MaxValue + 1;
            yield return int.MinValue;
            yield return ((long)int.MinValue) - 1;
            yield return int.MinValue + 1;
            yield return int.MaxValue;
            yield return int.MaxValue - 1;
            yield return ((long)int.MaxValue) + 1;
            yield return long.MinValue;
            yield return long.MinValue + 1;
            yield return long.MaxValue;
            yield return long.MaxValue - 1;
            yield return byte.MaxValue;
            yield return byte.MaxValue - 1;
            yield return ushort.MaxValue;
            yield return ushort.MaxValue - 1;
            yield return uint.MaxValue;
            yield return uint.MaxValue - 1;
        }
    }

    public static IEnumerable<ulong> UInt64TestData
    {
        get
        {
            foreach (long l in Int64TestData)
            {
                if (l >= 0)
                {
                    yield return (ulong)l;
                }
            }
            yield return long.MaxValue + 1LU;
            yield return ulong.MaxValue - 1LU;
            yield return ulong.MaxValue;
        }
    }

    public static IEnumerable<decimal> DecimalTestData
    {
        get
        {
            foreach (long l in Int64TestData)
            {
                yield return l;
            }

            yield return decimal.MinValue;
            yield return decimal.MaxValue;

            // negative 0m. The formatter is expected *not* to emit a minus sign in this case.
            yield return (new MutableDecimal() { High = 0, Mid = 0, Low = 0, IsNegative = true }).ToDecimal();

            yield return 0.304m; // Round down
            yield return -0.304m;
            yield return 0.305m; // Round up
            yield return -0.305m;
            yield return 999.99m;
            yield return -999.99m;
            yield return 0.000123456m;
            yield return -0.000123456m;

            // Explicit trailing 0's (Decimal can and does preserve these by setting the Scale appropriately)
            yield return 1.00m;
            yield return 0.00m;
            yield return -1.00m;
            yield return -0.00m;
        }
    }

    public static IEnumerable<double> DoubleTestData
    {
        get
        {
            foreach (long l in Int64TestData)
            {
                yield return l;
            }

            yield return 1.23;
        }
    }

    public static IEnumerable<float> SingleTestData
    {
        get
        {
            // BUGFIX: this previously iterated with "foreach (long d in DoubleTestData)",
            // which applied an explicit double->long conversion and truncated every
            // fractional value (1.23 became 1), so the data set never contained a
            // non-integral float. Iterate as double and narrow to float explicitly.
            foreach (double d in DoubleTestData)
            {
                float f = (float)d;
                if (!float.IsInfinity(f))
                    yield return f;
            }
        }
    }

    public static IEnumerable<Guid> GuidTestData
    {
        get
        {
            yield return new Guid("CB0AFB61-6F04-401A-BBEA-C0FC0B6E4E51");
            yield return new Guid("FC1911F9-9EED-4CA8-AC8B-CEEE1EBE2C72");
        }
    }

    public static IEnumerable<DateTime> DateTimeTestData
    {
        get
        {
            {
                // Kind == Unspecified
                TimeSpan offset = new TimeSpan(hours: 8, minutes: 0, seconds: 0);
                DateTimeOffset dto = new DateTimeOffset(year: 2017, month: 1, day: 13, hour: 3, minute: 45, second: 32, offset: offset);
                yield return dto.DateTime;
            }

            {
                // Kind == Utc
                TimeSpan offset = new TimeSpan(hours: 8, minutes: 0, seconds: 0);
                DateTimeOffset dto = new DateTimeOffset(year: 2017, month: 1, day: 13, hour: 3, minute: 45, second: 32, offset: offset);
                yield return dto.UtcDateTime;
            }

            {
                // Kind == Local
                TimeSpan offset = new TimeSpan(hours: -9, minutes: 0, seconds: 0);
                DateTimeOffset dto = new DateTimeOffset(year: 2017, month: 1, day: 13, hour: 3, minute: 45, second: 32, offset: offset);
                yield return dto.LocalDateTime;
            }

            {
                // Kind == Local
                TimeSpan offset = new TimeSpan(hours: 8, minutes: 0, seconds: 0);
                DateTimeOffset dto = new DateTimeOffset(year: 2017, month: 1, day: 13, hour: 3, minute: 45, second: 32, offset: offset);
                yield return dto.LocalDateTime;
            }
        }
    }

    public static IEnumerable<DateTimeOffset> DateTimeOffsetTestData
    {
        get
        {
            yield return DateTimeOffset.MinValue;
            yield return DateTimeOffset.MaxValue;
            yield return new DateTimeOffset(year: 2017, month: 1, day: 13, hour: 3, minute: 45, second: 32, new TimeSpan(hours: 0, minutes: 30, seconds: 0));
            yield return new DateTimeOffset(year: 2017, month: 1, day: 13, hour: 3, minute: 45, second: 32, new TimeSpan(hours: 0, minutes: -30, seconds: 0));
            yield return new DateTimeOffset(year: 2017, month: 1, day: 13, hour: 3, minute: 45, second: 32, new TimeSpan(hours: 8, minutes: 0, seconds: 0));
            yield return new DateTimeOffset(year: 2017, month: 1, day: 13, hour: 3, minute: 45, second: 32, new TimeSpan(hours: -8, minutes: 0, seconds: 0));
            yield return new DateTimeOffset(year: 2017, month: 12, day: 31, hour: 23, minute: 59, second: 58, new TimeSpan(hours: 14, minutes: 0, seconds: 0));
            yield return new DateTimeOffset(year: 2017, month: 12, day: 31, hour: 23, minute: 59, second: 58, new TimeSpan(hours: -14, minutes: 0, seconds: 0));

            // Re-use every valid PseudoDateTime as a DateTimeOffset.
            foreach (PseudoDateTime pseudoDateTime in PseudoDateTimeTestData)
            {
                if (pseudoDateTime.ExpectSuccess)
                {
                    TimeSpan offset = new TimeSpan(hours: pseudoDateTime.OffsetHours, minutes: pseudoDateTime.OffsetMinutes, seconds: 0);
                    if (pseudoDateTime.OffsetNegative)
                    {
                        offset = -offset;
                    }

                    DateTimeOffset dto = new DateTimeOffset(year: pseudoDateTime.Year, month: pseudoDateTime.Month, day: pseudoDateTime.Day, hour: pseudoDateTime.Hour, minute: pseudoDateTime.Minute, second: pseudoDateTime.Second, offset: offset);
                    if (pseudoDateTime.Fraction != 0)
                    {
                        dto += new TimeSpan(ticks: pseudoDateTime.Fraction);
                    }
                    yield return dto;
                }
            }
        }
    }

    public static IEnumerable<TimeSpan> TimeSpanTestData
    {
        get
        {
            yield return TimeSpan.MinValue;
            yield return TimeSpan.MaxValue;
            yield return new TimeSpan(ticks: 0);
            yield return new TimeSpan(ticks: 1);
            yield return new TimeSpan(ticks: -1);
            yield return new TimeSpan(ticks: 12345L);
            yield return new TimeSpan(ticks: -12345L);
            yield return new TimeSpan(days: 4, hours: 9, minutes: 8, seconds: 6, milliseconds: 0);
            yield return new TimeSpan(days: -4, hours: 9, minutes: 8, seconds: 6, milliseconds: 0);
            yield return new TimeSpan(days: 4, hours: 9, minutes: 8, seconds: 6, milliseconds: 5);
            yield return new TimeSpan(days: -4, hours: 9, minutes: 8, seconds: 6, milliseconds: 5);
            yield return new TimeSpan(days: 54, hours: 10, minutes: 11, seconds: 12, milliseconds: 13);
            yield return new TimeSpan(days: -54, hours: 10, minutes: 11, seconds: 12, milliseconds: 13);
            yield return new TimeSpan(days: 54, hours: 10, minutes: 11, seconds: 12, milliseconds: 999);
        }
    }

    public static IEnumerable<string> NumberTestData
    {
        get
        {
            yield return "";
            yield return "+";
            yield return "-";
            yield return "0";
            yield return "+0";
            yield return "-0";
            yield return "0.0";
            yield return "-0.0";
            yield return "123.45";
            yield return "+123.45";
            yield return "-123.45";
            yield return "++123.45";
            yield return "--123.45";
            yield return "5.";
            yield return ".6";
            yield return "5.";
            yield return ".";
            yield return "000000123.45";
            yield return "0.000045";
            yield return "000000123.000045";
            yield return decimal.MinValue.ToString("G", CultureInfo.InvariantCulture);
            yield return decimal.MaxValue.ToString("G", CultureInfo.InvariantCulture);
            yield return float.MinValue.ToString("G9", CultureInfo.InvariantCulture);
            yield return float.MaxValue.ToString("G9", CultureInfo.InvariantCulture);
            yield return float.Epsilon.ToString("G9", CultureInfo.InvariantCulture);
            yield return double.MinValue.ToString("G17", CultureInfo.InvariantCulture);
            yield return double.MaxValue.ToString("G17", CultureInfo.InvariantCulture);
            yield return double.Epsilon.ToString("G9", CultureInfo.InvariantCulture);
            yield return "1e";
            yield return "1e+";
            yield return "1e-";
            yield return "1e10";
            yield return "1e+10";
            yield return "1e-10";
            yield return "1E10";
            yield return "1E+10";
            yield return "1E-10";
            yield return "1e+9";
            yield return "1e-9";
            yield return "1e+9";
            yield return "1e+90";
            yield return "1e-90";
            yield return "1e+90";
            yield return "1e+400";
            yield return "1e-400";
            yield return "1e+400";
            yield return "-1e+400";
            yield return "-1e-400";
            yield return "-1e+400";
            yield return "1e+/";
            yield return "1e/";
            yield return "1e-/";
            yield return "1e+:";
            yield return "1e:";
            yield return "1e-:";
            yield return "1e";
            yield return "1e/";
            yield return "1e:";
            yield return "0.5555555555555555555555555555555555555555555555555";
            yield return "0.66666666666666666666666666665";
            yield return "0.6666666666666666666666666666500000000000000000000000000000000000000000000000000000000000000";
            yield return "0.6666666666666666666666666666500000000000000000000";
            yield return "0.6666666666666666666666666666666666666666666666665";
            yield return "0.9999999999999999999999999999999999999999999999999";

            // Wacky case that's expected to yield "Decimal.MaxValue / 10"
            // ( = 7922816251426433759354395034m (= High = 0x19999999, Mid = 0x99999999, Low = 0x9999999A))
            // and does thanks to a special overflow code path inside the Number->Decimal converter.
            yield return "0.79228162514264337593543950335" + "5" + "E28";

            // Exercise post-rounding overflow check.
            yield return "0.79228162514264337593543950335" + "5" + "E29";

            // Excercise the 20-digit lookahead inside the rounding logic inside the Number->Decimal converter.
            yield return "0.222222222222222222222222222255000000000000000000000000000000000000";

            // Code coverage for MutableDecimal.DecAdd()
            yield return "4611686018427387903.752";

            // Code coverage: "round X where {Epsilon > X >= 2.470328229206232730000000E-324} up to Epsilon"
            yield return "2.470328229206232730000000E-324";

            // Code coverage: underflow
            yield return "2.470328229206232730000000E-325";

            yield return "3.402823E+38"; //Single.MaxValue
            yield return "3.402824E+38"; //Just over Single.MaxValue
            yield return "-3.402823E+38"; //Single.MinValue
            yield return "-3.402824E+38"; //Just under Single.MinValue
            yield return "1.79769313486232E+308"; //Double.MaxValue
            yield return "1.79769313486233E+308"; //Just over Double.MaxValue
            yield return "-1.79769313486232E+308"; //Double.MinValue
            yield return "-1.79769313486233E+308"; //Just under Double.MinValue

            // Ensures that the NumberBuffer capacity is consistent with Desktop's.
            yield return ".2222222222222222222222222222500000000000000000001";
        }
    }

    public static IEnumerable<PseudoDateTime> PseudoDateTimeTestData
    {
        get
        {
            foreach (int year in new int[] { 2000, 2001, 2002, 2003, 2004, 2010, 2012, 2013, 2014, 2, 9999 })
            {
                for (int month = 1; month <= 12; month++)
                {
                    int daysInMonth = DateTime.DaysInMonth(year: year, month: month);
                    foreach (int day in new int[] { 1, 9, 10, daysInMonth })
                    {
                        yield return new PseudoDateTime(year: year, month: month, day: day, hour: 3, minute: 15, second: 45, expectSuccess: true);
                    }

                    yield return new PseudoDateTime(year: year, month: month, day: (daysInMonth + 1), hour: 23, minute: 15, second: 45, expectSuccess: false);
                }
            }

            // Test data at the edge of the valid ranges.
            yield return new PseudoDateTime(year: 1, month: 1, day: 1, hour: 14, minute: 0, second: 0, expectSuccess: true);
            yield return new PseudoDateTime(year: 9999, month: 12, day: 31, hour: 9, minute: 0, second: 0, expectSuccess: true);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 14, minute: 0, second: 0, expectSuccess: true);
            yield return new PseudoDateTime(year: 2017, month: 12, day: 1, hour: 14, minute: 0, second: 0, expectSuccess: true);

            // Day range is month/year dependent. Was already covered above.
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 14, minute: 0, second: 0, expectSuccess: true);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 0, minute: 0, second: 0, expectSuccess: true);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 23, minute: 0, second: 0, expectSuccess: true);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 0, minute: 59, second: 0, expectSuccess: true);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 0, minute: 0, second: 59, expectSuccess: true);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 0, minute: 0, second: 0, fraction: 9999999, offsetNegative: false, offsetHours: 0, offsetMinutes: 0, expectSuccess: true);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 0, minute: 0, second: 0, fraction: 0, offsetNegative: false, offsetHours: 13, offsetMinutes: 59, expectSuccess: true);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 0, minute: 0, second: 0, fraction: 0, offsetNegative: false, offsetHours: 14, offsetMinutes: 0, expectSuccess: true);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 0, minute: 0, second: 0, fraction: 0, offsetNegative: true, offsetHours: 13, offsetMinutes: 59, expectSuccess: true);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 0, minute: 0, second: 0, fraction: 0, offsetNegative: true, offsetHours: 14, offsetMinutes: 0, expectSuccess: true);

            // Test data outside the valid ranges.
            yield return new PseudoDateTime(year: 0, month: 1, day: 1, hour: 24, minute: 0, second: 0, expectSuccess: false);
            yield return new PseudoDateTime(year: 2017, month: 0, day: 1, hour: 24, minute: 0, second: 0, expectSuccess: false);
            yield return new PseudoDateTime(year: 2017, month: 13, day: 1, hour: 24, minute: 0, second: 0, expectSuccess: false);

            // Day range is month/year dependent. Was already covered above.
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 60, minute: 0, second: 0, expectSuccess: false);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 0, minute: 60, second: 0, expectSuccess: false);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 0, minute: 0, second: 60, expectSuccess: false);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 0, minute: 0, second: 0, fraction: 0, offsetNegative: true, offsetHours: 0, offsetMinutes: 60, expectSuccess: false);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 0, minute: 0, second: 0, fraction: 0, offsetNegative: false, offsetHours: 14, offsetMinutes: 1, expectSuccess: false);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 0, minute: 0, second: 0, fraction: 0, offsetNegative: false, offsetHours: 15, offsetMinutes: 0, expectSuccess: false);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 0, minute: 0, second: 0, fraction: 0, offsetNegative: true, offsetHours: 14, offsetMinutes: 1, expectSuccess: false);
            yield return new PseudoDateTime(year: 2017, month: 1, day: 1, hour: 0, minute: 0, second: 0, fraction: 0, offsetNegative: true, offsetHours: 15, offsetMinutes: 0, expectSuccess: false);

            // Past the end of time.
            yield return new PseudoDateTime(year: 9999, month: 12, day: 31, hour: 23, minute: 59, second: 59, fraction: 9999999, offsetNegative: true, offsetHours: 0, offsetMinutes: 1, expectSuccess: false);
            yield return new PseudoDateTime(year: 1, month: 1, day: 1, hour: 0, minute: 0, second: 0, fraction: 0, offsetNegative: false, offsetHours: 0, offsetMinutes: 1, expectSuccess: false);
        }
    }
}
}
| |
using System;
using System.Data;
using System.Data.Common;
namespace Arrowgene.Services.Database
{
/// <summary>
/// Operations for SQL type databases.
/// Provides shared connection/command lifecycle handling, parameter binding
/// helpers and typed reader accessors for concrete provider implementations.
/// </summary>
/// <typeparam name="TCon">The provider specific <see cref="DbConnection"/> type.</typeparam>
/// <typeparam name="TCom">The provider specific <see cref="DbCommand"/> type.</typeparam>
public abstract class SqlOperations<TCon, TCom>
    where TCon : DbConnection
    where TCom : DbCommand
{
    /// <summary>Result of a non-query that affected no rows (also returned when an exception was handled).</summary>
    public const int NoRowsAffected = 0;

    /// <summary>Result indicating that no auto increment key was generated.</summary>
    public const long NoAutoIncrement = 0;

    // Abstract classes should not expose public constructors (CA1012);
    // only derived classes can call this anyway, so protected is equivalent.
    protected SqlOperations()
    {
    }

    /// <summary>Creates a provider specific connection, ready for use.</summary>
    protected abstract TCon Connection();

    /// <summary>Creates a provider specific command for the given query and connection.</summary>
    protected abstract TCom Command(string query, TCon connection);

    /// <summary>Returns the auto increment key produced by the last executed command.</summary>
    protected abstract long AutoIncrement(TCon connection, TCom command);

    /// <summary>
    /// Inserts the row if no row matches <paramref name="whereColumn"/> = <paramref name="whereValue"/>,
    /// otherwise updates the matching row.
    /// </summary>
    public abstract int Upsert(string table, string[] columns, object[] values, string whereColumn,
        object whereValue, out long autoIncrement);

    /// <summary>
    /// Executes a non-query statement; <paramref name="nonQueryAction"/> is invoked
    /// first so the caller can bind parameters on the command.
    /// </summary>
    /// <returns>Rows affected, or <see cref="NoRowsAffected"/> if an exception was handled.</returns>
    public int ExecuteNonQuery(string query, Action<TCom> nonQueryAction)
    {
        try
        {
            using (TCon connection = Connection())
            using (TCom command = Command(query, connection))
            {
                nonQueryAction(command);
                return command.ExecuteNonQuery();
            }
        }
        catch (Exception ex)
        {
            Exception(ex);
            return NoRowsAffected;
        }
    }

    /// <summary>
    /// Executes a non-query statement and also reports the generated auto increment key.
    /// </summary>
    /// <returns>Rows affected, or <see cref="NoRowsAffected"/> if an exception was handled.</returns>
    public int ExecuteNonQuery(string query, Action<TCom> nonQueryAction, out long autoIncrement)
    {
        try
        {
            using (TCon connection = Connection())
            using (TCom command = Command(query, connection))
            {
                nonQueryAction(command);
                int rowsAffected = command.ExecuteNonQuery();
                // The key must be read while the connection is still open.
                autoIncrement = AutoIncrement(connection, command);
                return rowsAffected;
            }
        }
        catch (Exception ex)
        {
            Exception(ex);
            autoIncrement = NoAutoIncrement;
            return NoRowsAffected;
        }
    }

    /// <summary>
    /// Executes a query; <paramref name="nonQueryAction"/> binds parameters and
    /// <paramref name="readAction"/> consumes the open data reader.
    /// </summary>
    public void ExecuteReader(string query, Action<TCom> nonQueryAction, Action<DbDataReader> readAction)
    {
        try
        {
            using (TCon connection = Connection())
            using (TCom command = Command(query, connection))
            {
                nonQueryAction(command);
                using (DbDataReader reader = command.ExecuteReader())
                {
                    readAction(reader);
                }
            }
        }
        catch (Exception ex)
        {
            Exception(ex);
        }
    }

    /// <summary>
    /// Executes a parameterless query and hands the open data reader to <paramref name="readAction"/>.
    /// </summary>
    public void ExecuteReader(string query, Action<DbDataReader> readAction)
    {
        try
        {
            using (TCon connection = Connection())
            using (TCom command = Command(query, connection))
            using (DbDataReader reader = command.ExecuteReader())
            {
                readAction(reader);
            }
        }
        catch (Exception ex)
        {
            Exception(ex);
        }
    }

    /// <summary>
    /// Executes a statement and discards the result (e.g. DDL statements).
    /// </summary>
    public void Execute(string query)
    {
        try
        {
            using (TCon connection = Connection())
            using (TCom command = Command(query, connection))
            {
                command.ExecuteNonQuery();
            }
        }
        catch (Exception ex)
        {
            Exception(ex);
        }
    }

    /// <summary>
    /// Returns the database server version string, or <see cref="string.Empty"/> on error.
    /// </summary>
    public string ServerVersion()
    {
        try
        {
            using (TCon connection = Connection())
            {
                return connection.ServerVersion;
            }
        }
        catch (Exception ex)
        {
            Exception(ex);
            return string.Empty;
        }
    }

    /// <summary>
    /// Central error hook. The default implementation rethrows the exception;
    /// override to log and suppress instead.
    /// </summary>
    protected virtual void Exception(Exception ex)
    {
        // Rethrow via ExceptionDispatchInfo so the original stack trace is
        // preserved; a plain "throw ex;" would reset it to this frame.
        System.Runtime.ExceptionServices.ExceptionDispatchInfo.Capture(ex).Throw();
    }

    /// <summary>Creates a typed parameter for <paramref name="command"/> without adding it.</summary>
    protected DbParameter Parameter(TCom command, string name, object value, DbType type)
    {
        DbParameter parameter = command.CreateParameter();
        parameter.ParameterName = name;
        parameter.Value = value;
        parameter.DbType = type;
        return parameter;
    }

    /// <summary>Creates a string parameter for <paramref name="command"/> without adding it.</summary>
    protected DbParameter Parameter(TCom command, string name, string value)
    {
        return Parameter(command, name, value, DbType.String);
    }

    /// <summary>Creates a typed parameter and adds it to the command.</summary>
    protected void AddParameter(TCom command, string name, object value, DbType type)
    {
        command.Parameters.Add(Parameter(command, name, value, type));
    }

    protected void AddParameter(TCom command, string name, string value)
    {
        AddParameter(command, name, value, DbType.String);
    }

    protected void AddParameter(TCom command, string name, Int32 value)
    {
        AddParameter(command, name, value, DbType.Int32);
    }

    protected void AddParameter(TCom command, string name, float value)
    {
        // NOTE(review): floats are bound as DbType.Double (lossless widening).
        // Switching to DbType.Single could change provider binding for
        // existing schemas - confirm before changing.
        AddParameter(command, name, value, DbType.Double);
    }

    protected void AddParameter(TCom command, string name, byte value)
    {
        AddParameter(command, name, value, DbType.Byte);
    }

    protected void AddParameter(TCom command, string name, UInt32 value)
    {
        AddParameter(command, name, value, DbType.UInt32);
    }

    /// <summary>Adds an enum parameter bound as its underlying Int32 value.</summary>
    protected void AddParameterEnumInt32<T>(TCom command, string name, T value) where T : Enum
    {
        AddParameter(command, name, (Int32) (object) value, DbType.Int32);
    }

    protected void AddParameter(TCom command, string name, DateTime? value)
    {
        // Bind a missing value as DBNull so the database stores SQL NULL;
        // a boxed CLR null parameter value is not sent as NULL by providers.
        AddParameter(command, name, value.HasValue ? (object) value.Value : DBNull.Value, DbType.DateTime);
    }

    protected void AddParameter(TCom command, string name, DateTime value)
    {
        AddParameter(command, name, value, DbType.DateTime);
    }

    protected void AddParameter(TCom command, string name, bool value)
    {
        AddParameter(command, name, value, DbType.Boolean);
    }

    /// <summary>Reads a DateTime column that may be SQL NULL.</summary>
    protected DateTime? GetDateTimeNullable(DbDataReader reader, int ordinal)
    {
        if (reader.IsDBNull(ordinal))
        {
            return null;
        }
        return reader.GetDateTime(ordinal);
    }

    /// <summary>Reads a string column that may be SQL NULL.</summary>
    protected string GetStringNullable(DbDataReader reader, int ordinal)
    {
        if (reader.IsDBNull(ordinal))
        {
            return null;
        }
        return reader.GetString(ordinal);
    }

    protected int GetInt32(DbDataReader reader, string column)
    {
        return reader.GetInt32(reader.GetOrdinal(column));
    }

    protected byte GetByte(DbDataReader reader, string column)
    {
        return reader.GetByte(reader.GetOrdinal(column));
    }

    protected short GetInt16(DbDataReader reader, string column)
    {
        return reader.GetInt16(reader.GetOrdinal(column));
    }

    protected float GetFloat(DbDataReader reader, string column)
    {
        return reader.GetFloat(reader.GetOrdinal(column));
    }

    protected string GetString(DbDataReader reader, string column)
    {
        return reader.GetString(reader.GetOrdinal(column));
    }

    protected bool GetBoolean(DbDataReader reader, string column)
    {
        return reader.GetBoolean(reader.GetOrdinal(column));
    }

    protected DateTime GetDateTime(DbDataReader reader, string column)
    {
        return reader.GetDateTime(reader.GetOrdinal(column));
    }

    protected DateTime? GetDateTimeNullable(DbDataReader reader, string column)
    {
        return GetDateTimeNullable(reader, reader.GetOrdinal(column));
    }

    protected string GetStringNullable(DbDataReader reader, string column)
    {
        return GetStringNullable(reader, reader.GetOrdinal(column));
    }
}
}
| |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System.Collections.Generic;
using System.Linq;
using IBApi;
using NUnit.Framework;
using QuantConnect.Algorithm;
using QuantConnect.Brokerages.InteractiveBrokers;
using QuantConnect.Data.Auxiliary;
using QuantConnect.Lean.Engine.DataFeeds;
using QuantConnect.Logging;
namespace QuantConnect.Tests.Brokerages.InteractiveBrokers
{
[TestFixture]
[Ignore("These tests require the IBGateway to be installed.")]
public class InteractiveBrokersFuturesTests
{
    /// <summary>
    /// Requests futures contracts from IB for a fixed set of tickers per Lean
    /// market and asserts that every contract returned (whose exchange is one
    /// of the expected IB exchanges) is listed on the exchange mapped for
    /// that market.
    /// </summary>
    [Test]
    public void CreatesExpectedFuturesContracts()
    {
        var symbolMapper = new InteractiveBrokersSymbolMapper(TestGlobals.MapFileProvider);

        using (var ib = new InteractiveBrokersBrokerage(new QCAlgorithm(), new OrderProvider(), new SecurityProvider(), new AggregationManager(), TestGlobals.MapFileProvider))
        {
            ib.Connect();
            Assert.IsTrue(ib.IsConnected);

            // Lean market name -> IB exchange code expected in contract details.
            var ibMarkets = new Dictionary<string, string>
            {
                { Market.CME, "GLOBEX" },
                { Market.NYMEX, "NYMEX" },
                { Market.COMEX, "NYMEX" },
                { Market.CBOT, "ECBOT" },
                { Market.ICE, "NYBOT" },
                { Market.CFE, "CFE" }
            };

            // Tickers to request, grouped by Lean market. Commented-out
            // tickers are deliberately excluded from the test run.
            var tickersByMarket = new Dictionary<string, string[]>
            {
                {
                    Market.CFE,
                    new[]
                    {
                        "VX"
                    }
                },
                {
                    Market.CBOT,
                    new[]
                    {
                        "AW",
                        //"BCF",
                        //"BWF",
                        "EH",
                        "F1U",
                        "KE",
                        "TN",
                        "UB",
                        "YM",
                        "ZB",
                        "ZC",
                        "ZF",
                        "ZL",
                        "ZM",
                        "ZN",
                        "ZO",
                        "ZS",
                        "ZT",
                        "ZW",
                    }
                },
                {
                    Market.CME,
                    new[]
                    {
                        "6A",
                        "6B",
                        "6C",
                        "6E",
                        "6J",
                        "6L",
                        "6M",
                        "6N",
                        "6R",
                        "6S",
                        "6Z",
                        //"ACD",
                        //"AJY",
                        //"ANE",
                        "BIO",
                        "BTC",
                        "CB",
                        //"CJY",
                        //"CNH",
                        "CSC",
                        //"DC",
                        "DY",
                        "E7",
                        //"EAD",
                        //"ECD",
                        //"EI",
                        "EMD",
                        "ES",
                        //"ESK",
                        "GD",
                        "GDK",
                        "GE",
                        "GF",
                        //"GNF",
                        "HE",
                        //"IBV",
                        "J7",
                        //"LBS",
                        "LE",
                        "NKD",
                        "NQ",
                        "RTY",
                    }
                },
                {
                    Market.COMEX,
                    new[]
                    {
                        //"AUP",
                        //"EDP",
                        "GC",
                        "HG",
                        "SI",
                    }
                },
                {
                    Market.ICE,
                    new[]
                    {
                        "B",
                        "CC",
                        "CT",
                        "DX",
                        "G",
                        "KC",
                        "OJ",
                        "SB",
                    }
                },
                {
                    Market.NYMEX,
                    new[]
                    {
                        //"1S",
                        //"22",
                        //"A0D",
                        //"A0F",
                        //"A1L",
                        //"A1M",
                        //"A1R",
                        //"A32",
                        //"A3G",
                        //"A7E",
                        //"A7I",
                        //"A7Q",
                        //"A8J",
                        //"A8K",
                        //"A8O",
                        //"A91",
                        //"A9N",
                        //"AA6",
                        //"AA8",
                        //"ABS",
                        "ABT",
                        //"AC0",
                        //"AD0",
                        //"ADB",
                        //"AE5",
                        //"AGA",
                        //"AJL",
                        //"AJS",
                        //"AKL",
                        //"AKZ",
                        //"APS",
                        //"AR0",
                        "ARE",
                        //"AVZ",
                        //"AYV",
                        //"AYX",
                        //"AZ1",
                        //"B0",
                        //"B7H",
                        "BK",
                        //"BOO",
                        //"BR7",
                        "BZ",
                        "CL",
                        //"CRB",
                        //"CSW",
                        "CSX",
                        //"CU",
                        //"D1N",
                        //"DCB",
                        //"E6",
                        //"EN",
                        //"EPN",
                        //"EVC",
                        "EWG",
                        //"EWN",
                        "EXR",
                        //"FO",
                        "FRC",
                        //"FSS",
                        //"GCU",
                        //"HCL",
                        "HH",
                        "HO",
                        "HP",
                        "HRC",
                        //"HTT",
                        "NG",
                        "PA",
                        "PL",
                        "RB",
                        //"YO",
                    }
                }
            };

            foreach (var kvp in tickersByMarket)
            {
                var market = kvp.Key;
                var tickers = kvp.Value;

                foreach (var ticker in tickers)
                {
                    // Build an exchange-less IB contract; FindContracts then
                    // returns matches across all exchanges.
                    var contract = new Contract
                    {
                        Symbol = symbolMapper.GetBrokerageRootSymbol(ticker),
                        Currency = Currencies.USD,
                        Exchange = null,
                        SecType = "FUT"
                    };

                    Log.Trace($"Market: {market} - Future Ticker: {ticker}");

                    var results = ib.FindContracts(contract, contract.Symbol);

                    // Only validate contracts on exchanges this test knows about.
                    foreach (var contractDetails in results.Where(x => ibMarkets.Values.Contains(x.Contract.Exchange)))
                    {
                        var message = $" - ContractDetails: {contractDetails.Contract} {contractDetails.ContractMonth}";
                        Log.Trace(message);

                        Assert.AreEqual(ibMarkets[market], contractDetails.Contract.Exchange, message);
                    }
                }
            }
        }
    }

    /// <summary>
    /// Looks up futures symbols (including non-USD denominated contracts,
    /// e.g. HKFE's HSI) and asserts that IB returns at least one symbol for
    /// each requested ticker.
    /// </summary>
    [Test]
    public void CreateExpectedFutureContractsWithDifferentCurrencies()
    {
        using (var ib = new InteractiveBrokersBrokerage(new QCAlgorithm(), new OrderProvider(), new SecurityProvider(), new AggregationManager(), TestGlobals.MapFileProvider))
        {
            ib.Connect();
            Assert.IsTrue(ib.IsConnected);

            // Tickers to look up, grouped by Lean market.
            var tickersByMarket = new Dictionary<string, string[]>
            {
                {
                    Market.HKFE,
                    new[]
                    {
                        "HSI"
                    }
                },
                {
                    Market.CME,
                    new[]
                    {
                        "ACD",
                        "AJY",
                        "ANE"
                    }
                },
                {
                    Market.CBOT,
                    new[]
                    {
                        "ZC"
                    }
                }
            };

            foreach (var kvp in tickersByMarket)
            {
                var market = kvp.Key;
                var tickers = kvp.Value;

                foreach (var ticker in tickers)
                {
                    var currentSymbol = Symbol.Create(ticker, SecurityType.Future, market);
                    var symbolsFound = ib.LookupSymbols(currentSymbol, false);
                    Assert.IsNotEmpty(symbolsFound);

                    foreach (var symbol in symbolsFound)
                    {
                        Log.Trace($"Symbol found in IB: {symbol}");
                    }
                }
            }
        }
    }
}
}
| |
/********************************************************************++
Copyright (c) Microsoft Corporation. All rights reserved.
--********************************************************************/
using System.IO;
using System.Collections;
using System.Collections.ObjectModel;
using System.Xml;
using System.Reflection;
namespace System.Management.Automation
{
/// <summary>
/// Class FaqHelpProvider implement the help provider for faq's.
/// </summary>
///
/// <remarks>
/// Faq Help information are stored in 'faq.xml' files. These files are
/// located in the Monad / CustomShell Path as well as in the Application Base
/// of PSSnapIns
/// </remarks>
internal class FaqHelpProvider : HelpProviderWithFullCache
{
    /// <summary>
    /// Constructor for FaqHelpProvider
    /// </summary>
    internal FaqHelpProvider(HelpSystem helpSystem) : base(helpSystem)
    {
    }

    #region Common Properties

    /// <summary>
    /// Name of this provider
    /// </summary>
    /// <value>Name of this provider</value>
    internal override string Name
    {
        get
        {
            return "FAQ Help Provider";
        }
    }

    /// <summary>
    /// Help category for this provider, which is a constant: HelpCategory.FAQ.
    /// </summary>
    /// <value>Help category for this provider</value>
    internal override HelpCategory HelpCategory
    {
        get
        {
            return HelpCategory.FAQ;
        }
    }

    #endregion

    #region Load cache

    /// <summary>
    /// Load cache for faq help's.
    /// Searches all help paths for "*.faq.xml" files and loads each file
    /// exactly once (files already present in _helpFiles are skipped).
    /// </summary>
    internal sealed override void LoadCache()
    {
        Collection<String> files = MUIFileSearcher.SearchFiles("*.faq.xml", GetSearchPaths());

        if (files == null)
            return;

        foreach (string file in files)
        {
            if (!_helpFiles.ContainsKey(file))
            {
                LoadHelpFile(file);

                // Add this file into _helpFiles hashtable to prevent it to be loaded again.
                _helpFiles[file] = 0;
            }
        }
    }

    /// <summary>
    /// Load help file for HelpInfo objects. The HelpInfo objects will be
    /// put into help cache.
    /// </summary>
    /// <remarks>
    /// 1. Needs to pay special attention about error handling in this function.
    /// Common errors include: file not found and invalid xml. None of these error
    /// should cause help search to stop. Load failures are therefore recorded
    /// in HelpSystem.LastErrors instead of being rethrown.
    /// </remarks>
    /// <param name="helpFile">Full path of the faq xml file to load; no-op when null or empty.</param>
    private void LoadHelpFile(string helpFile)
    {
        if (String.IsNullOrEmpty(helpFile))
        {
            return;
        }

        XmlDocument doc;

        try
        {
            doc = InternalDeserializer.LoadUnsafeXmlDocument(
                new FileInfo(helpFile),
                false, /* ignore whitespace, comments, etc. */
                null); /* default maxCharactersInDocument */
        }
        catch (IOException ioException)
        {
            ErrorRecord errorRecord = new ErrorRecord(ioException, "HelpFileLoadFailure", ErrorCategory.OpenError, null);
            errorRecord.ErrorDetails = new ErrorDetails(typeof(FaqHelpProvider).GetTypeInfo().Assembly, "HelpErrors", "HelpFileLoadFailure", helpFile, ioException.Message);
            this.HelpSystem.LastErrors.Add(errorRecord);
            return;
        }
        catch (System.Security.SecurityException securityException)
        {
            ErrorRecord errorRecord = new ErrorRecord(securityException, "HelpFileNotAccessible", ErrorCategory.OpenError, null);
            errorRecord.ErrorDetails = new ErrorDetails(typeof(FaqHelpProvider).GetTypeInfo().Assembly, "HelpErrors", "HelpFileNotAccessible", helpFile, securityException.Message);
            this.HelpSystem.LastErrors.Add(errorRecord);
            return;
        }
        catch (XmlException xmlException)
        {
            ErrorRecord errorRecord = new ErrorRecord(xmlException, "HelpFileNotValid", ErrorCategory.SyntaxError, null);
            errorRecord.ErrorDetails = new ErrorDetails(typeof(FaqHelpProvider).GetTypeInfo().Assembly, "HelpErrors", "HelpFileNotValid", helpFile, xmlException.Message);
            this.HelpSystem.LastErrors.Add(errorRecord);
            return;
        }

        // Find the top-level <faq> element (case-insensitive).
        XmlNode helpItemsNode = null;

        if (doc.HasChildNodes)
        {
            for (int i = 0; i < doc.ChildNodes.Count; i++)
            {
                XmlNode node = doc.ChildNodes[i];
                if (node.NodeType == XmlNodeType.Element && String.Compare(node.Name, "faq", StringComparison.OrdinalIgnoreCase) == 0)
                {
                    helpItemsNode = node;
                    break;
                }
            }
        }

        if (helpItemsNode == null)
            return;

        using (this.HelpSystem.Trace(helpFile))
        {
            if (helpItemsNode.HasChildNodes)
            {
                for (int i = 0; i < helpItemsNode.ChildNodes.Count; i++)
                {
                    XmlNode node = helpItemsNode.ChildNodes[i];

                    // Direct <faqEntry> children are cached by entry name.
                    if (node.NodeType == XmlNodeType.Element && String.Compare(node.Name, "faqEntry", StringComparison.OrdinalIgnoreCase) == 0)
                    {
                        HelpInfo helpInfo = null;

                        helpInfo = FaqHelpInfo.Load(node);

                        if (helpInfo != null)
                        {
                            this.HelpSystem.TraceErrors(helpInfo.Errors);
                            AddCache(helpInfo.Name, helpInfo);
                        }

                        continue;
                    }

                    // <faqDiv> groups one more level of <faqEntry> children.
                    if (node.NodeType == XmlNodeType.Element && String.Compare(node.Name, "faqDiv", StringComparison.OrdinalIgnoreCase) == 0)
                    {
                        LoadFaqDiv(node);
                    }
                }
            }
        }
    }

    /// <summary>
    /// Loads every &lt;faqEntry&gt; child of a &lt;faqDiv&gt; element into the help cache.
    /// </summary>
    /// <param name="xmlNode">The faqDiv element; no-op when null.</param>
    private void LoadFaqDiv(XmlNode xmlNode)
    {
        if (xmlNode == null)
            return;

        for (int i = 0; i < xmlNode.ChildNodes.Count; i++)
        {
            XmlNode node = xmlNode.ChildNodes[i];
            if (node.NodeType == XmlNodeType.Element && String.Compare(node.Name, "faqEntry", StringComparison.OrdinalIgnoreCase) == 0)
            {
                HelpInfo helpInfo = null;

                helpInfo = FaqHelpInfo.Load(node);

                if (helpInfo != null)
                {
                    this.HelpSystem.TraceErrors(helpInfo.Errors);
                    AddCache(helpInfo.Name, helpInfo);
                }
            }
        }
    }

    #endregion

    #region Help Provider Interface

    /// <summary>
    /// This will reset the help cache. Normally this corresponds to a
    /// help culture change.
    /// </summary>
    internal override void Reset()
    {
        base.Reset();

        _helpFiles.Clear();
    }

    #endregion

    #region Private Data

    /// <summary>
    /// This is a hashtable to track which help files are loaded already.
    ///
    /// This will avoid one help file getting loaded again and again.
    /// </summary>
    private readonly Hashtable _helpFiles = new Hashtable();

    #endregion
}
| |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
namespace Newtonsoft.Json.Utilities
{
/// <summary>
/// Time zone classification produced by <see cref="DateTimeParser"/>.
/// </summary>
internal enum ParserTimeZone
{
    Unspecified,
    Utc,
    LocalWestOfUtc,
    LocalEastOfUtc
}

/// <summary>
/// Hand-rolled parser for ISO-8601 date/time strings of the fixed layout
/// "yyyy-MM-ddTHH:mm:ss(.fffffff)(Z|+zz:zz|-zz:zz)". After a successful
/// <see cref="Parse"/> call the public fields hold the parsed components.
/// </summary>
internal struct DateTimeParser
{
    // Parsed components; only meaningful after Parse returned true.
    public int Year;
    public int Month;
    public int Day;
    public int Hour;
    public int Minute;
    public int Second;
    public int Fraction;
    public int ZoneHour;
    public int ZoneMinute;
    public ParserTimeZone Zone;

    private string _input;
    private int _inputLength;

    // Scale factors used to pad a short fraction out to 7 digits (index 0 is unused).
    private static readonly int[] FractionScale = { -1, 10, 100, 1000, 10000, 100000, 1000000 };

    // Fixed character offsets within the "yyyy-MM-ddTHH:mm:ss" layout.
    private const int DateHyphen1 = 4;      // after "yyyy"
    private const int MonthStart = 5;       // after "yyyy-"
    private const int DateHyphen2 = 7;      // after "yyyy-MM"
    private const int DayStart = 8;         // after "yyyy-MM-"
    private const int TimeSeparator = 10;   // position of 'T'
    private const int TimeStart = 11;       // after "yyyy-MM-ddT"
    private const int TimeColon1 = 2;       // after "HH" (relative to time start)
    private const int MinuteStart = 3;      // after "HH:"
    private const int TimeColon2 = 5;       // after "HH:mm"
    private const int SecondStart = 6;      // after "HH:mm:"
    private const int TimeLength = 8;       // length of "HH:mm:ss"
    private const int ZoneHourStart = 1;    // after the sign character
    private const int ZoneColon = 3;        // after "-zz"
    private const int ZoneMinuteStart = 4;  // after "-zz:"
    private const int ZoneLength = 6;       // length of "-zz:zz"

    private const short MaxFractionDigits = 7;

    /// <summary>
    /// Parses <paramref name="text"/>; returns true only when the whole
    /// string matches the expected layout and every component is in range.
    /// </summary>
    public bool Parse(string text)
    {
        _input = text;
        _inputLength = text.Length;

        return ReadDate(0)
            && MatchChar(TimeSeparator, 'T')
            && ReadTimeAndZone(TimeStart);
    }

    // Reads "yyyy-MM-dd" at the given offset, validating month and
    // month/year dependent day ranges.
    private bool ReadDate(int start)
    {
        return Read4Digits(start, out Year)
            && 1 <= Year
            && MatchChar(start + DateHyphen1, '-')
            && Read2Digits(start + MonthStart, out Month)
            && 1 <= Month
            && Month <= 12
            && MatchChar(start + DateHyphen2, '-')
            && Read2Digits(start + DayStart, out Day)
            && 1 <= Day
            && Day <= DateTime.DaysInMonth(Year, Month);
    }

    // Reads the time portion then requires the zone suffix (if any) to end
    // the string exactly.
    private bool ReadTimeAndZone(int start)
    {
        int pos = start;
        return ReadTime(ref pos) && ReadZone(pos);
    }

    // Reads "HH:mm:ss" plus an optional ".f..." fraction; advances pos past
    // whatever was consumed.
    private bool ReadTime(ref int pos)
    {
        bool clockOk = Read2Digits(pos, out Hour)
            && Hour < 24
            && MatchChar(pos + TimeColon1, ':')
            && Read2Digits(pos + MinuteStart, out Minute)
            && Minute < 60
            && MatchChar(pos + TimeColon2, ':')
            && Read2Digits(pos + SecondStart, out Second)
            && Second < 60;

        if (!clockOk)
        {
            return false;
        }

        pos += TimeLength;

        if (MatchChar(pos, '.'))
        {
            Fraction = 0;
            int digitCount = 0;

            while (++pos < _inputLength && digitCount < MaxFractionDigits)
            {
                int digit = _input[pos] - '0';
                if (digit < 0 || digit > 9)
                    break;

                Fraction = (Fraction * 10) + digit;
                digitCount++;
            }

            if (digitCount < MaxFractionDigits)
            {
                // A bare '.' with no digits is invalid; otherwise pad the
                // fraction out to 7 digits (100ns ticks).
                if (digitCount == 0)
                    return false;

                Fraction *= FractionScale[MaxFractionDigits - digitCount];
            }
        }

        return true;
    }

    // Reads an optional "Z"/"z" or "+zz:zz"/"-zz:zz" suffix and succeeds only
    // when the string ends exactly there.
    private bool ReadZone(int pos)
    {
        if (pos < _inputLength)
        {
            char signChar = _input[pos];
            if (signChar == 'Z' || signChar == 'z')
            {
                Zone = ParserTimeZone.Utc;
                pos++;
            }
            else if (pos + ZoneLength - 1 < _inputLength
                && Read2Digits(pos + ZoneHourStart, out ZoneHour)
                && ZoneHour <= 99
                && MatchChar(pos + ZoneColon, ':')
                && Read2Digits(pos + ZoneMinuteStart, out ZoneMinute)
                && ZoneMinute <= 99)
            {
                switch (signChar)
                {
                    case '-':
                        Zone = ParserTimeZone.LocalWestOfUtc;
                        pos += ZoneLength;
                        break;

                    case '+':
                        Zone = ParserTimeZone.LocalEastOfUtc;
                        pos += ZoneLength;
                        break;
                }
            }
        }

        // Trailing characters make the whole parse fail.
        return pos == _inputLength;
    }

    // Reads exactly four ASCII digits; on failure num is 0.
    private bool Read4Digits(int pos, out int num)
    {
        if (pos + 3 < _inputLength)
        {
            int d1 = _input[pos] - '0';
            int d2 = _input[pos + 1] - '0';
            int d3 = _input[pos + 2] - '0';
            int d4 = _input[pos + 3] - '0';

            if (0 <= d1 && d1 < 10
                && 0 <= d2 && d2 < 10
                && 0 <= d3 && d3 < 10
                && 0 <= d4 && d4 < 10)
            {
                num = ((((d1 * 10) + d2) * 10 + d3) * 10) + d4;
                return true;
            }
        }

        num = 0;
        return false;
    }

    // Reads exactly two ASCII digits; on failure num is 0.
    private bool Read2Digits(int pos, out int num)
    {
        if (pos + 1 < _inputLength)
        {
            int d1 = _input[pos] - '0';
            int d2 = _input[pos + 1] - '0';

            if (0 <= d1 && d1 < 10
                && 0 <= d2 && d2 < 10)
            {
                num = (d1 * 10) + d2;
                return true;
            }
        }

        num = 0;
        return false;
    }

    // True when position pos exists and holds exactly ch.
    private bool MatchChar(int pos, char ch)
    {
        return pos < _inputLength && _input[pos] == ch;
    }
}
}
| |
/*
insert license info here
*/
using System;
using System.Collections;
namespace Business.Data.Laboratorio
{
/// <summary>
/// Data access entity for a laboratory "practica determinacion" record.
/// Generated by MyGeneration using the NHibernate Object Mapping template.
/// </summary>
[Serializable]
public sealed class PracticaDeterminacion : Business.BaseDataAccess
{
    #region Private Members
    private bool m_isChanged;   // set whenever any property receives a different value
    private int m_idpracticadeterminacion;
    private Efector m_idefector;
    private Item m_iditempractica;
    private int m_iditemdeterminacion;
    private string m_titulo;
    private int m_orden;
    private string m_formatoimpresion;
    private Usuario m_idusuarioregistro;
    private DateTime m_fecharegistro;
    #endregion

    #region Default ( Empty ) Class Constuctor
    /// <summary>
    /// default constructor; initializes all members to empty/default values
    /// </summary>
    public PracticaDeterminacion()
    {
        m_idpracticadeterminacion = 0;
        m_idefector = new Efector();
        m_iditempractica = new Item();
        m_iditemdeterminacion = 0;
        m_titulo = String.Empty;
        m_orden = 0;
        m_formatoimpresion = String.Empty;
        m_idusuarioregistro = new Usuario();
        m_fecharegistro = DateTime.MinValue;
    }
    #endregion // End of Default ( Empty ) Class Constuctor

    #region Required Fields Only Constructor
    /// <summary>
    /// required (not null) fields only constructor
    /// </summary>
    public PracticaDeterminacion(
        Efector idefector,
        Item iditempractica,
        int iditemdeterminacion,
        int orden,
        Usuario idusuarioregistro,
        DateTime fecharegistro)
        : this()
    {
        m_idefector = idefector;
        m_iditempractica = iditempractica;
        m_iditemdeterminacion = iditemdeterminacion;
        m_titulo = String.Empty;
        m_orden = orden;
        m_formatoimpresion = String.Empty;
        m_idusuarioregistro = idusuarioregistro;
        m_fecharegistro = fecharegistro;
    }
    #endregion // End Required Fields Only Constructor

    #region Public Properties
    /// <summary>
    /// Primary key of the record.
    /// </summary>
    public int IdPracticaDeterminacion
    {
        get { return m_idpracticadeterminacion; }
        set
        {
            m_isChanged |= ( m_idpracticadeterminacion != value );
            m_idpracticadeterminacion = value;
        }
    }

    /// <summary>
    /// Related Efector entity.
    /// </summary>
    public Efector IdEfector
    {
        get { return m_idefector; }
        set
        {
            // NOTE(review): != on Efector is a reference comparison unless the
            // type overloads the operator - confirm intended change tracking.
            m_isChanged |= ( m_idefector != value );
            m_idefector = value;
        }
    }

    /// <summary>
    /// Related practice Item entity.
    /// </summary>
    public Item IdItemPractica
    {
        get { return m_iditempractica; }
        set
        {
            m_isChanged |= ( m_iditempractica != value );
            m_iditempractica = value;
        }
    }

    /// <summary>
    /// Identifier of the determination item.
    /// </summary>
    public int IdItemDeterminacion
    {
        get { return m_iditemdeterminacion; }
        set
        {
            m_isChanged |= ( m_iditemdeterminacion != value );
            m_iditemdeterminacion = value;
        }
    }

    /// <summary>
    /// Title text; at most 100 characters.
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">Thrown when the assigned value exceeds 100 characters.</exception>
    public string Titulo
    {
        get { return m_titulo; }
        set
        {
            if( value != null && value.Length > 100)
                // Constructor is (paramName, actualValue, message): pass the
                // parameter name first and a descriptive message last.
                throw new ArgumentOutOfRangeException("value", value, "Invalid value for Titulo: maximum length is 100 characters.");

            m_isChanged |= (m_titulo != value); m_titulo = value;
        }
    }

    /// <summary>
    /// Display/sort order of the record.
    /// </summary>
    public int Orden
    {
        get { return m_orden; }
        set
        {
            m_isChanged |= ( m_orden != value );
            m_orden = value;
        }
    }

    /// <summary>
    /// Print format text; at most 50 characters.
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">Thrown when the assigned value exceeds 50 characters.</exception>
    public string FormatoImpresion
    {
        get { return m_formatoimpresion; }
        set
        {
            if( value != null && value.Length > 50)
                // Constructor is (paramName, actualValue, message): pass the
                // parameter name first and a descriptive message last.
                throw new ArgumentOutOfRangeException("value", value, "Invalid value for FormatoImpresion: maximum length is 50 characters.");

            m_isChanged |= (m_formatoimpresion != value); m_formatoimpresion = value;
        }
    }

    /// <summary>
    /// User that registered the record.
    /// </summary>
    public Usuario IdUsuarioRegistro
    {
        get { return m_idusuarioregistro; }
        set
        {
            m_isChanged |= ( m_idusuarioregistro != value );
            m_idusuarioregistro = value;
        }
    }

    /// <summary>
    /// Registration timestamp of the record.
    /// </summary>
    public DateTime FechaRegistro
    {
        get { return m_fecharegistro; }
        set
        {
            m_isChanged |= ( m_fecharegistro != value );
            m_fecharegistro = value;
        }
    }

    /// <summary>
    /// Returns whether or not the object has changed it's values.
    /// </summary>
    public bool IsChanged
    {
        get { return m_isChanged; }
    }
    #endregion
}
| |
//===--- Slice.cs ---------------------------------------------------------===//
//
// Copyright (c) 2015 Joe Duffy. All rights reserved.
//
// This file is distributed under the MIT License. See LICENSE.md for details.
//
//===----------------------------------------------------------------------===//
using System.Collections;
using System.Collections.Generic;
namespace System
{
/// <summary>
/// Slice is a uniform API for dealing with arrays and subarrays, strings
/// and substrings, and unmanaged memory buffers. It adds minimal overhead
/// to regular accesses and is a struct so that creation and subslicing do
/// not require additional allocations. It is type- and memory-safe.
/// </summary>
public struct Slice<T> : IEnumerable<T>
{
/// <summary>A managed array/string; or null for native ptrs.</summary>
readonly object m_object;
/// <summary>An byte-offset into the array/string; or a native ptr.</summary>
readonly UIntPtr m_offset;
/// <summary>Fetches the number of elements this Slice contains.</summary>
public readonly int Length;
/// <summary>
/// Creates a new slice over the entirety of the target array.
/// </summary>
/// <param name="array">The target array.</param>
/// <exception cref="System.ArgumentException">
/// Thrown if the 'array' parameter is null.
/// </exception>
public Slice(T[] array)
{
Contract.Requires(array != null);
m_object = array;
m_offset = new UIntPtr((uint)SliceHelpers<T>.OffsetToArrayData);
Length = array.Length;
}
/// <summary>
/// Creates a new slice over the portion of the target array beginning
/// at 'start' index.
/// </summary>
/// <param name="array">The target array.</param>
/// <param name="start">The index at which to begin the slice.</param>
/// <exception cref="System.ArgumentException">
/// Thrown if the 'array' parameter is null.
/// </exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// Thrown when the specified start index is not in range (<0 or >&eq;length).
/// </exception>
public Slice(T[] array, int start)
{
Contract.Requires(array != null);
Contract.RequiresInInclusiveRange(start, array.Length);
if (start < array.Length) {
m_object = array;
m_offset = new UIntPtr(
(uint)(SliceHelpers<T>.OffsetToArrayData + (start * PtrUtils.SizeOf<T>())));
Length = array.Length - start;
}
else {
m_object = null;
m_offset = UIntPtr.Zero;
Length = 0;
}
}
/// <summary>
/// Creates a new slice over the portion of the target array beginning
/// at 'start' index and ending at 'end' index (exclusive).
/// </summary>
/// <param name="array">The target array.</param>
/// <param name="start">The index at which to begin the slice.</param>
/// <param name="end">The index at which to end the slice (exclusive).</param>
/// <exception cref="System.ArgumentException">
/// Thrown if the 'array' parameter is null.
/// </exception>
/// <exception cref="System.ArgumentOutOfRangeException">
/// Thrown when the specified start or end index is not in range (<0 or >&eq;length).
/// </exception>
public Slice(T[] array, int start, int end)
{
Contract.Requires(array != null);
Contract.RequiresInInclusiveRange(start, array.Length);
if (start < array.Length) {
m_object = array;
m_offset = new UIntPtr(
(uint)(SliceHelpers<T>.OffsetToArrayData + (start * PtrUtils.SizeOf<T>())));
Length = end - start;
}
else {
m_object = null;
m_offset = UIntPtr.Zero;
Length = 0;
}
}
/// <summary>
/// Creates a new slice over the target unmanaged buffer. Clearly this
/// is quite dangerous, because we are creating arbitrarily typed T's
/// out of a void*-typed block of memory. And the length is not checked.
/// But if this creation is correct, then all subsequent uses are correct.
/// </summary>
/// <param name="ptr">An unmanaged pointer to memory.</param>
/// <param name="length">The number of T elements the memory contains.</param>
public unsafe Slice(void* ptr, int length)
{
Contract.Requires(length >= 0);
Contract.Requires(length == 0 || ptr != null);
m_object = null;
m_offset = new UIntPtr(ptr);
Length = length;
}
/// <summary>
/// An internal helper for creating slices. Not for public use.
/// </summary>
internal Slice(object obj, UIntPtr offset, int length)
{
m_object = obj;
m_offset = offset;
Length = length;
}
/// <summary>
/// Fetches the managed object (if any) that this Slice points at.
/// </summary>
internal object Object
{
get { return m_object; }
}
/// <summary>
/// Fetches the offset -- or sometimes, raw pointer -- for this Slice.
/// </summary>
internal UIntPtr Offset
{
get { return m_offset; }
}
/// <summary>
/// Fetches the element at the specified index.
/// </summary>
/// <exception cref="System.ArgumentOutOfRangeException">
/// Thrown when the specified index is not in range (<0 or >&eq;length).
/// </exception>
public T this[int index]
{
get {
Contract.RequiresInRange(index, Length);
return PtrUtils.Get<T>(
m_object, m_offset + (index * PtrUtils.SizeOf<T>()));
}
set {
Contract.RequiresInRange(index, Length);
PtrUtils.Set<T>(
m_object, m_offset + (index * PtrUtils.SizeOf<T>()), value);
}
}
/// <summary>
/// Copies the contents of this Slice into a new array. This heap
/// allocates, so should generally be avoided, however is sometimes
/// necessary to bridge the gap with APIs written in terms of arrays.
/// </summary>
public T[] Copy()
{
var dest = new T[Length];
CopyTo(dest.Slice());
return dest;
}
/// <summary>
/// Copies the contents of this Slice into another. The destination
/// must be at least as big as the source, and may be bigger.
/// </summary>
/// <param name="dest">The Slice to copy items into.</param>
public void CopyTo(Slice<T> dest)
{
Contract.Requires(dest.Length >= Length);
if (Length == 0) {
return;
}
// TODO(joe): specialize to use a fast memcpy if T is pointerless.
for (int i = 0; i < Length; i++) {
dest[i] = this[i];
}
}
/// <summary>
/// Forms a subslice out of the given slice, beginning at 'start'.
/// </summary>
/// <param name="start">The index at which to begin this subslice.</param>
/// <exception cref="System.ArgumentOutOfRangeException">
/// Thrown when the specified start index is not in range (<0 or >&eq;length).
/// </exception>
public Slice<T> Sub(int start)
{
return Sub(start, Length);
}
/// <summary>
/// Forms a subslice out of the given slice, beginning at 'start', and
/// ending at 'end' (exclusive).
/// </summary>
/// <param name="start">The index at which to begin this subslice.</param>
/// <param name="end">The index at which to end this subslice (exclusive).</param>
/// <exception cref="System.ArgumentOutOfRangeException">
/// Thrown when the specified start or end index is not in range (<0 or >&eq;length).
/// </exception>
public Slice<T> Sub(int start, int end)
{
Contract.RequiresInInclusiveRange(start, end, Length);
return new Slice<T>(
m_object, m_offset + (start * PtrUtils.SizeOf<T>()), end - start);
}
/// <summary>
/// Checks to see if two slices point at the same memory. Note that
/// this does *not* check to see if the *contents* are equal.
/// </summary>
public bool ReferenceEquals(Slice<T> other)
{
return Object == other.Object &&
Offset == other.Offset && Length == other.Length;
}
/// <summary>
/// Returns a struct enumerator over the Slice's entire contents.
/// </summary>
public Enumerator GetEnumerator()
{
return new Enumerator(this);
}
// Non-generic IEnumerable support; boxes the struct enumerator.
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
// Generic IEnumerable<T> support; boxes the struct enumerator.
IEnumerator<T> IEnumerable<T>.GetEnumerator()
{
return GetEnumerator();
}
/// <summary>
/// A struct-based enumerator, to make fast enumerations possible.
/// This isn't designed for direct use, instead see GetEnumerator.
/// </summary>
public struct Enumerator : IEnumerator<T>
{
    Slice<T> m_slice;  // Slice whose elements are being enumerated.
    int m_position;    // Index of the current element; -1 before the first MoveNext.

    public Enumerator(Slice<T> slice)
    {
        m_slice = slice;
        m_position = -1;
    }

    public T Current
    {
        get { return m_slice[m_position]; }
    }

    object IEnumerator.Current
    {
        get { return m_slice[m_position]; }
    }

    public bool MoveNext()
    {
        m_position++;
        return m_position < m_slice.Length;
    }

    public void Reset()
    {
        m_position = -1;
    }

    public void Dispose()
    {
        // Clear state so further use of a disposed enumerator fails fast.
        m_slice = default(Slice<T>);
        m_position = -1;
    }
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Xml.Serialization;
using ParatureSDK.Fields;
using ParatureSDK.ParaObjects.EntityReferences;
using System.Text;
namespace ParatureSDK.ParaObjects
{
/// <summary>
/// Holds all the properties of the Ticket module.
/// </summary>
public class Ticket : ParaEntity, IMutableEntity, IHistoricalEntity, IActionRunner
{
/// <summary>
/// The full ticket number, including the account number. Usually in the format
/// of Account #-Ticket #
/// </summary>
public string Ticket_Number
{
    get
    {
        return GetFieldValue<string>("Ticket_Number");
    }
    set
    {
        // Locate the backing static field, registering it on first assignment.
        StaticField field = null;
        foreach (var candidate in StaticFields)
        {
            if (candidate.Name == "Ticket_Number")
            {
                field = candidate;
                break;
            }
        }
        if (field == null)
        {
            field = new StaticField
            {
                Name = "Ticket_Number",
                FieldType = "text",
                DataType = "string"
            };
            StaticFields.Add(field);
        }
        field.Value = value;
    }
}
/// <summary>
/// The product associated to a ticket. It will only be populated in certain configurations.
/// </summary>
public ProductReference Ticket_Product
{
get
{
return GetFieldValue<ProductReference>("Ticket_Product");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Ticket_Product");
if (field == null)
{
field = new StaticField()
{
Name = "Ticket_Product",
FieldType = "entity",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// The status of the ticket
/// </summary>
public TicketStatusReference Ticket_Status
{
get
{
return GetFieldValue<TicketStatusReference>("Ticket_Status");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Ticket_Status");
if (field == null)
{
field = new StaticField()
{
Name = "Ticket_Status",
FieldType = "entity",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// The asset linked to the ticket. this is only populated for certain Product/Asset configurations, when the ticket is linked to an Asset.
/// </summary>
public AssetReference Ticket_Asset
{
get
{
return GetFieldValue<AssetReference>("Ticket_Asset");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Ticket_Asset");
if (field == null)
{
field = new StaticField()
{
Name = "Ticket_Asset",
FieldType = "entity",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public SlaReference Ticket_Sla
{
get
{
return GetFieldValue<SlaReference>("Ticket_Sla");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Ticket_Sla");
if (field == null)
{
field = new StaticField()
{
Name = "Ticket_Sla",
FieldType = "dropdown",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// The department the tickets belongs to. While you specified already the department id in your
/// credentials class, it could be that the user you are passing the Token of has access to multiple
/// departments. In which case, the tickets that account has access to will be visible (no matter their departments).
/// </summary>
public DepartmentReference Department
{
get
{
return GetFieldValue<DepartmentReference>("Department");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Department");
if (field == null)
{
field = new StaticField()
{
Name = "Department",
FieldType = "entity",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// The customer that owns the ticket. If you only requested a standard Ticket read, only the customer id is returned within the Customer class.
/// </summary>
public CustomerReference Ticket_Customer
{
get
{
return GetFieldValue<CustomerReference>("Ticket_Customer");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Ticket_Customer");
if (field == null)
{
field = new StaticField()
{
Name = "Ticket_Customer",
FieldType = "entity",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// The additional contact associated to this ticket.
/// </summary>
public CustomerReference Additional_Contact
{
get
{
return GetFieldValue<CustomerReference>("Additional_Contact");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Additional_Contact");
if (field == null)
{
field = new StaticField()
{
Name = "Additional_Contact",
FieldType = "entity",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// The CSR that has entered this ticket (this class is filled only when a Ticket has been created by a CSR). Only the CSR id and Name are filled in case of a standard ticket read.
/// </summary>
public CsrReference Entered_By
{
get
{
return GetFieldValue<CsrReference>("Entered_By");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Entered_By");
if (field == null)
{
field = new StaticField()
{
Name = "Entered_By",
FieldType = "entity",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// The CSR that has this ticket assigned. This class is only filled if the ticket is assigned to a CSR (as opposed to a Queue). If the ticket is assigned to a CSR, this class will only be filled with the ID of the CSR (unless you requested an appropriate request depth).
/// </summary>
public CsrReference Assigned_To
{
get
{
return GetFieldValue<CsrReference>("Assigned_To");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Assigned_To");
if (field == null)
{
field = new StaticField()
{
Name = "Assigned_To",
FieldType = "entity",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// Whether email notification is turned on or off.
/// </summary>
public bool? Email_Notification
{
get
{
return GetFieldValue<bool?>("Email_Notification");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Email_Notification");
if (field == null)
{
field = new StaticField()
{
Name = "Email_Notification",
FieldType = "checkbox",
DataType = "boolean"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// Whether email notification to Additional Contact is turned on or off.
/// </summary>
public bool? Email_Notification_Additional_Contact
{
get
{
return GetFieldValue<bool?>("Email_Notification_Additional_Contact");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Email_Notification_Additional_Contact");
if (field == null)
{
field = new StaticField()
{
Name = "Email_Notification_Additional_Contact",
FieldType = "checkbox",
DataType = "boolean"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// Whether this ticket is hidden from the customer.
/// </summary>
public bool? Hide_From_Customer
{
get
{
return GetFieldValue<bool?>("Hide_From_Customer");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Hide_From_Customer");
if (field == null)
{
// Lazily register the backing static field on first assignment.
field = new StaticField()
{
Name = "Hide_From_Customer",
FieldType = "checkbox",
DataType = "boolean"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// An optional string of CSR emails that are CCed when an email notification is sent.
/// NOTE(review): the property holds a string, yet the lazily created backing
/// field is registered with FieldType/DataType "entity" (compare Ticket_Number,
/// which uses "text"/"string") — confirm whether that is intentional.
/// </summary>
public string Cc_Csr
{
get
{
return GetFieldValue<string>("Cc_Csr");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Cc_Csr");
if (field == null)
{
field = new StaticField()
{
Name = "Cc_Csr",
FieldType = "entity",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// An optional string of customer emails that are CCed when an email notification is sent.
/// NOTE(review): as with Cc_Csr, this string property registers its backing
/// field as "entity"/"entity" — confirm whether that is intentional.
/// </summary>
public string Cc_Customer
{
get
{
return GetFieldValue<string>("Cc_Customer");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Cc_Customer");
if (field == null)
{
field = new StaticField()
{
Name = "Cc_Customer",
FieldType = "entity",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public bool Overwrite_Sla_In_Rr
{
get
{
return GetFieldValue<bool>("Overwrite_Sla_In_Rr");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Overwrite_Sla_In_Rr");
if (field == null)
{
field = new StaticField()
{
Name = "Overwrite_Sla_In_Rr",
FieldType = "checkbox",
DataType = "boolean"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public Int64? Initial_Resolution_Target_Duration
{
get
{
return GetFieldValue<Int64?>("Initial_Resolution_Target_Duration");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Initial_Resolution_Target_Duration");
if (field == null)
{
field = new StaticField()
{
Name = "Initial_Resolution_Target_Duration",
FieldType = "int",
DataType = "int"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public DateTime? Initial_Resolution_Date
{
get
{
return GetFieldValue<DateTime?>("Initial_Resolution_Date");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Initial_Resolution_Date");
if (field == null)
{
field = new StaticField()
{
Name = "Initial_Resolution_Date",
FieldType = "usdate",
DataType = "date"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public Int64? Initial_Response_Target_Duration
{
get
{
return GetFieldValue<Int64?>("Initial_Response_Target_Duration");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Initial_Response_Target_Duration");
if (field == null)
{
field = new StaticField()
{
Name = "Initial_Response_Target_Duration",
FieldType = "int",
DataType = "int"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public Int64? Initial_Resolution_Duration_Bh
{
get
{
return GetFieldValue<Int64?>("Initial_Resolution_Duration_Bh");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Initial_Resolution_Duration_Bh");
if (field == null)
{
field = new StaticField()
{
Name = "Initial_Resolution_Duration_Bh",
FieldType = "int",
DataType = "int"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public Int64? Initial_Response_Duration_Bh
{
get
{
return GetFieldValue<Int64?>("Initial_Response_Duration_Bh");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Initial_Response_Duration_Bh");
if (field == null)
{
field = new StaticField()
{
Name = "Initial_Response_Duration_Bh",
FieldType = "int",
DataType = "int"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public DateTime? Resolution_Violation_Date_Bh
{
get
{
return GetFieldValue<DateTime?>("Resolution_Violation_Date_Bh");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Resolution_Violation_Date_Bh");
if (field == null)
{
field = new StaticField()
{
Name = "Resolution_Violation_Date_Bh",
FieldType = "usdate",
DataType = "date"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public DateTime? Response_Violation_Date_Bh
{
get
{
return GetFieldValue<DateTime?>("Response_Violation_Date_Bh");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Response_Violation_Date_Bh");
if (field == null)
{
field = new StaticField()
{
Name = "Response_Violation_Date_Bh",
FieldType = "usdate",
DataType = "date"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public SlaReference Response_Sla
{
get
{
return GetFieldValue<SlaReference>("Response_Sla");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Response_Sla");
if (field == null)
{
field = new StaticField()
{
Name = "Response_Sla",
FieldType = "dropdown",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public SlaReference Resolution_Sla
{
get
{
return GetFieldValue<SlaReference>("Resolution_Sla");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Resolution_Sla");
if (field == null)
{
field = new StaticField()
{
Name = "Resolution_Sla",
FieldType = "dropdown",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public Int64? Warning_Time
{
get
{
return GetFieldValue<Int64?>("Warning_Time");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Warning_Time");
if (field == null)
{
field = new StaticField()
{
Name = "Warning_Time",
FieldType = "int",
DataType = "int"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public Int64? Last_Resolution_Duration_Bh
{
get
{
return GetFieldValue<Int64?>("Last_Resolution_Duration_Bh");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Last_Resolution_Duration_Bh");
if (field == null)
{
field = new StaticField()
{
Name = "Last_Resolution_Duration_Bh",
FieldType = "int",
DataType = "int"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public CsrReference Initial_Response_Userid
{
get
{
return GetFieldValue<CsrReference>("Initial_Response_Userid");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Initial_Response_Userid");
if (field == null)
{
field = new StaticField()
{
Name = "Initial_Response_Userid",
FieldType = "dropdown",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public CsrReference Initial_Resolution_Userid
{
get
{
return GetFieldValue<CsrReference>("Initial_Resolution_Userid");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Initial_Resolution_Userid");
if (field == null)
{
field = new StaticField()
{
Name = "Initial_Resolution_Userid",
FieldType = "dropdown",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public CsrReference Final_Resolution_Userid
{
get
{
return GetFieldValue<CsrReference>("Final_Resolution_Userid");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Final_Resolution_Userid");
if (field == null)
{
field = new StaticField()
{
Name = "Final_Resolution_Userid",
FieldType = "dropdown",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// The Queue that has this ticket assigned to. This class is only filled if the ticket is assigned to a Queue (as opposed to a CSR).
/// </summary>
public QueueReference Ticket_Queue
{
get
{
return GetFieldValue<QueueReference>("Ticket_Queue");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Ticket_Queue");
if (field == null)
{
field = new StaticField()
{
Name = "Ticket_Queue",
FieldType = "dropdown",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// The Portal Alias that the ticket was submitted under
/// </summary>
public PortalAliasReference Ticket_Portal_Alias
{
get
{
return GetFieldValue<PortalAliasReference>("Ticket_Portal_Alias");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Ticket_Portal_Alias");
if (field == null)
{
field = new StaticField()
{
Name = "Ticket_Portal_Alias",
FieldType = "entity",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// Parent Ticket of this ticket. Only filled whenever there is a parent ticket, and only the ticket id will be filled; request an appropriate depth if you need the full parent ticket.
/// </summary>
public TicketReference Ticket_Parent
{
get
{
return GetFieldValue<TicketReference>("Ticket_Parent");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Ticket_Parent");
if (field == null)
{
field = new StaticField()
{
Name = "Ticket_Parent",
FieldType = "entity",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// The list, if any exists, of all the child tickets. Please note that, by default, only the ticket id is filled.
/// </summary>
public List<Ticket> Ticket_Children
{
get
{
return GetFieldValue<List<Ticket>>("Ticket_Children");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Ticket_Children");
if (field == null)
{
field = new StaticField()
{
Name = "Ticket_Children",
FieldType = "entitymultiple",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// The list, if any exists, of all the related chats.
/// </summary>
public List<Chat> Related_Chats
{
get
{
return GetFieldValue<List<Chat>>("Related_Chats");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Related_Chats");
if (field == null)
{
field = new StaticField()
{
Name = "Related_Chats",
FieldType = "entitymultiple",
DataType = "entity"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// The list, if any exists, of all the Attachments of this ticket.
/// </summary>
public List<Attachment> Ticket_Attachments
{
get
{
return GetFieldValue<List<Attachment>>("Ticket_Attachments");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Ticket_Attachments");
if (field == null)
{
//the FieldType and DataType are NOT from the actual APIs.
//They are a representation purely added for the SDK
field = new StaticField()
{
Name = "Ticket_Attachments",
FieldType = "entitymultiple",
DataType = "attachment"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// Internal property. We don't want to allow the user to accidentally delete attachments.
/// A situation could occur where the developer tries to update tickets but instantiates the Tickets instead of retrieving
/// This could theoretically allow the developer to delete tickets in rare scenarios.
///
/// Going to add this as an internal property which needs to be explicitly called by the user before we decide to delete attachments
/// </summary>
[XmlIgnore]
internal bool? AllowDeleteAllAttachments
{
get
{
return GetFieldValue<bool?>("AllowDeleteAllAttachments");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "AllowDeleteAllAttachments");
if (field == null)
{
//the FieldType and DataType are NOT from the actual APIs.
//They are a representation purely added for the SDK
field = new StaticField()
{
Name = "AllowDeleteAllAttachments",
IgnoreSerializeXml = true,
FieldType = "checkbox",
DataType = "boolean"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// The list, if any exists, of all the available actions that can be run against this ticket.
/// Only the id and the name of each action are populated.
/// </summary>
public List<Action> Actions
{
get
{
return GetFieldValue<List<Action>>("Actions");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Actions");
if (field == null)
{
field = new StaticField()
{
IgnoreSerializeXml = true,
Name = "Actions"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// The actions that ran on this ticket. This is only populated if you requested the ticket action history.
/// </summary>
[XmlArray("ActionHistory")]
[XmlArrayItem("History")]
public List<ActionHistory> ActionHistory
{
get
{
return GetFieldValue<List<ActionHistory>>("ActionHistory");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "ActionHistory");
if (field == null)
{
field = new StaticField()
{
IgnoreSerializeXml = true,
Name = "ActionHistory"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public DateTime Date_Created
{
get
{
return GetFieldValue<DateTime>("Date_Created");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Date_Created");
if (field == null)
{
field = new StaticField()
{
Name = "Date_Created",
FieldType = "usdate",
DataType = "date"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
public DateTime Date_Updated
{
get
{
return GetFieldValue<DateTime>("Date_Updated");
}
set
{
var field = StaticFields.FirstOrDefault(f => f.Name == "Date_Updated");
if (field == null)
{
field = new StaticField()
{
Name = "Date_Updated",
FieldType = "usdate",
DataType = "date"
};
StaticFields.Add(field);
}
field.Value = value;
}
}
/// <summary>
/// Uploads an attachment to the current ticket.
/// The attachment will also be added to the current Ticket's attachments collection.
/// </summary>
/// <param name="creds">The Parature credentials class for the APIs.</param>
/// <param name="attachment">
/// The binary Byte array of the attachment you would like to add.
/// </param>
/// <param name="contentType">The MIME content type of the attachment.</param>
/// <param name="fileName">The name you would like the attachment to have.</param>
[Obsolete("To be removed in favor of Ticket.AddAttachment(ParaService, byte[], string, string) in the next major revision.", false)]
public void AttachmentsAdd(ParaCredentials creds, Byte[] attachment, string contentType, string fileName)
{
Ticket_Attachments.Add(ApiHandler.Ticket.AddAttachment(creds, attachment, contentType, fileName));
}
/// <summary>
/// Uploads an attachment to the current ticket.
/// The attachment will also be added to the current Ticket's attachments collection.
/// To complete the attachment, the Update method should be performed on the ticket.
/// </summary>
/// <param name="service">The Parature service used to perform the upload.</param>
/// <param name="attachment">
/// The binary Byte array of the attachment you would like to add.
/// </param>
/// <param name="contentType">The MIME content type of the attachment.</param>
/// <param name="fileName">The name you would like the attachment to have.</param>
public void AddAttachment(ParaService service, Byte[] attachment, string contentType, string fileName)
{
Ticket_Attachments.Add(service.UploadFile<Ticket>(attachment, contentType, fileName));
}
/// <summary>
/// Uploads a text based file to the current ticket. You need to pass a string, and the mime type of a text based file (html, text, etc...).
/// </summary>
/// <param name="text">
/// The content of the text based file.
///</param>
/// <param name="creds">
/// The Parature credentials class for the APIs.
/// </param>
/// <param name="contentType">
/// The type of content being uploaded; make sure this matches the text.
/// </param>
/// <param name="fileName">
/// The name you would like the attachment to have.
///</param>
[Obsolete("To be removed in favor of Ticket.AddAttachment(ParaService, string, string, string) in the next major revision.", false)]
public void AttachmentsAdd(ParaCredentials creds, string text, string contentType, string fileName)
{
Ticket_Attachments.Add(ApiHandler.Ticket.AddAttachment(creds, text, contentType, fileName));
}
/// <summary>
/// Uploads a text based file to the current ticket. You need to pass a string, and the mime type of a text based file (html, text, etc...).
/// </summary>
/// <param name="text">
/// The content of the text based file.
///</param>
/// <param name="creds">
/// The parature credentials class for the APIs.
/// </param>
/// <param name="contentType">
/// The type of content being uploaded, you have to make sure this is the right text.
/// </param>
/// <param name="fileName">
/// The name you woule like the attachment to have.
///</param>
public void AddAttachment(ParaService service, string text, string contentType, string fileName)
{
var encoding = new ASCIIEncoding();
var bytes = encoding.GetBytes(text);
Ticket_Attachments.Add(service.UploadFile<Ticket>(bytes, contentType, fileName));
}
/// <summary>
/// Updates the current Ticket attachment with a text based file. You need to
/// pass a string, and the mime type of a text based file (html, text, etc...).
/// The existing attachment identified by <paramref name="attachmentGuid"/> is
/// deleted, then the new content is uploaded in its place.
/// </summary>
/// <param name="creds">The Parature credentials class for the APIs.</param>
/// <param name="text">The content of the text based file.</param>
/// <param name="attachmentGuid">The GUID of the attachment being replaced.</param>
/// <param name="contentType">The type of content being uploaded; make sure this matches the text.</param>
/// <param name="fileName">The name you would like the attachment to have.</param>
// Fix: the obsolete message previously pointed at a nonexistent overload
// "UpdateAttachment(string, string, string)"; the real replacement is
// UpdateAttachment(ParaService, string, string, string, string).
[Obsolete("To be removed in favor of Ticket.UpdateAttachment(ParaService, string, string, string, string) in the next major revision.", false)]
public void AttachmentsUpdate(ParaCredentials creds, string text, string attachmentGuid, string contentType, string fileName)
{
    AttachmentsDelete(attachmentGuid);
    Ticket_Attachments.Add(ApiHandler.Ticket.AddAttachment(creds, text, contentType, fileName));
}
/// <summary>
/// Updates the current Ticket attachment with a text based file. You need to pass a string, and the mime type of a text based file (html, text, etc...).
/// </summary>
/// <param name="service">
/// The Parature service used to perform the upload.
/// </param>
/// <param name="text">
/// The content of the text based file.
///</param>
/// <param name="attachmentGuid">
/// The GUID of the attachment being replaced.
/// </param>
/// <param name="contentType">
/// The type of content being uploaded; make sure this matches the text.
/// </param>
/// <param name="fileName">
/// The name you would like the attachment to have.
///</param>
public void UpdateAttachment(ParaService service, string text, string attachmentGuid, string contentType, string fileName)
{
// NOTE(review): ASCII encoding replaces any non-ASCII characters — confirm
// whether callers ever pass non-ASCII text (UTF-8 may be intended).
var encoding = new ASCIIEncoding();
var bytes = encoding.GetBytes(text);
UpdateAttachment(service, bytes, attachmentGuid, contentType, fileName);
}
/// <summary>
/// If you have an attachment and would like to replace the file, use this method. It will actually delete
/// the existing attachment, and then add a new one to replace it.
/// </summary>
/// <param name="creds">The Parature credentials class for the APIs.</param>
/// <param name="attachment">The binary Byte array of the replacement attachment.</param>
/// <param name="attachmentGuid">The GUID of the attachment being replaced.</param>
/// <param name="contentType">The MIME content type of the attachment.</param>
/// <param name="fileName">The name you would like the attachment to have.</param>
// Fix: the obsolete message omitted the attachmentGuid parameter; the real
// replacement is UpdateAttachment(ParaService, byte[], string, string, string).
[Obsolete("To be removed in favor of Ticket.UpdateAttachment(ParaService, byte[], string, string, string) in the next major revision.", false)]
public void AttachmentsUpdate(ParaCredentials creds, Byte[] attachment, string attachmentGuid, string contentType, string fileName)
{
    AttachmentsDelete(attachmentGuid);
    Ticket_Attachments.Add(ApiHandler.Ticket.AddAttachment(creds, attachment, contentType, fileName));
}
/// <summary>
/// If you have an attachment and would like to replace the file, use this method. It will actually delete
/// the existing attachment, and then add a new one to replace it.
/// </summary>
/// <param name="service">The Parature service used to perform the upload.</param>
/// <param name="attachment">The binary Byte array of the replacement attachment.</param>
/// <param name="attachmentGuid">The GUID of the attachment being replaced.</param>
/// <param name="contentType">The MIME content type of the attachment.</param>
/// <param name="fileName">The name you would like the attachment to have.</param>
public void UpdateAttachment(ParaService service, Byte[] attachment, string attachmentGuid, string contentType, string fileName)
{
DeleteAttachment(attachmentGuid);
Ticket_Attachments.Add(service.UploadFile<Ticket>(attachment, contentType, fileName));
}
/// <summary>
/// If you have an attachment and would like to delete it, just pass the id here.
/// </summary>
/// <param name="attachmentGuid">The GUID of the attachment to remove.</param>
/// <returns>True if a matching attachment was found and removed; otherwise false.</returns>
[Obsolete("To be removed in favor of Ticket.DeleteAttachment in the next major revision.", false)]
public bool AttachmentsDelete(string attachmentGuid)
{
return DeleteAttachment(attachmentGuid);
}
/// <summary>
/// Removes the attachment with the given GUID from this ticket's
/// attachment collection.
/// </summary>
/// <param name="attachmentGuid">The GUID of the attachment to remove.</param>
/// <returns>True if a matching attachment was found and removed; otherwise false.</returns>
public bool DeleteAttachment(string attachmentGuid)
{
    var attachments = Ticket_Attachments;
    if (attachments == null)
    {
        return false;
    }
    foreach (var att in attachments)
    {
        if (att.Guid == attachmentGuid)
        {
            attachments.Remove(att);
            if (attachments.Count == 0)
            {
                // Removing the last attachment requires the explicit
                // opt-in flag before an update may clear them server-side.
                AllowDeleteAllAttachments = true;
            }
            return true;
        }
    }
    return false;
}
/// <summary>
/// Use this method to explicitly delete all attachments.
/// Sets the internal opt-in flag so the deletion is honored, then
/// replaces the attachment collection with an empty list.
/// </summary>
public void DeleteAllAttachments()
{
AllowDeleteAllAttachments = true;
Ticket_Attachments = new List<Attachment>();
}
/// <summary>
/// Instantiates an empty ticket.
/// </summary>
public Ticket()
: base()
{
}
/// <summary>
/// Copy constructor. Copies the supplied ticket's fields; list-valued
/// members are copied into new lists so the two tickets do not share
/// collection instances.
/// </summary>
/// <param name="ticket">The ticket to copy. May be null, in which case only base state is initialized.</param>
public Ticket(Ticket ticket)
    : base(ticket)
{
    if (ticket != null)
    {
        // Guard every list copy: on a ticket where these fields were never
        // set the properties return null, and new List<T>(null) throws
        // ArgumentNullException. Ticket_Children was already guarded; the
        // other three were not.
        if (ticket.ActionHistory != null)
        {
            ActionHistory = new List<ActionHistory>(ticket.ActionHistory);
        }
        if (ticket.Actions != null)
        {
            Actions = new List<Action>(ticket.Actions);
        }
        Additional_Contact = ticket.Additional_Contact;
        Assigned_To = ticket.Assigned_To;
        Cc_Csr = ticket.Cc_Csr;
        Cc_Customer = ticket.Cc_Customer;
        Date_Created = ticket.Date_Created;
        Date_Updated = ticket.Date_Updated;
        Department = ticket.Department;
        Email_Notification = ticket.Email_Notification;
        Email_Notification_Additional_Contact = ticket.Email_Notification_Additional_Contact;
        Entered_By = ticket.Entered_By;
        Hide_From_Customer = ticket.Hide_From_Customer;
        Id = ticket.Id;
        Ticket_Asset = ticket.Ticket_Asset;
        if (ticket.Ticket_Attachments != null)
        {
            Ticket_Attachments = new List<Attachment>(ticket.Ticket_Attachments);
        }
        if (ticket.Ticket_Customer != null)
        {
            Ticket_Customer = ticket.Ticket_Customer;
        }
        if (ticket.Ticket_Children != null)
        {
            Ticket_Children = new List<Ticket>(ticket.Ticket_Children);
        }
        Ticket_Number = ticket.Ticket_Number;
        if (ticket.Ticket_Parent != null)
        {
            Ticket_Parent = ticket.Ticket_Parent;
        }
        Ticket_Product = ticket.Ticket_Product;
        Ticket_Queue = ticket.Ticket_Queue;
        Ticket_Status = ticket.Ticket_Status;
        if (ticket.Ticket_Sla != null)
        {
            // Previously omitted from the copy entirely.
            Ticket_Sla = ticket.Ticket_Sla;
        }
        // NOTE(review): the SLA/duration-related members (Response_Sla,
        // Resolution_Sla, Initial_* durations, Warning_Time, ...) are still
        // not copied — confirm whether that omission is intentional.
    }
}
/// <summary>
/// Returns a human-readable identifier for this ticket.
/// </summary>
public override string GetReadableName()
{
    return string.Format("Ticket #{0}", Id);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Microsoft.Win32;
using Microsoft.Win32.SafeHandles;
using System;
using System.Collections;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Versioning;
using System.Security;
using System.Security.Principal;
namespace System.Security.AccessControl
{
internal static class Win32
{
//
// Wrapper around advapi32.ConvertSecurityDescriptorToStringSecurityDescriptorW
//
// Converts a security descriptor in self-relative binary form into its SDDL
// string representation. Returns 0 on success; on failure returns the Win32
// error code and sets resultSddl to null. Throws OutOfMemoryException when
// the underlying call reports ERROR_NOT_ENOUGH_MEMORY.
//
internal static int ConvertSdToSddl(
byte[] binaryForm,
int requestedRevision,
SecurityInfos si,
out string resultSddl)
{
int errorCode;
IntPtr ByteArray;
uint ByteArraySize = 0;
if (!Interop.Advapi32.ConvertSdToStringSd(binaryForm, (uint)requestedRevision, (uint)si, out ByteArray, ref ByteArraySize))
{
errorCode = Marshal.GetLastWin32Error();
goto Error;
}
//
// Extract data from the returned pointer
//
resultSddl = Marshal.PtrToStringUni(ByteArray);
//
// Now is a good time to get rid of the returned pointer
// (it was allocated by the OS and must be freed with LocalFree)
//
Interop.Kernel32.LocalFree(ByteArray);
return 0;
Error:
resultSddl = null;
if (errorCode == Interop.Errors.ERROR_NOT_ENOUGH_MEMORY)
{
throw new OutOfMemoryException();
}
return errorCode;
}
//
// Wrapper around advapi32.GetSecurityInfo
//
// Retrieves the security descriptor for the object identified either by
// 'name' (via GetSecurityInfoByName) or by 'handle' (via
// GetSecurityInfoByHandle). Returns ERROR_SUCCESS and fills resultSd on
// success, or a Win32 error code on failure. Requesting the SACL enables
// the security privilege for the duration of the call.
//
internal static int GetSecurityInfo(
ResourceType resourceType,
string name,
SafeHandle handle,
AccessControlSections accessControlSections,
out RawSecurityDescriptor resultSd
)
{
resultSd = null;
int errorCode;
IntPtr SidOwner, SidGroup, Dacl, Sacl, ByteArray;
SecurityInfos SecurityInfos = 0;
Privilege privilege = null;
// Translate the managed AccessControlSections flags into the native
// SECURITY_INFORMATION bits.
if ((accessControlSections & AccessControlSections.Owner) != 0)
{
SecurityInfos |= SecurityInfos.Owner;
}
if ((accessControlSections & AccessControlSections.Group) != 0)
{
SecurityInfos |= SecurityInfos.Group;
}
if ((accessControlSections & AccessControlSections.Access) != 0)
{
SecurityInfos |= SecurityInfos.DiscretionaryAcl;
}
if ((accessControlSections & AccessControlSections.Audit) != 0)
{
// Reading the SACL requires the security privilege.
SecurityInfos |= SecurityInfos.SystemAcl;
privilege = new Privilege(Privilege.Security);
}
try
{
if (privilege != null)
{
try
{
privilege.Enable();
}
catch (PrivilegeNotHeldException)
{
// we will ignore this exception and press on just in case this is a remote resource
}
}
if (name != null)
{
errorCode = (int)Interop.Advapi32.GetSecurityInfoByName(name, (uint)resourceType, (uint)SecurityInfos, out SidOwner, out SidGroup, out Dacl, out Sacl, out ByteArray);
}
else if (handle != null)
{
if (handle.IsInvalid)
{
throw new ArgumentException(
SR.Argument_InvalidSafeHandle,
nameof(handle));
}
else
{
errorCode = (int)Interop.Advapi32.GetSecurityInfoByHandle(handle, (uint)resourceType, (uint)SecurityInfos, out SidOwner, out SidGroup, out Dacl, out Sacl, out ByteArray);
}
}
else
{
// both are null, shouldn't happen
// Changing from SystemException to ArgumentException as this code path is indicative of a null name argument
// as well as an accessControlSections argument with an audit flag
throw new ArgumentException();
}
if (errorCode == Interop.Errors.ERROR_SUCCESS && IntPtr.Zero.Equals(ByteArray))
{
//
// This means that the object doesn't have a security descriptor. And thus we throw
// a specific exception for the caller to catch and handle properly.
//
throw new InvalidOperationException(SR.InvalidOperation_NoSecurityDescriptor);
}
else if (errorCode == Interop.Errors.ERROR_NOT_ALL_ASSIGNED ||
errorCode == Interop.Errors.ERROR_PRIVILEGE_NOT_HELD)
{
throw new PrivilegeNotHeldException(Privilege.Security);
}
else if (errorCode == Interop.Errors.ERROR_ACCESS_DENIED ||
errorCode == Interop.Errors.ERROR_CANT_OPEN_ANONYMOUS)
{
throw new UnauthorizedAccessException();
}
if (errorCode != Interop.Errors.ERROR_SUCCESS)
{
goto Error;
}
}
catch
{
// protection against exception filter-based luring attacks
if (privilege != null)
{
privilege.Revert();
}
throw;
}
finally
{
if (privilege != null)
{
privilege.Revert();
}
}
//
// Extract data from the returned pointer
// (copy the native self-relative descriptor into a managed byte array,
// then free the OS-allocated buffer)
//
uint Length = Interop.Advapi32.GetSecurityDescriptorLength(ByteArray);
byte[] BinaryForm = new byte[Length];
Marshal.Copy(ByteArray, BinaryForm, 0, (int)Length);
Interop.Kernel32.LocalFree(ByteArray);
resultSd = new RawSecurityDescriptor(BinaryForm, 0);
return Interop.Errors.ERROR_SUCCESS;
Error:
if (errorCode == Interop.Errors.ERROR_NOT_ENOUGH_MEMORY)
{
throw new OutOfMemoryException();
}
return errorCode;
}
//
// Wrapper around advapi32.SetNamedSecurityInfoW and advapi32.SetSecurityInfo
//
/// <summary>
/// Wrapper around advapi32 SetNamedSecurityInfoW / SetSecurityInfo: writes the requested
/// parts (owner, group, DACL, SACL) of an object's security descriptor.
/// Exactly one of <paramref name="name"/> (object path) or <paramref name="handle"/> is expected
/// to be non-null; if both are null an <see cref="ArgumentException"/> is thrown.
/// Returns 0 on success or the Win32 error code on failure; privilege and access failures
/// are surfaced as exceptions instead of error codes.
/// </summary>
internal static int SetSecurityInfo(
ResourceType type,
string name,
SafeHandle handle,
SecurityInfos securityInformation,
SecurityIdentifier owner,
SecurityIdentifier group,
GenericAcl sacl,
GenericAcl dacl)
{
int errorCode;
int Length;
byte[] OwnerBinary = null, GroupBinary = null, SaclBinary = null, DaclBinary = null;
Privilege securityPrivilege = null;
// Marshal each supplied component into its flat binary form, which is what the
// Win32 API expects; components left null are simply not written.
if (owner != null)
{
Length = owner.BinaryLength;
OwnerBinary = new byte[Length];
owner.GetBinaryForm(OwnerBinary, 0);
}
if (group != null)
{
Length = group.BinaryLength;
GroupBinary = new byte[Length];
group.GetBinaryForm(GroupBinary, 0);
}
if (dacl != null)
{
Length = dacl.BinaryLength;
DaclBinary = new byte[Length];
dacl.GetBinaryForm(DaclBinary, 0);
}
if (sacl != null)
{
Length = sacl.BinaryLength;
SaclBinary = new byte[Length];
sacl.GetBinaryForm(SaclBinary, 0);
}
if ((securityInformation & SecurityInfos.SystemAcl) != 0)
{
//
// Enable security privilege if trying to set a SACL.
// Note: even setting it by handle needs this privilege enabled!
//
securityPrivilege = new Privilege(Privilege.Security);
}
try
{
if (securityPrivilege != null)
{
try
{
securityPrivilege.Enable();
}
catch (PrivilegeNotHeldException)
{
// we will ignore this exception and press on just in case this is a remote resource
}
}
if (name != null)
{
// unchecked cast: SecurityInfos flags can set the sign bit when combined.
errorCode = (int)Interop.Advapi32.SetSecurityInfoByName(name, (uint)type, unchecked((uint)securityInformation), OwnerBinary, GroupBinary, DaclBinary, SaclBinary);
}
else if (handle != null)
{
if (handle.IsInvalid)
{
throw new ArgumentException(
SR.Argument_InvalidSafeHandle,
nameof(handle));
}
else
{
errorCode = (int)Interop.Advapi32.SetSecurityInfoByHandle(handle, (uint)type, (uint)securityInformation, OwnerBinary, GroupBinary, DaclBinary, SaclBinary);
}
}
else
{
// both are null, shouldn't happen
Debug.Assert(false, "Internal error: both name and handle are null");
throw new ArgumentException();
}
// Map well-known failure codes to the exceptions callers of this layer expect.
if (errorCode == Interop.Errors.ERROR_NOT_ALL_ASSIGNED ||
errorCode == Interop.Errors.ERROR_PRIVILEGE_NOT_HELD)
{
throw new PrivilegeNotHeldException(Privilege.Security);
}
else if (errorCode == Interop.Errors.ERROR_ACCESS_DENIED ||
errorCode == Interop.Errors.ERROR_CANT_OPEN_ANONYMOUS)
{
throw new UnauthorizedAccessException();
}
else if (errorCode != Interop.Errors.ERROR_SUCCESS)
{
goto Error;
}
}
catch
{
// protection against exception filter-based luring attacks
// (Revert here and in finally; Privilege tracks whether a revert is still needed.)
if (securityPrivilege != null)
{
securityPrivilege.Revert();
}
throw;
}
finally
{
if (securityPrivilege != null)
{
securityPrivilege.Revert();
}
}
return 0;
Error:
if (errorCode == Interop.Errors.ERROR_NOT_ENOUGH_MEMORY)
{
throw new OutOfMemoryException();
}
return errorCode;
}
}
}
| |
namespace ControlzEx.Helpers
{
using System;
using System.Collections.Generic;
using System.Windows;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using ControlzEx.Controls.Internal;
using ControlzEx.Standard;
/// <summary>
/// Renders the per-part bitmaps used to draw a window "glow" border.
/// Each <see cref="GlowBitmapPart"/> is a corner, an edge, or an edge segment adjacent to a
/// corner; the generated bitmap contains a gradient fading away from the window plus a
/// one-pixel-thick black seam on the window-facing side.
/// </summary>
public static class GlowWindowBitmapGenerator
{
/// <summary>
/// Generates the bitmap for one glow part.
/// For a glow depth of 1 the bitmap is solid black (no room for a gradient);
/// otherwise a gradient brush is drawn and the black seam rectangle painted on top.
/// </summary>
public static RenderTargetBitmap GenerateBitmapSource(GlowBitmapPart part, int glowDepth, bool useRadialGradientForCorners)
{
var size = GetSize(part, glowDepth);
var drawingVisual = new DrawingVisual();
using (var drawingContext = drawingVisual.RenderOpen())
{
if (glowDepth > 1)
{
var gradientBrush = CreateGradientBrush(part, useRadialGradientForCorners);
gradientBrush.Freeze();
drawingContext.DrawRectangle(gradientBrush, null, new Rect(0, 0, size.Width, size.Height));
drawingContext.DrawRectangle(Brushes.Black, null, GetBlackRect(part, glowDepth));
}
else
{
drawingContext.DrawRectangle(Brushes.Black, null, new Rect(0, 0, size.Width, size.Height));
}
}
// 96 DPI: the bitmap is produced in device-independent pixels.
var targetBitmap = new RenderTargetBitmap((int)size.Width, (int)size.Height, 96, 96, PixelFormats.Pbgra32);
targetBitmap.Render(drawingVisual);
return targetBitmap;
}
/// <summary>
/// Returns the rectangle for the black seam: the pixel (for corners) or one-pixel-thick
/// strip (for edges) on the side of the bitmap that touches the window.
/// </summary>
private static Rect GetBlackRect(GlowBitmapPart part, int glowDepth)
{
switch (part)
{
case GlowBitmapPart.CornerTopLeft:
return new Rect(new Point(glowDepth - 1, glowDepth - 1), new Size(1, 1));
case GlowBitmapPart.CornerTopRight:
return new Rect(new Point(0, glowDepth - 1), new Size(1, 1));
case GlowBitmapPart.CornerBottomLeft:
return new Rect(new Point(glowDepth - 1, 0), new Size(1, 1));
case GlowBitmapPart.CornerBottomRight:
return new Rect(new Point(0, 0), new Size(1, 1));
case GlowBitmapPart.TopLeft:
case GlowBitmapPart.Top:
case GlowBitmapPart.TopRight:
return new Rect(new Point(0, glowDepth - 1), new Size(glowDepth, 1));
case GlowBitmapPart.LeftTop:
case GlowBitmapPart.Left:
case GlowBitmapPart.LeftBottom:
return new Rect(new Point(glowDepth - 1, 0), new Size(1, glowDepth));
case GlowBitmapPart.BottomLeft:
case GlowBitmapPart.Bottom:
case GlowBitmapPart.BottomRight:
return new Rect(new Point(0, 0), new Size(glowDepth, 1));
case GlowBitmapPart.RightTop:
case GlowBitmapPart.Right:
case GlowBitmapPart.RightBottom:
return new Rect(new Point(0, 0), new Size(1, glowDepth));
default:
throw new ArgumentOutOfRangeException(nameof(part), part, null);
}
}
/// <summary>
/// Returns the bitmap size for a part. Plain edges (Top/Bottom/Left/Right) are one pixel in
/// the direction they get stretched; the corner-adjacent segments (e.g. TopLeft, LeftTop)
/// are capped at 6 pixels along the edge so the corner blend stays small.
/// </summary>
private static Size GetSize(GlowBitmapPart part, int glowDepth)
{
switch (part)
{
case GlowBitmapPart.CornerTopLeft:
case GlowBitmapPart.CornerTopRight:
case GlowBitmapPart.CornerBottomLeft:
case GlowBitmapPart.CornerBottomRight:
return new Size(glowDepth, glowDepth);
case GlowBitmapPart.Top:
case GlowBitmapPart.Bottom:
return new Size(1, glowDepth);
case GlowBitmapPart.TopLeft:
case GlowBitmapPart.TopRight:
case GlowBitmapPart.BottomLeft:
case GlowBitmapPart.BottomRight:
return new Size(Math.Min(glowDepth, 6), glowDepth);
case GlowBitmapPart.Left:
case GlowBitmapPart.Right:
return new Size(glowDepth, 1);
case GlowBitmapPart.LeftTop:
case GlowBitmapPart.LeftBottom:
case GlowBitmapPart.RightTop:
case GlowBitmapPart.RightBottom:
return new Size(glowDepth, Math.Min(glowDepth, 6));
default:
throw new ArgumentOutOfRangeException(nameof(part), part, null);
}
}
/// <summary>
/// Creates the gradient brush for a part: a radial brush for corners when
/// <paramref name="useRadialGradientForCorners"/> is set, otherwise a linear brush
/// oriented by <see cref="GetStartAndEndPoint"/>.
/// </summary>
private static GradientBrush CreateGradientBrush(GlowBitmapPart part, bool useRadialGradientForCorners)
{
var startAndEndPoint = GetStartAndEndPoint(part);
var gradientStops = GetGradientStops(part, useRadialGradientForCorners);
var gradientStopCollection = new GradientStopCollection(gradientStops);
if (useRadialGradientForCorners == false)
{
return new LinearGradientBrush(gradientStopCollection, startAndEndPoint.Start, startAndEndPoint.End);
}
switch (part)
{
case GlowBitmapPart.CornerTopLeft:
case GlowBitmapPart.CornerTopRight:
case GlowBitmapPart.CornerBottomLeft:
case GlowBitmapPart.CornerBottomRight:
// Center sits toward the window-facing corner (0.2/0.8 depending on which
// corner, judged by whether the gradient start is at coordinate 0).
return new RadialGradientBrush(gradientStopCollection)
{
GradientOrigin = startAndEndPoint.Start,
Center = new Point(startAndEndPoint.Start.X.AreClose(0) ? 0.2 : 0.8, startAndEndPoint.Start.Y.AreClose(0) ? 0.2 : 0.8),
RadiusX = 1,
RadiusY = 1
};
case GlowBitmapPart.TopLeft:
case GlowBitmapPart.Top:
case GlowBitmapPart.TopRight:
case GlowBitmapPart.LeftTop:
case GlowBitmapPart.Left:
case GlowBitmapPart.LeftBottom:
case GlowBitmapPart.BottomLeft:
case GlowBitmapPart.Bottom:
case GlowBitmapPart.BottomRight:
case GlowBitmapPart.RightTop:
case GlowBitmapPart.Right:
case GlowBitmapPart.RightBottom:
return new LinearGradientBrush(gradientStopCollection, startAndEndPoint.Start, startAndEndPoint.End);
default:
throw new ArgumentOutOfRangeException(nameof(part), part, null);
}
}
/// <summary>
/// Returns the gradient start/end (in brush-relative 0..1 coordinates) for a part.
/// The start is on the window-facing side, the end on the outer side; the slightly
/// offset values (0.4/0.6, 0.1) skew the fade near corners for a smooth blend.
/// </summary>
private static StartAndEndPoint GetStartAndEndPoint(GlowBitmapPart part)
{
switch (part)
{
case GlowBitmapPart.CornerTopLeft:
return new(new Point(1, 1), new Point(0, 0));
case GlowBitmapPart.CornerTopRight:
return new(new Point(0, 1), new Point(1, 0));
case GlowBitmapPart.CornerBottomLeft:
return new(new Point(1, 0), new Point(0, 1));
case GlowBitmapPart.CornerBottomRight:
return new(new Point(0, 0), new Point(1, 1));
case GlowBitmapPart.TopLeft:
return new(new Point(0.6, 1), new Point(0.5, 0));
case GlowBitmapPart.Top:
return new(new Point(0.5, 1), new Point(0.5, 0));
case GlowBitmapPart.TopRight:
return new(new Point(0.4, 1), new Point(0.5, 0));
case GlowBitmapPart.LeftTop:
return new(new Point(1, 0.1), new Point(0, 0));
case GlowBitmapPart.Left:
return new(new Point(1, 0), new Point(0, 0));
case GlowBitmapPart.LeftBottom:
return new(new Point(1, 0), new Point(0, 0.1));
case GlowBitmapPart.BottomLeft:
return new(new Point(0.6, 0), new Point(0.5, 1));
case GlowBitmapPart.Bottom:
return new(new Point(0.5, 0), new Point(0.5, 1));
case GlowBitmapPart.BottomRight:
return new(new Point(0.4, 0), new Point(0.5, 1));
case GlowBitmapPart.RightTop:
return new(new Point(0, 0.1), new Point(1, 0));
case GlowBitmapPart.Right:
return new(new Point(0, 0), new Point(1, 0));
case GlowBitmapPart.RightBottom:
return new(new Point(0, 0), new Point(1, 0.1));
default:
throw new ArgumentOutOfRangeException(nameof(part), part, null);
}
}
/// <summary>
/// Yields the gradient stops: a gray that fades to fully transparent.
/// Corners use a shorter mid-stop (0.3, or 0.5 radial) than edges (0.6) so the
/// corner glow does not appear denser than the edges.
/// </summary>
private static IEnumerable<GradientStop> GetGradientStops(GlowBitmapPart part, bool useRadialGradientForCorners)
{
switch (part)
{
case GlowBitmapPart.CornerBottomLeft:
case GlowBitmapPart.CornerBottomRight:
case GlowBitmapPart.CornerTopLeft:
case GlowBitmapPart.CornerTopRight:
if (useRadialGradientForCorners)
{
yield return new GradientStop(ColorFromString("#55838383"), 0);
yield return new GradientStop(ColorFromString("#02838383"), 0.5);
yield return new GradientStop(ColorFromString("#00000000"), 1);
}
else
{
yield return new GradientStop(ColorFromString("#55838383"), 0);
yield return new GradientStop(ColorFromString("#02838383"), 0.3);
yield return new GradientStop(ColorFromString("#00000000"), 1);
}
break;
default:
yield return new GradientStop(ColorFromString("#55838383"), 0);
yield return new GradientStop(ColorFromString("#02838383"), 0.6);
yield return new GradientStop(ColorFromString("#00000000"), 1);
break;
}
}
/// <summary>Parses an ARGB hex string (e.g. "#55838383") into a <see cref="Color"/>.</summary>
private static Color ColorFromString(string input)
{
return (Color)ColorConverter.ConvertFromString(input);
}
/// <summary>Immutable pair of gradient start/end points in brush-relative coordinates.</summary>
private readonly struct StartAndEndPoint
{
public StartAndEndPoint(Point start, Point end)
{
this.Start = start;
this.End = end;
}
public Point Start { get; }
public Point End { get; }
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using System.Threading.Tasks.Sources;
using Internal.Runtime.CompilerServices;
namespace System.Runtime.CompilerServices
{
/// <summary>Represents a builder for asynchronous methods that returns a <see cref="ValueTask{TResult}"/>.</summary>
/// <typeparam name="TResult">The type of the result.</typeparam>
[StructLayout(LayoutKind.Auto)]
public struct AsyncValueTaskMethodBuilder<TResult>
{
/// <summary>Sentinel object used to indicate that the builder completed synchronously and successfully.</summary>
/// <remarks>
/// To avoid memory safety issues even in the face of invalid race conditions, we ensure that the type of this object
/// is valid for the mode in which we're operating. As such, it's cached on the generic builder per TResult
/// rather than having one sentinel instance for all types.
/// </remarks>
internal static readonly object s_syncSuccessSentinel = AsyncTaskCache.s_valueTaskPoolingEnabled ? (object)
new SyncSuccessSentinelStateMachineBox() :
new Task<TResult>(default(TResult)!);
/// <summary>The wrapped state machine or task. If the operation completed synchronously and successfully, this will be a sentinel object compared by reference identity.</summary>
private object? m_task; // Debugger depends on the exact name of this field.
/// <summary>The result for this builder if it's completed synchronously, in which case <see cref="m_task"/> will be <see cref="s_syncSuccessSentinel"/>.</summary>
private TResult _result;
/// <summary>Creates an instance of the <see cref="AsyncValueTaskMethodBuilder{TResult}"/> struct.</summary>
/// <returns>The initialized instance.</returns>
public static AsyncValueTaskMethodBuilder<TResult> Create() => default;
/// <summary>Begins running the builder with the associated state machine.</summary>
/// <typeparam name="TStateMachine">The type of the state machine.</typeparam>
/// <param name="stateMachine">The state machine instance, passed by reference.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void Start<TStateMachine>(ref TStateMachine stateMachine) where TStateMachine : IAsyncStateMachine =>
AsyncMethodBuilderCore.Start(ref stateMachine);
/// <summary>Associates the builder with the specified state machine.</summary>
/// <param name="stateMachine">The state machine instance to associate with the builder.</param>
public void SetStateMachine(IAsyncStateMachine stateMachine) =>
AsyncMethodBuilderCore.SetStateMachine(stateMachine, task: null);
/// <summary>Marks the value task as successfully completed.</summary>
/// <param name="result">The result to use to complete the value task.</param>
public void SetResult(TResult result)
{
if (m_task is null)
{
_result = result;
m_task = s_syncSuccessSentinel;
}
else if (AsyncTaskCache.s_valueTaskPoolingEnabled)
{
Unsafe.As<StateMachineBox>(m_task).SetResult(result);
}
else
{
AsyncTaskMethodBuilder<TResult>.SetExistingTaskResult(Unsafe.As<Task<TResult>>(m_task), result);
}
}
/// <summary>Marks the value task as failed and binds the specified exception to the value task.</summary>
/// <param name="exception">The exception to bind to the value task.</param>
public void SetException(Exception exception)
{
if (AsyncTaskCache.s_valueTaskPoolingEnabled)
{
SetException(exception, ref Unsafe.As<object?, StateMachineBox?>(ref m_task));
}
else
{
AsyncTaskMethodBuilder<TResult>.SetException(exception, ref Unsafe.As<object?, Task<TResult>?>(ref m_task));
}
}
internal static void SetException(Exception exception, [NotNull] ref StateMachineBox? boxFieldRef)
{
if (exception is null)
{
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.exception);
}
(boxFieldRef ??= CreateWeaklyTypedStateMachineBox()).SetException(exception);
}
/// <summary>Gets the value task for this builder.</summary>
public ValueTask<TResult> Task
{
get
{
if (m_task == s_syncSuccessSentinel)
{
return new ValueTask<TResult>(_result);
}
// With normal access paterns, m_task should always be non-null here: the async method should have
// either completed synchronously, in which case SetResult would have set m_task to a non-null object,
// or it should be completing asynchronously, in which case AwaitUnsafeOnCompleted would have similarly
// initialized m_task to a state machine object. However, if the type is used manually (not via
// compiler-generated code) and accesses Task directly, we force it to be initialized. Things will then
// "work" but in a degraded mode, as we don't know the TStateMachine type here, and thus we use a box around
// the interface instead.
if (AsyncTaskCache.s_valueTaskPoolingEnabled)
{
var box = Unsafe.As<StateMachineBox?>(m_task);
if (box is null)
{
m_task = box = CreateWeaklyTypedStateMachineBox();
}
return new ValueTask<TResult>(box, box.Version);
}
else
{
var task = Unsafe.As<Task<TResult>?>(m_task);
if (task is null)
{
m_task = task = new Task<TResult>(); // base task used rather than box to minimize size when used as manual promise
}
return new ValueTask<TResult>(task);
}
}
}
/// <summary>Schedules the state machine to proceed to the next action when the specified awaiter completes.</summary>
/// <typeparam name="TAwaiter">The type of the awaiter.</typeparam>
/// <typeparam name="TStateMachine">The type of the state machine.</typeparam>
/// <param name="awaiter">the awaiter</param>
/// <param name="stateMachine">The state machine.</param>
public void AwaitOnCompleted<TAwaiter, TStateMachine>(ref TAwaiter awaiter, ref TStateMachine stateMachine)
where TAwaiter : INotifyCompletion
where TStateMachine : IAsyncStateMachine
{
if (AsyncTaskCache.s_valueTaskPoolingEnabled)
{
AwaitOnCompleted(ref awaiter, ref stateMachine, ref Unsafe.As<object?, StateMachineBox?>(ref m_task));
}
else
{
AsyncTaskMethodBuilder<TResult>.AwaitOnCompleted(ref awaiter, ref stateMachine, ref Unsafe.As<object?, Task<TResult>?>(ref m_task));
}
}
internal static void AwaitOnCompleted<TAwaiter, TStateMachine>(
ref TAwaiter awaiter, ref TStateMachine stateMachine, [NotNull] ref StateMachineBox? box)
where TAwaiter : INotifyCompletion
where TStateMachine : IAsyncStateMachine
{
try
{
awaiter.OnCompleted(GetStateMachineBox(ref stateMachine, ref box).MoveNextAction);
}
catch (Exception e)
{
System.Threading.Tasks.Task.ThrowAsync(e, targetContext: null);
}
}
/// <summary>Schedules the state machine to proceed to the next action when the specified awaiter completes.</summary>
/// <typeparam name="TAwaiter">The type of the awaiter.</typeparam>
/// <typeparam name="TStateMachine">The type of the state machine.</typeparam>
/// <param name="awaiter">the awaiter</param>
/// <param name="stateMachine">The state machine.</param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public void AwaitUnsafeOnCompleted<TAwaiter, TStateMachine>(ref TAwaiter awaiter, ref TStateMachine stateMachine)
where TAwaiter : ICriticalNotifyCompletion
where TStateMachine : IAsyncStateMachine
{
if (AsyncTaskCache.s_valueTaskPoolingEnabled)
{
AwaitUnsafeOnCompleted(ref awaiter, ref stateMachine, ref Unsafe.As<object?, StateMachineBox?>(ref m_task));
}
else
{
AsyncTaskMethodBuilder<TResult>.AwaitUnsafeOnCompleted(ref awaiter, ref stateMachine, ref Unsafe.As<object?, Task<TResult>?>(ref m_task));
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
internal static void AwaitUnsafeOnCompleted<TAwaiter, TStateMachine>(
ref TAwaiter awaiter, ref TStateMachine stateMachine, [NotNull] ref StateMachineBox? boxRef)
where TAwaiter : ICriticalNotifyCompletion
where TStateMachine : IAsyncStateMachine
{
IAsyncStateMachineBox box = GetStateMachineBox(ref stateMachine, ref boxRef);
AsyncTaskMethodBuilder<VoidTaskResult>.AwaitUnsafeOnCompleted(ref awaiter, box);
}
/// <summary>Gets the "boxed" state machine object.</summary>
/// <typeparam name="TStateMachine">Specifies the type of the async state machine.</typeparam>
/// <param name="stateMachine">The state machine.</param>
/// <param name="boxFieldRef">A reference to the field containing the initialized state machine box.</param>
/// <returns>The "boxed" state machine.</returns>
private static IAsyncStateMachineBox GetStateMachineBox<TStateMachine>(
ref TStateMachine stateMachine,
[NotNull] ref StateMachineBox? boxFieldRef)
where TStateMachine : IAsyncStateMachine
{
ExecutionContext? currentContext = ExecutionContext.Capture();
// Check first for the most common case: not the first yield in an async method.
// In this case, the first yield will have already "boxed" the state machine in
// a strongly-typed manner into an AsyncStateMachineBox. It will already contain
// the state machine as well as a MoveNextDelegate and a context. The only thing
// we might need to do is update the context if that's changed since it was stored.
if (boxFieldRef is StateMachineBox<TStateMachine> stronglyTypedBox)
{
if (stronglyTypedBox.Context != currentContext)
{
stronglyTypedBox.Context = currentContext;
}
return stronglyTypedBox;
}
// The least common case: we have a weakly-typed boxed. This results if the debugger
// or some other use of reflection accesses a property like ObjectIdForDebugger. In
// such situations, we need to get an object to represent the builder, but we don't yet
// know the type of the state machine, and thus can't use TStateMachine. Instead, we
// use the IAsyncStateMachine interface, which all TStateMachines implement. This will
// result in a boxing allocation when storing the TStateMachine if it's a struct, but
// this only happens in active debugging scenarios where such performance impact doesn't
// matter.
if (boxFieldRef is StateMachineBox<IAsyncStateMachine> weaklyTypedBox)
{
// If this is the first await, we won't yet have a state machine, so store it.
if (weaklyTypedBox.StateMachine is null)
{
Debugger.NotifyOfCrossThreadDependency(); // same explanation as with usage below
weaklyTypedBox.StateMachine = stateMachine;
}
// Update the context. This only happens with a debugger, so no need to spend
// extra IL checking for equality before doing the assignment.
weaklyTypedBox.Context = currentContext;
return weaklyTypedBox;
}
// Alert a listening debugger that we can't make forward progress unless it slips threads.
// If we don't do this, and a method that uses "await foo;" is invoked through funceval,
// we could end up hooking up a callback to push forward the async method's state machine,
// the debugger would then abort the funceval after it takes too long, and then continuing
// execution could result in another callback being hooked up. At that point we have
// multiple callbacks registered to push the state machine, which could result in bad behavior.
Debugger.NotifyOfCrossThreadDependency();
// At this point, m_task should really be null, in which case we want to create the box.
// However, in a variety of debugger-related (erroneous) situations, it might be non-null,
// e.g. if the Task property is examined in a Watch window, forcing it to be lazily-intialized
// as a Task<TResult> rather than as an ValueTaskStateMachineBox. The worst that happens in such
// cases is we lose the ability to properly step in the debugger, as the debugger uses that
// object's identity to track this specific builder/state machine. As such, we proceed to
// overwrite whatever's there anyway, even if it's non-null.
var box = StateMachineBox<TStateMachine>.GetOrCreateBox();
boxFieldRef = box; // important: this must be done before storing stateMachine into box.StateMachine!
box.StateMachine = stateMachine;
box.Context = currentContext;
return box;
}
/// <summary>
/// Creates a box object for use when a non-standard access pattern is employed, e.g. when Task
/// is evaluated in the debugger prior to the async method yielding for the first time.
/// </summary>
internal static StateMachineBox CreateWeaklyTypedStateMachineBox() => new StateMachineBox<IAsyncStateMachine>();
/// <summary>
/// Gets an object that may be used to uniquely identify this builder to the debugger.
/// </summary>
/// <remarks>
/// This property lazily instantiates the ID in a non-thread-safe manner.
/// It must only be used by the debugger and tracing purposes, and only in a single-threaded manner
/// when no other threads are in the middle of accessing this or other members that lazily initialize the box.
/// </remarks>
internal object ObjectIdForDebugger
{
get
{
if (m_task is null)
{
m_task = AsyncTaskCache.s_valueTaskPoolingEnabled ? (object)
CreateWeaklyTypedStateMachineBox() :
AsyncTaskMethodBuilder<TResult>.CreateWeaklyTypedStateMachineBox();
}
return m_task;
}
}
/// <summary>The base type for all value task box reusable box objects, regardless of state machine type.</summary>
internal abstract class StateMachineBox :
IValueTaskSource<TResult>, IValueTaskSource
{
/// <summary>A delegate to the MoveNext method.</summary>
protected Action? _moveNextAction;
/// <summary>Captured ExecutionContext with which to invoke MoveNext.</summary>
public ExecutionContext? Context;
/// <summary>Implementation for IValueTaskSource interfaces.</summary>
protected ManualResetValueTaskSourceCore<TResult> _valueTaskSource;
/// <summary>Completes the box with a result.</summary>
/// <param name="result">The result.</param>
public void SetResult(TResult result) =>
_valueTaskSource.SetResult(result);
/// <summary>Completes the box with an error.</summary>
/// <param name="error">The exception.</param>
public void SetException(Exception error) =>
_valueTaskSource.SetException(error);
/// <summary>Gets the status of the box.</summary>
public ValueTaskSourceStatus GetStatus(short token) => _valueTaskSource.GetStatus(token);
/// <summary>Schedules the continuation action for this box.</summary>
public void OnCompleted(Action<object?> continuation, object? state, short token, ValueTaskSourceOnCompletedFlags flags) =>
_valueTaskSource.OnCompleted(continuation, state, token, flags);
/// <summary>Gets the current version number of the box.</summary>
public short Version => _valueTaskSource.Version;
/// <summary>Implemented by derived type.</summary>
TResult IValueTaskSource<TResult>.GetResult(short token) => throw NotImplemented.ByDesign;
/// <summary>Implemented by derived type.</summary>
void IValueTaskSource.GetResult(short token) => throw NotImplemented.ByDesign;
}
private sealed class SyncSuccessSentinelStateMachineBox : StateMachineBox
{
public SyncSuccessSentinelStateMachineBox() => SetResult(default!);
}
/// <summary>Provides a strongly-typed box object based on the specific state machine type in use.</summary>
private sealed class StateMachineBox<TStateMachine> :
StateMachineBox,
IValueTaskSource<TResult>, IValueTaskSource, IAsyncStateMachineBox, IThreadPoolWorkItem
where TStateMachine : IAsyncStateMachine
{
/// <summary>Delegate used to invoke on an ExecutionContext when passed an instance of this box type.</summary>
private static readonly ContextCallback s_callback = ExecutionContextCallback;
/// <summary>Lock used to protected the shared cache of boxes.</summary>
/// <remarks>The code that uses this assumes a runtime without thread aborts.</remarks>
private static int s_cacheLock;
/// <summary>Singly-linked list cache of boxes.</summary>
private static StateMachineBox<TStateMachine>? s_cache;
/// <summary>The number of items stored in <see cref="s_cache"/>.</summary>
private static int s_cacheSize;
// TODO:
// AsyncTaskMethodBuilder logs about the state machine box lifecycle; AsyncValueTaskMethodBuilder currently
// does not when it employs these pooled boxes. That logging is based on Task IDs, which we lack here.
// We could use the box's Version, but that is very likely to conflict with the IDs of other tasks in the system.
// For now, we don't log, but should we choose to we'll probably want to store an int ID on the state machine box,
// and initialize it an ID from Task's generator.
/// <summary>If this box is stored in the cache, the next box in the cache.</summary>
private StateMachineBox<TStateMachine>? _next;
/// <summary>The state machine itself.</summary>
[AllowNull, MaybeNull]
public TStateMachine StateMachine = default;
/// <summary>Gets a box object to use for an operation. This may be a reused, pooled object, or it may be new.</summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)] // only one caller
internal static StateMachineBox<TStateMachine> GetOrCreateBox()
{
// Try to acquire the lock to access the cache. If there's any contention, don't use the cache.
if (Interlocked.CompareExchange(ref s_cacheLock, 1, 0) == 0)
{
// If there are any instances cached, take one from the cache stack and use it.
StateMachineBox<TStateMachine>? box = s_cache;
if (!(box is null))
{
s_cache = box._next;
box._next = null;
s_cacheSize--;
Debug.Assert(s_cacheSize >= 0, "Expected the cache size to be non-negative.");
// Release the lock and return the box.
Volatile.Write(ref s_cacheLock, 0);
return box;
}
// No objects were cached. We'll just create a new instance.
Debug.Assert(s_cacheSize == 0, "Expected cache size to be 0.");
// Release the lock.
Volatile.Write(ref s_cacheLock, 0);
}
// Couldn't quickly get a cached instance, so create a new instance.
return new StateMachineBox<TStateMachine>();
}
private void ReturnOrDropBox()
{
Debug.Assert(_next is null, "Expected box to not be part of cached list.");
// Clear out the state machine and associated context to avoid keeping arbitrary state referenced by
// lifted locals. We want to do this regardless of whether we end up caching the box or not, in case
// the caller keeps the box alive for an arbitrary period of time.
StateMachine = default;
Context = default;
// Reset the MRVTSC. We can either do this here, in which case we may be paying the (small) overhead
// to reset the box even if we're going to drop it, or we could do it while holding the lock, in which
// case we'll only reset it if necessary but causing the lock to be held for longer, thereby causing
// more contention. For now at least, we do it outside of the lock. (This must not be done after
// the lock is released, since at that point the instance could already be in use elsewhere.)
// We also want to increment the version number even if we're going to drop it, to maximize the chances
// that incorrectly double-awaiting a ValueTask will produce an error.
_valueTaskSource.Reset();
// If reusing the object would result in potentially wrapping around its version number, just throw it away.
// This provides a modicum of additional safety when ValueTasks are misused (helping to avoid the case where
// a ValueTask is illegally re-awaited and happens to do so at exactly 2^16 uses later on this exact same instance),
// at the expense of potentially incurring an additional allocation every 65K uses.
if ((ushort)_valueTaskSource.Version == ushort.MaxValue)
{
return;
}
// Try to acquire the cache lock. If there's any contention, or if the cache is full, we just throw away the object.
if (Interlocked.CompareExchange(ref s_cacheLock, 1, 0) == 0)
{
if (s_cacheSize < AsyncTaskCache.s_valueTaskPoolingCacheSize)
{
// Push the box onto the cache stack for subsequent reuse.
_next = s_cache;
s_cache = this;
s_cacheSize++;
Debug.Assert(s_cacheSize > 0 && s_cacheSize <= AsyncTaskCache.s_valueTaskPoolingCacheSize, "Expected cache size to be within bounds.");
}
// Release the lock.
Volatile.Write(ref s_cacheLock, 0);
}
}
/// <summary>
/// Used to initialize s_callback above. We don't use a lambda for this on purpose: a lambda would
/// introduce a new generic type behind the scenes that comes with a hefty size penalty in AOT builds.
/// </summary>
private static void ExecutionContextCallback(object? s)
{
// Only used privately to pass directly to EC.Run
Debug.Assert(s is StateMachineBox<TStateMachine>);
Unsafe.As<StateMachineBox<TStateMachine>>(s).StateMachine!.MoveNext();
}
/// <summary>A delegate to the <see cref="MoveNext()"/> method.</summary>
public Action MoveNextAction => _moveNextAction ??= new Action(MoveNext);
/// <summary>Invoked to run MoveNext when this instance is executed from the thread pool.</summary>
void IThreadPoolWorkItem.Execute() => MoveNext();
/// <summary>Calls MoveNext on <see cref="StateMachine"/></summary>
public void MoveNext()
{
ExecutionContext? context = Context;
if (context is null)
{
Debug.Assert(!(StateMachine is null));
StateMachine.MoveNext();
}
else
{
ExecutionContext.RunInternal(context, s_callback, this);
}
}
/// <summary>Get the result of the operation.</summary>
TResult IValueTaskSource<TResult>.GetResult(short token)
{
try
{
return _valueTaskSource.GetResult(token);
}
finally
{
// Reuse this instance if possible, otherwise clear and drop it.
ReturnOrDropBox();
}
}
/// <summary>Get the result of the operation.</summary>
void IValueTaskSource.GetResult(short token)
{
try
{
_valueTaskSource.GetResult(token);
}
finally
{
// Reuse this instance if possible, otherwise clear and drop it.
ReturnOrDropBox();
}
}
/// <summary>Gets the state machine as a boxed object. This should only be used for debugging purposes.</summary>
IAsyncStateMachine IAsyncStateMachineBox.GetStateMachineObject()
{
    // Likely boxes the state machine; debugging-only accessor.
    return StateMachine!;
}
}
}
}
| |
/*
* Copyright 2010-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
namespace Amazon.SimpleWorkflow.Model
{
/// <summary>
/// <para> Provides details of the <c>ActivityTaskScheduled</c> event. </para>
/// </summary>
public class ActivityTaskScheduledEventAttributes
{
    // Nullable backing field so IsSetDecisionTaskCompletedEventId can distinguish
    // "never assigned" from an explicit value of zero.
    private long? decisionTaskCompletedEventId;

    /// <summary>
    /// The type of the activity task.
    ///
    /// </summary>
    public ActivityType ActivityType { get; set; }

    /// <summary>
    /// Sets the ActivityType property
    /// </summary>
    /// <param name="activityType">The value to set for the ActivityType property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ActivityTaskScheduledEventAttributes WithActivityType(ActivityType activityType)
    {
        this.ActivityType = activityType;
        return this;
    }

    // True when the ActivityType property has been assigned a non-null value.
    internal bool IsSetActivityType()
    {
        return this.ActivityType != null;
    }

    /// <summary>
    /// The unique id of the activity task.
    ///
    /// <para>
    /// <b>Constraints:</b>
    /// <list type="definition">
    /// <item>
    /// <term>Length</term>
    /// <description>1 - 256</description>
    /// </item>
    /// </list>
    /// </para>
    /// </summary>
    public string ActivityId { get; set; }

    /// <summary>
    /// Sets the ActivityId property
    /// </summary>
    /// <param name="activityId">The value to set for the ActivityId property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ActivityTaskScheduledEventAttributes WithActivityId(string activityId)
    {
        this.ActivityId = activityId;
        return this;
    }

    // True when the ActivityId property has been assigned a non-null value.
    internal bool IsSetActivityId()
    {
        return this.ActivityId != null;
    }

    /// <summary>
    /// The input provided to the activity task.
    ///
    /// <para>
    /// <b>Constraints:</b>
    /// <list type="definition">
    /// <item>
    /// <term>Length</term>
    /// <description>0 - 32768</description>
    /// </item>
    /// </list>
    /// </para>
    /// </summary>
    public string Input { get; set; }

    /// <summary>
    /// Sets the Input property
    /// </summary>
    /// <param name="input">The value to set for the Input property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ActivityTaskScheduledEventAttributes WithInput(string input)
    {
        this.Input = input;
        return this;
    }

    // True when the Input property has been assigned a non-null value.
    internal bool IsSetInput()
    {
        return this.Input != null;
    }

    /// <summary>
    /// Optional data attached to the event that can be used by the decider in subsequent workflow tasks.
    /// This data is not sent to the activity.
    ///
    /// <para>
    /// <b>Constraints:</b>
    /// <list type="definition">
    /// <item>
    /// <term>Length</term>
    /// <description>0 - 32768</description>
    /// </item>
    /// </list>
    /// </para>
    /// </summary>
    public string Control { get; set; }

    /// <summary>
    /// Sets the Control property
    /// </summary>
    /// <param name="control">The value to set for the Control property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ActivityTaskScheduledEventAttributes WithControl(string control)
    {
        this.Control = control;
        return this;
    }

    // True when the Control property has been assigned a non-null value.
    internal bool IsSetControl()
    {
        return this.Control != null;
    }

    /// <summary>
    /// The maximum amount of time the activity task can wait to be assigned to a worker.
    ///
    /// <para>
    /// <b>Constraints:</b>
    /// <list type="definition">
    /// <item>
    /// <term>Length</term>
    /// <description>0 - 8</description>
    /// </item>
    /// </list>
    /// </para>
    /// </summary>
    public string ScheduleToStartTimeout { get; set; }

    /// <summary>
    /// Sets the ScheduleToStartTimeout property
    /// </summary>
    /// <param name="scheduleToStartTimeout">The value to set for the ScheduleToStartTimeout property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ActivityTaskScheduledEventAttributes WithScheduleToStartTimeout(string scheduleToStartTimeout)
    {
        this.ScheduleToStartTimeout = scheduleToStartTimeout;
        return this;
    }

    // True when the ScheduleToStartTimeout property has been assigned a non-null value.
    internal bool IsSetScheduleToStartTimeout()
    {
        return this.ScheduleToStartTimeout != null;
    }

    /// <summary>
    /// The maximum amount of time for this activity task.
    ///
    /// <para>
    /// <b>Constraints:</b>
    /// <list type="definition">
    /// <item>
    /// <term>Length</term>
    /// <description>0 - 8</description>
    /// </item>
    /// </list>
    /// </para>
    /// </summary>
    public string ScheduleToCloseTimeout { get; set; }

    /// <summary>
    /// Sets the ScheduleToCloseTimeout property
    /// </summary>
    /// <param name="scheduleToCloseTimeout">The value to set for the ScheduleToCloseTimeout property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ActivityTaskScheduledEventAttributes WithScheduleToCloseTimeout(string scheduleToCloseTimeout)
    {
        this.ScheduleToCloseTimeout = scheduleToCloseTimeout;
        return this;
    }

    // True when the ScheduleToCloseTimeout property has been assigned a non-null value.
    internal bool IsSetScheduleToCloseTimeout()
    {
        return this.ScheduleToCloseTimeout != null;
    }

    /// <summary>
    /// The maximum amount of time a worker may take to process the activity task.
    ///
    /// <para>
    /// <b>Constraints:</b>
    /// <list type="definition">
    /// <item>
    /// <term>Length</term>
    /// <description>0 - 8</description>
    /// </item>
    /// </list>
    /// </para>
    /// </summary>
    public string StartToCloseTimeout { get; set; }

    /// <summary>
    /// Sets the StartToCloseTimeout property
    /// </summary>
    /// <param name="startToCloseTimeout">The value to set for the StartToCloseTimeout property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ActivityTaskScheduledEventAttributes WithStartToCloseTimeout(string startToCloseTimeout)
    {
        this.StartToCloseTimeout = startToCloseTimeout;
        return this;
    }

    // True when the StartToCloseTimeout property has been assigned a non-null value.
    internal bool IsSetStartToCloseTimeout()
    {
        return this.StartToCloseTimeout != null;
    }

    /// <summary>
    /// The task list in which the activity task has been scheduled.
    ///
    /// </summary>
    public TaskList TaskList { get; set; }

    /// <summary>
    /// Sets the TaskList property
    /// </summary>
    /// <param name="taskList">The value to set for the TaskList property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ActivityTaskScheduledEventAttributes WithTaskList(TaskList taskList)
    {
        this.TaskList = taskList;
        return this;
    }

    // True when the TaskList property has been assigned a non-null value.
    internal bool IsSetTaskList()
    {
        return this.TaskList != null;
    }

    /// <summary>
    /// The id of the <c>DecisionTaskCompleted</c> event corresponding to the decision that resulted in the
    /// scheduling of this activity task. This information can be useful for diagnosing problems by tracing
    /// back the chain of events leading up to this event.
    ///
    /// </summary>
    public long DecisionTaskCompletedEventId
    {
        // Unset reads back as default(long), i.e. 0.
        get { return this.decisionTaskCompletedEventId.GetValueOrDefault(); }
        set { this.decisionTaskCompletedEventId = value; }
    }

    /// <summary>
    /// Sets the DecisionTaskCompletedEventId property
    /// </summary>
    /// <param name="decisionTaskCompletedEventId">The value to set for the DecisionTaskCompletedEventId property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ActivityTaskScheduledEventAttributes WithDecisionTaskCompletedEventId(long decisionTaskCompletedEventId)
    {
        this.decisionTaskCompletedEventId = decisionTaskCompletedEventId;
        return this;
    }

    // True when DecisionTaskCompletedEventId has been explicitly assigned.
    internal bool IsSetDecisionTaskCompletedEventId()
    {
        return this.decisionTaskCompletedEventId.HasValue;
    }

    /// <summary>
    /// The maximum time before which the worker processing this task must report progress by calling
    /// <a>RecordActivityTaskHeartbeat</a>. If the timeout is exceeded, the activity task is automatically
    /// timed out. If the worker subsequently attempts to record a heartbeat or return a result, it will
    /// be ignored.
    ///
    /// <para>
    /// <b>Constraints:</b>
    /// <list type="definition">
    /// <item>
    /// <term>Length</term>
    /// <description>0 - 8</description>
    /// </item>
    /// </list>
    /// </para>
    /// </summary>
    public string HeartbeatTimeout { get; set; }

    /// <summary>
    /// Sets the HeartbeatTimeout property
    /// </summary>
    /// <param name="heartbeatTimeout">The value to set for the HeartbeatTimeout property </param>
    /// <returns>this instance</returns>
    [Obsolete("The With methods are obsolete and will be removed in version 2 of the AWS SDK for .NET. See http://aws.amazon.com/sdkfornet/#version2 for more information.")]
    public ActivityTaskScheduledEventAttributes WithHeartbeatTimeout(string heartbeatTimeout)
    {
        this.HeartbeatTimeout = heartbeatTimeout;
        return this;
    }

    // True when the HeartbeatTimeout property has been assigned a non-null value.
    internal bool IsSetHeartbeatTimeout()
    {
        return this.HeartbeatTimeout != null;
    }
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.